commit 9e9641ab5c72eaae4a8a7c79a1b35c2bbc06da52
Author: RamkumarP
Date:   Thu Jun 19 07:13:31 2025 +0000

    test commit

diff --git a/index.html b/index.html
new file mode 100644
index 0000000..296b217
--- /dev/null
+++ b/index.html
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+ Hexr Factory Internal Portal
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/node_modules/.bin/esbuild b/node_modules/.bin/esbuild
new file mode 100644
index 0000000..63bb6d4
--- /dev/null
+++ b/node_modules/.bin/esbuild
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+        ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../esbuild/bin/esbuild" "$@"
+else
+  exec node "$basedir/../esbuild/bin/esbuild" "$@"
+fi
diff --git a/node_modules/.bin/esbuild.cmd b/node_modules/.bin/esbuild.cmd
new file mode 100644
index 0000000..d368539
--- /dev/null
+++ b/node_modules/.bin/esbuild.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\esbuild\bin\esbuild" %*
diff --git a/node_modules/.bin/esbuild.ps1 b/node_modules/.bin/esbuild.ps1
new file mode 100644
index 0000000..81ffbf9
--- /dev/null
+++ b/node_modules/.bin/esbuild.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../esbuild/bin/esbuild" $args
+  } else {
+    & "node$exe" "$basedir/../esbuild/bin/esbuild" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/nanoid b/node_modules/.bin/nanoid
new file mode 100644
index 0000000..46220bd
--- /dev/null
+++ b/node_modules/.bin/nanoid
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+        ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../nanoid/bin/nanoid.cjs" "$@"
+else
+  exec node "$basedir/../nanoid/bin/nanoid.cjs" "$@"
+fi
diff --git a/node_modules/.bin/nanoid.cmd b/node_modules/.bin/nanoid.cmd
new file mode 100644
index 0000000..601a2c8
--- /dev/null
+++ b/node_modules/.bin/nanoid.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nanoid\bin\nanoid.cjs" %*
diff --git a/node_modules/.bin/nanoid.ps1 b/node_modules/.bin/nanoid.ps1
new file mode 100644
index 0000000..d8a4d7a
--- /dev/null
+++ b/node_modules/.bin/nanoid.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
+  } else {
+    & "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/rollup b/node_modules/.bin/rollup
new file mode 100644
index 0000000..998fc16
--- /dev/null
+++ b/node_modules/.bin/rollup
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+        ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../rollup/dist/bin/rollup" "$@"
+else
+  exec node "$basedir/../rollup/dist/bin/rollup" "$@"
+fi
diff --git a/node_modules/.bin/rollup.cmd b/node_modules/.bin/rollup.cmd
new file mode 100644
index 0000000..d9a0a35
--- /dev/null
+++ b/node_modules/.bin/rollup.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rollup\dist\bin\rollup" %*
diff --git a/node_modules/.bin/rollup.ps1 b/node_modules/.bin/rollup.ps1
new file mode 100644
index 0000000..10f657d
--- /dev/null
+++ b/node_modules/.bin/rollup.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
+  } else {
+    & "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/vite b/node_modules/.bin/vite
new file mode 100644
index 0000000..014463f
--- /dev/null
+++ b/node_modules/.bin/vite
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+        ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../vite/bin/vite.js" "$@"
+else
+  exec node "$basedir/../vite/bin/vite.js" "$@"
+fi
diff --git a/node_modules/.bin/vite.cmd b/node_modules/.bin/vite.cmd
new file mode 100644
index 0000000..e824f3a
--- /dev/null
+++ b/node_modules/.bin/vite.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_#
2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\vite\bin\vite.js" %* diff --git a/node_modules/.bin/vite.ps1 b/node_modules/.bin/vite.ps1 new file mode 100644 index 0000000..a7759bc --- /dev/null +++ b/node_modules/.bin/vite.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args + } else { + & "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../vite/bin/vite.js" $args + } else { + & "node$exe" "$basedir/../vite/bin/vite.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..3e63ffa --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,312 @@ +{ + "name": "modern-website", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@esbuild/win32-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", + "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.43.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.43.0.tgz", + "integrity": "sha512-SnGhLiE5rlK0ofq8kzuDkM0g7FN1s5VYY+YSMTibP7CqShxCQvqtNxTARS4xX4PFJfHjG0ZQYX9iGzI3FQh5Aw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/estree": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", + "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", + "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": "0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": "0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + 
"@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + "@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" + } + }, + "node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rollup": { + "version": "4.43.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.43.0.tgz", + "integrity": "sha512-wdN2Kd3Twh8MAEOEJZsuxuLKCsBEo4PVNLK6tQWAn10VhsVewQLzcucMgLolRlhFybGxfclbPeEYBaP6RvUFGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.7" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.43.0", + "@rollup/rollup-android-arm64": "4.43.0", + "@rollup/rollup-darwin-arm64": "4.43.0", + "@rollup/rollup-darwin-x64": "4.43.0", + "@rollup/rollup-freebsd-arm64": "4.43.0", + "@rollup/rollup-freebsd-x64": "4.43.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.43.0", + "@rollup/rollup-linux-arm-musleabihf": "4.43.0", + "@rollup/rollup-linux-arm64-gnu": "4.43.0", + "@rollup/rollup-linux-arm64-musl": "4.43.0", + "@rollup/rollup-linux-loongarch64-gnu": "4.43.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.43.0", + "@rollup/rollup-linux-riscv64-gnu": "4.43.0", + 
"@rollup/rollup-linux-riscv64-musl": "4.43.0", + "@rollup/rollup-linux-s390x-gnu": "4.43.0", + "@rollup/rollup-linux-x64-gnu": "4.43.0", + "@rollup/rollup-linux-x64-musl": "4.43.0", + "@rollup/rollup-win32-arm64-msvc": "4.43.0", + "@rollup/rollup-win32-ia32-msvc": "4.43.0", + "@rollup/rollup-win32-x64-msvc": "4.43.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/vite": { + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + } + } +} diff --git a/node_modules/.vite/deps/_metadata.json b/node_modules/.vite/deps/_metadata.json new file mode 100644 index 0000000..0b9be04 --- /dev/null +++ b/node_modules/.vite/deps/_metadata.json @@ -0,0 +1,8 @@ +{ + "hash": "e12dfb77", + "configHash": "6387374e", + "lockfileHash": "1a6a7327", + "browserHash": "5a36384d", + "optimized": {}, + "chunks": {} +} \ No newline at end of file diff --git a/node_modules/.vite/deps/package.json b/node_modules/.vite/deps/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/.vite/deps/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@esbuild/win32-x64/README.md b/node_modules/@esbuild/win32-x64/README.md new file mode 100644 index 0000000..a99ee7c --- /dev/null +++ b/node_modules/@esbuild/win32-x64/README.md @@ -0,0 +1,3 @@ +# esbuild + +This is the Windows 64-bit binary for esbuild, a JavaScript bundler and minifier. 
See https://github.com/evanw/esbuild for details. diff --git a/node_modules/@esbuild/win32-x64/esbuild.exe b/node_modules/@esbuild/win32-x64/esbuild.exe new file mode 100644 index 0000000..3759b1c Binary files /dev/null and b/node_modules/@esbuild/win32-x64/esbuild.exe differ diff --git a/node_modules/@esbuild/win32-x64/package.json b/node_modules/@esbuild/win32-x64/package.json new file mode 100644 index 0000000..8daf624 --- /dev/null +++ b/node_modules/@esbuild/win32-x64/package.json @@ -0,0 +1,20 @@ +{ + "name": "@esbuild/win32-x64", + "version": "0.25.5", + "description": "The Windows 64-bit binary for esbuild, a JavaScript bundler.", + "repository": { + "type": "git", + "url": "git+https://github.com/evanw/esbuild.git" + }, + "license": "MIT", + "preferUnplugged": true, + "engines": { + "node": ">=18" + }, + "os": [ + "win32" + ], + "cpu": [ + "x64" + ] +} diff --git a/node_modules/@rollup/rollup-win32-x64-msvc/README.md b/node_modules/@rollup/rollup-win32-x64-msvc/README.md new file mode 100644 index 0000000..7382dbc --- /dev/null +++ b/node_modules/@rollup/rollup-win32-x64-msvc/README.md @@ -0,0 +1,3 @@ +# `@rollup/rollup-win32-x64-msvc` + +This is the **x86_64-pc-windows-msvc** binary for `rollup` diff --git a/node_modules/@rollup/rollup-win32-x64-msvc/package.json b/node_modules/@rollup/rollup-win32-x64-msvc/package.json new file mode 100644 index 0000000..c9e64f5 --- /dev/null +++ b/node_modules/@rollup/rollup-win32-x64-msvc/package.json @@ -0,0 +1,19 @@ +{ + "name": "@rollup/rollup-win32-x64-msvc", + "version": "4.43.0", + "os": [ + "win32" + ], + "cpu": [ + "x64" + ], + "files": [ + "rollup.win32-x64-msvc.node" + ], + "description": "Native bindings for Rollup", + "author": "Lukas Taegert-Atkinson", + "homepage": "https://rollupjs.org/", + "license": "MIT", + "repository": "rollup/rollup", + "main": "./rollup.win32-x64-msvc.node" +} \ No newline at end of file diff --git a/node_modules/@rollup/rollup-win32-x64-msvc/rollup.win32-x64-msvc.node b/node_modules/@rollup/rollup-win32-x64-msvc/rollup.win32-x64-msvc.node new file mode 100644 index 0000000..34810f9 Binary files /dev/null and b/node_modules/@rollup/rollup-win32-x64-msvc/rollup.win32-x64-msvc.node differ diff --git a/node_modules/@types/estree/LICENSE b/node_modules/@types/estree/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/estree/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/estree/README.md b/node_modules/@types/estree/README.md new file mode 100644 index 0000000..b0baf5f --- /dev/null +++ b/node_modules/@types/estree/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/estree` + +# Summary +This package contains type definitions for estree (https://github.com/estree/estree). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree. + +### Additional Details + * Last updated: Mon, 24 Mar 2025 07:34:10 GMT + * Dependencies: none + +# Credits +These definitions were written by [RReverser](https://github.com/RReverser). diff --git a/node_modules/@types/estree/flow.d.ts b/node_modules/@types/estree/flow.d.ts new file mode 100644 index 0000000..9d001a9 --- /dev/null +++ b/node_modules/@types/estree/flow.d.ts @@ -0,0 +1,167 @@ +declare namespace ESTree { + interface FlowTypeAnnotation extends Node {} + + interface FlowBaseTypeAnnotation extends FlowTypeAnnotation {} + + interface FlowLiteralTypeAnnotation extends FlowTypeAnnotation, Literal {} + + interface FlowDeclaration extends Declaration {} + + interface AnyTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface ArrayTypeAnnotation extends FlowTypeAnnotation { + elementType: FlowTypeAnnotation; + } + + interface BooleanLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface BooleanTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface ClassImplements extends Node { + id: Identifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface ClassProperty { + key: Expression; + value?: Expression | null; + typeAnnotation?: TypeAnnotation | null; + computed: boolean; + static: boolean; + } + + interface DeclareClass extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + body: ObjectTypeAnnotation; + extends: InterfaceExtends[]; + } + + interface DeclareFunction extends FlowDeclaration { + id: Identifier; + } + + interface DeclareModule extends FlowDeclaration { + id: Literal | Identifier; + body: BlockStatement; + } + + interface DeclareVariable extends FlowDeclaration { + id: Identifier; + } + + interface FunctionTypeAnnotation extends FlowTypeAnnotation { + params: FunctionTypeParam[]; + returnType: FlowTypeAnnotation; + rest?: FunctionTypeParam | null; + typeParameters?: TypeParameterDeclaration | null; + } + + interface FunctionTypeParam { + name: Identifier; + typeAnnotation: FlowTypeAnnotation; + optional: boolean; + } + + interface GenericTypeAnnotation extends FlowTypeAnnotation { + id: Identifier | QualifiedTypeIdentifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface InterfaceExtends extends Node { + id: Identifier | QualifiedTypeIdentifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface InterfaceDeclaration extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + extends: InterfaceExtends[]; + body: ObjectTypeAnnotation; + } + + interface IntersectionTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface MixedTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface NullableTypeAnnotation extends 
FlowTypeAnnotation { + typeAnnotation: TypeAnnotation; + } + + interface NumberLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface NumberTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface StringLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface StringTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface TupleTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface TypeofTypeAnnotation extends FlowTypeAnnotation { + argument: FlowTypeAnnotation; + } + + interface TypeAlias extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + right: FlowTypeAnnotation; + } + + interface TypeAnnotation extends Node { + typeAnnotation: FlowTypeAnnotation; + } + + interface TypeCastExpression extends Expression { + expression: Expression; + typeAnnotation: TypeAnnotation; + } + + interface TypeParameterDeclaration extends Node { + params: Identifier[]; + } + + interface TypeParameterInstantiation extends Node { + params: FlowTypeAnnotation[]; + } + + interface ObjectTypeAnnotation extends FlowTypeAnnotation { + properties: ObjectTypeProperty[]; + indexers: ObjectTypeIndexer[]; + callProperties: ObjectTypeCallProperty[]; + } + + interface ObjectTypeCallProperty extends Node { + value: FunctionTypeAnnotation; + static: boolean; + } + + interface ObjectTypeIndexer extends Node { + id: Identifier; + key: FlowTypeAnnotation; + value: FlowTypeAnnotation; + static: boolean; + } + + interface ObjectTypeProperty extends Node { + key: Expression; + value: FlowTypeAnnotation; + optional: boolean; + static: boolean; + } + + interface QualifiedTypeIdentifier extends Node { + qualification: Identifier | QualifiedTypeIdentifier; + id: Identifier; + } + + interface UnionTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface VoidTypeAnnotation extends FlowBaseTypeAnnotation {} +} diff --git a/node_modules/@types/estree/index.d.ts b/node_modules/@types/estree/index.d.ts new file mode 100644 index 0000000..1c13265 --- /dev/null +++ b/node_modules/@types/estree/index.d.ts @@ -0,0 +1,694 @@ +// This definition file follows a somewhat unusual format. ESTree allows +// runtime type checks based on the `type` parameter. In order to explain this +// to typescript we want to use discriminated union types: +// https://github.com/Microsoft/TypeScript/pull/9163 +// +// For ESTree this is a bit tricky because the high level interfaces like +// Node or Function are pulling double duty. We want to pass common fields down +// to the interfaces that extend them (like Identifier or +// ArrowFunctionExpression), but you can't extend a type union or enforce +// common fields on them. So we've split the high level interfaces into two +// types, a base type which passes down inherited fields, and a type union of +// all types which extend the base type. Only the type union is exported, and +// the union is how other types refer to the collection of inheriting types. +// +// This makes the definitions file here somewhat more difficult to maintain, +// but it has the notable advantage of making ESTree much easier to use as +// an end user. + +export interface BaseNodeWithoutComments { + // Every leaf interface that extends BaseNode must specify a type property. + // The type property should be a string literal. 
For example, Identifier + // has: `type: "Identifier"` + type: string; + loc?: SourceLocation | null | undefined; + range?: [number, number] | undefined; +} + +export interface BaseNode extends BaseNodeWithoutComments { + leadingComments?: Comment[] | undefined; + trailingComments?: Comment[] | undefined; +} + +export interface NodeMap { + AssignmentProperty: AssignmentProperty; + CatchClause: CatchClause; + Class: Class; + ClassBody: ClassBody; + Expression: Expression; + Function: Function; + Identifier: Identifier; + Literal: Literal; + MethodDefinition: MethodDefinition; + ModuleDeclaration: ModuleDeclaration; + ModuleSpecifier: ModuleSpecifier; + Pattern: Pattern; + PrivateIdentifier: PrivateIdentifier; + Program: Program; + Property: Property; + PropertyDefinition: PropertyDefinition; + SpreadElement: SpreadElement; + Statement: Statement; + Super: Super; + SwitchCase: SwitchCase; + TemplateElement: TemplateElement; + VariableDeclarator: VariableDeclarator; +} + +export type Node = NodeMap[keyof NodeMap]; + +export interface Comment extends BaseNodeWithoutComments { + type: "Line" | "Block"; + value: string; +} + +export interface SourceLocation { + source?: string | null | undefined; + start: Position; + end: Position; +} + +export interface Position { + /** >= 1 */ + line: number; + /** >= 0 */ + column: number; +} + +export interface Program extends BaseNode { + type: "Program"; + sourceType: "script" | "module"; + body: Array; + comments?: Comment[] | undefined; +} + +export interface Directive extends BaseNode { + type: "ExpressionStatement"; + expression: Literal; + directive: string; +} + +export interface BaseFunction extends BaseNode { + params: Pattern[]; + generator?: boolean | undefined; + async?: boolean | undefined; + // The body is either BlockStatement or Expression because arrow functions + // can have a body that's either. FunctionDeclarations and + // FunctionExpressions have only BlockStatement bodies. 
+ body: BlockStatement | Expression; +} + +export type Function = FunctionDeclaration | FunctionExpression | ArrowFunctionExpression; + +export type Statement = + | ExpressionStatement + | BlockStatement + | StaticBlock + | EmptyStatement + | DebuggerStatement + | WithStatement + | ReturnStatement + | LabeledStatement + | BreakStatement + | ContinueStatement + | IfStatement + | SwitchStatement + | ThrowStatement + | TryStatement + | WhileStatement + | DoWhileStatement + | ForStatement + | ForInStatement + | ForOfStatement + | Declaration; + +export interface BaseStatement extends BaseNode {} + +export interface EmptyStatement extends BaseStatement { + type: "EmptyStatement"; +} + +export interface BlockStatement extends BaseStatement { + type: "BlockStatement"; + body: Statement[]; + innerComments?: Comment[] | undefined; +} + +export interface StaticBlock extends Omit { + type: "StaticBlock"; +} + +export interface ExpressionStatement extends BaseStatement { + type: "ExpressionStatement"; + expression: Expression; +} + +export interface IfStatement extends BaseStatement { + type: "IfStatement"; + test: Expression; + consequent: Statement; + alternate?: Statement | null | undefined; +} + +export interface LabeledStatement extends BaseStatement { + type: "LabeledStatement"; + label: Identifier; + body: Statement; +} + +export interface BreakStatement extends BaseStatement { + type: "BreakStatement"; + label?: Identifier | null | undefined; +} + +export interface ContinueStatement extends BaseStatement { + type: "ContinueStatement"; + label?: Identifier | null | undefined; +} + +export interface WithStatement extends BaseStatement { + type: "WithStatement"; + object: Expression; + body: Statement; +} + +export interface SwitchStatement extends BaseStatement { + type: "SwitchStatement"; + discriminant: Expression; + cases: SwitchCase[]; +} + +export interface ReturnStatement extends BaseStatement { + type: "ReturnStatement"; + argument?: Expression | null | undefined; +} + +export interface ThrowStatement extends BaseStatement { + type: "ThrowStatement"; + argument: Expression; +} + +export interface TryStatement extends BaseStatement { + type: "TryStatement"; + block: BlockStatement; + handler?: CatchClause | null | undefined; + finalizer?: BlockStatement | null | undefined; +} + +export interface WhileStatement extends BaseStatement { + type: "WhileStatement"; + test: Expression; + body: Statement; +} + +export interface DoWhileStatement extends BaseStatement { + type: "DoWhileStatement"; + body: Statement; + test: Expression; +} + +export interface ForStatement extends BaseStatement { + type: "ForStatement"; + init?: VariableDeclaration | Expression | null | undefined; + test?: Expression | null | undefined; + update?: Expression | null | undefined; + body: Statement; +} + +export interface BaseForXStatement extends BaseStatement { + left: VariableDeclaration | Pattern; + right: Expression; + body: Statement; +} + +export interface ForInStatement extends BaseForXStatement { + type: "ForInStatement"; +} + +export interface DebuggerStatement extends BaseStatement { + type: "DebuggerStatement"; +} + +export type Declaration = FunctionDeclaration | VariableDeclaration | ClassDeclaration; + +export interface BaseDeclaration extends BaseStatement {} + +export interface MaybeNamedFunctionDeclaration extends BaseFunction, BaseDeclaration { + type: "FunctionDeclaration"; + /** It is null when a function declaration is a part of the `export default function` statement */ + id: Identifier | null; + 
body: BlockStatement; +} + +export interface FunctionDeclaration extends MaybeNamedFunctionDeclaration { + id: Identifier; +} + +export interface VariableDeclaration extends BaseDeclaration { + type: "VariableDeclaration"; + declarations: VariableDeclarator[]; + kind: "var" | "let" | "const"; +} + +export interface VariableDeclarator extends BaseNode { + type: "VariableDeclarator"; + id: Pattern; + init?: Expression | null | undefined; +} + +export interface ExpressionMap { + ArrayExpression: ArrayExpression; + ArrowFunctionExpression: ArrowFunctionExpression; + AssignmentExpression: AssignmentExpression; + AwaitExpression: AwaitExpression; + BinaryExpression: BinaryExpression; + CallExpression: CallExpression; + ChainExpression: ChainExpression; + ClassExpression: ClassExpression; + ConditionalExpression: ConditionalExpression; + FunctionExpression: FunctionExpression; + Identifier: Identifier; + ImportExpression: ImportExpression; + Literal: Literal; + LogicalExpression: LogicalExpression; + MemberExpression: MemberExpression; + MetaProperty: MetaProperty; + NewExpression: NewExpression; + ObjectExpression: ObjectExpression; + SequenceExpression: SequenceExpression; + TaggedTemplateExpression: TaggedTemplateExpression; + TemplateLiteral: TemplateLiteral; + ThisExpression: ThisExpression; + UnaryExpression: UnaryExpression; + UpdateExpression: UpdateExpression; + YieldExpression: YieldExpression; +} + +export type Expression = ExpressionMap[keyof ExpressionMap]; + +export interface BaseExpression extends BaseNode {} + +export type ChainElement = SimpleCallExpression | MemberExpression; + +export interface ChainExpression extends BaseExpression { + type: "ChainExpression"; + expression: ChainElement; +} + +export interface ThisExpression extends BaseExpression { + type: "ThisExpression"; +} + +export interface ArrayExpression extends BaseExpression { + type: "ArrayExpression"; + elements: Array; +} + +export interface ObjectExpression extends BaseExpression { + type: "ObjectExpression"; + properties: Array; +} + +export interface PrivateIdentifier extends BaseNode { + type: "PrivateIdentifier"; + name: string; +} + +export interface Property extends BaseNode { + type: "Property"; + key: Expression | PrivateIdentifier; + value: Expression | Pattern; // Could be an AssignmentProperty + kind: "init" | "get" | "set"; + method: boolean; + shorthand: boolean; + computed: boolean; +} + +export interface PropertyDefinition extends BaseNode { + type: "PropertyDefinition"; + key: Expression | PrivateIdentifier; + value?: Expression | null | undefined; + computed: boolean; + static: boolean; +} + +export interface FunctionExpression extends BaseFunction, BaseExpression { + id?: Identifier | null | undefined; + type: "FunctionExpression"; + body: BlockStatement; +} + +export interface SequenceExpression extends BaseExpression { + type: "SequenceExpression"; + expressions: Expression[]; +} + +export interface UnaryExpression extends BaseExpression { + type: "UnaryExpression"; + operator: UnaryOperator; + prefix: true; + argument: Expression; +} + +export interface BinaryExpression extends BaseExpression { + type: "BinaryExpression"; + operator: BinaryOperator; + left: Expression | PrivateIdentifier; + right: Expression; +} + +export interface AssignmentExpression extends BaseExpression { + type: "AssignmentExpression"; + operator: AssignmentOperator; + left: Pattern | MemberExpression; + right: Expression; +} + +export interface UpdateExpression extends BaseExpression { + type: "UpdateExpression"; + 
operator: UpdateOperator; + argument: Expression; + prefix: boolean; +} + +export interface LogicalExpression extends BaseExpression { + type: "LogicalExpression"; + operator: LogicalOperator; + left: Expression; + right: Expression; +} + +export interface ConditionalExpression extends BaseExpression { + type: "ConditionalExpression"; + test: Expression; + alternate: Expression; + consequent: Expression; +} + +export interface BaseCallExpression extends BaseExpression { + callee: Expression | Super; + arguments: Array; +} +export type CallExpression = SimpleCallExpression | NewExpression; + +export interface SimpleCallExpression extends BaseCallExpression { + type: "CallExpression"; + optional: boolean; +} + +export interface NewExpression extends BaseCallExpression { + type: "NewExpression"; +} + +export interface MemberExpression extends BaseExpression, BasePattern { + type: "MemberExpression"; + object: Expression | Super; + property: Expression | PrivateIdentifier; + computed: boolean; + optional: boolean; +} + +export type Pattern = Identifier | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | MemberExpression; + +export interface BasePattern extends BaseNode {} + +export interface SwitchCase extends BaseNode { + type: "SwitchCase"; + test?: Expression | null | undefined; + consequent: Statement[]; +} + +export interface CatchClause extends BaseNode { + type: "CatchClause"; + param: Pattern | null; + body: BlockStatement; +} + +export interface Identifier extends BaseNode, BaseExpression, BasePattern { + type: "Identifier"; + name: string; +} + +export type Literal = SimpleLiteral | RegExpLiteral | BigIntLiteral; + +export interface SimpleLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value: string | boolean | number | null; + raw?: string | undefined; +} + +export interface RegExpLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value?: RegExp | null | undefined; + regex: { + pattern: string; + flags: string; + }; + raw?: string | undefined; +} + +export interface BigIntLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value?: bigint | null | undefined; + bigint: string; + raw?: string | undefined; +} + +export type UnaryOperator = "-" | "+" | "!" 
| "~" | "typeof" | "void" | "delete"; + +export type BinaryOperator = + | "==" + | "!=" + | "===" + | "!==" + | "<" + | "<=" + | ">" + | ">=" + | "<<" + | ">>" + | ">>>" + | "+" + | "-" + | "*" + | "/" + | "%" + | "**" + | "|" + | "^" + | "&" + | "in" + | "instanceof"; + +export type LogicalOperator = "||" | "&&" | "??"; + +export type AssignmentOperator = + | "=" + | "+=" + | "-=" + | "*=" + | "/=" + | "%=" + | "**=" + | "<<=" + | ">>=" + | ">>>=" + | "|=" + | "^=" + | "&=" + | "||=" + | "&&=" + | "??="; + +export type UpdateOperator = "++" | "--"; + +export interface ForOfStatement extends BaseForXStatement { + type: "ForOfStatement"; + await: boolean; +} + +export interface Super extends BaseNode { + type: "Super"; +} + +export interface SpreadElement extends BaseNode { + type: "SpreadElement"; + argument: Expression; +} + +export interface ArrowFunctionExpression extends BaseExpression, BaseFunction { + type: "ArrowFunctionExpression"; + expression: boolean; + body: BlockStatement | Expression; +} + +export interface YieldExpression extends BaseExpression { + type: "YieldExpression"; + argument?: Expression | null | undefined; + delegate: boolean; +} + +export interface TemplateLiteral extends BaseExpression { + type: "TemplateLiteral"; + quasis: TemplateElement[]; + expressions: Expression[]; +} + +export interface TaggedTemplateExpression extends BaseExpression { + type: "TaggedTemplateExpression"; + tag: Expression; + quasi: TemplateLiteral; +} + +export interface TemplateElement extends BaseNode { + type: "TemplateElement"; + tail: boolean; + value: { + /** It is null when the template literal is tagged and the text has an invalid escape (e.g. - tag`\unicode and \u{55}`) */ + cooked?: string | null | undefined; + raw: string; + }; +} + +export interface AssignmentProperty extends Property { + value: Pattern; + kind: "init"; + method: boolean; // false +} + +export interface ObjectPattern extends BasePattern { + type: "ObjectPattern"; + properties: Array; +} + +export interface ArrayPattern extends BasePattern { + type: "ArrayPattern"; + elements: Array; +} + +export interface RestElement extends BasePattern { + type: "RestElement"; + argument: Pattern; +} + +export interface AssignmentPattern extends BasePattern { + type: "AssignmentPattern"; + left: Pattern; + right: Expression; +} + +export type Class = ClassDeclaration | ClassExpression; +export interface BaseClass extends BaseNode { + superClass?: Expression | null | undefined; + body: ClassBody; +} + +export interface ClassBody extends BaseNode { + type: "ClassBody"; + body: Array; +} + +export interface MethodDefinition extends BaseNode { + type: "MethodDefinition"; + key: Expression | PrivateIdentifier; + value: FunctionExpression; + kind: "constructor" | "method" | "get" | "set"; + computed: boolean; + static: boolean; +} + +export interface MaybeNamedClassDeclaration extends BaseClass, BaseDeclaration { + type: "ClassDeclaration"; + /** It is null when a class declaration is a part of the `export default class` statement */ + id: Identifier | null; +} + +export interface ClassDeclaration extends MaybeNamedClassDeclaration { + id: Identifier; +} + +export interface ClassExpression extends BaseClass, BaseExpression { + type: "ClassExpression"; + id?: Identifier | null | undefined; +} + +export interface MetaProperty extends BaseExpression { + type: "MetaProperty"; + meta: Identifier; + property: Identifier; +} + +export type ModuleDeclaration = + | ImportDeclaration + | ExportNamedDeclaration + | ExportDefaultDeclaration + | 
ExportAllDeclaration; +export interface BaseModuleDeclaration extends BaseNode {} + +export type ModuleSpecifier = ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier; +export interface BaseModuleSpecifier extends BaseNode { + local: Identifier; +} + +export interface ImportDeclaration extends BaseModuleDeclaration { + type: "ImportDeclaration"; + specifiers: Array; + attributes: ImportAttribute[]; + source: Literal; +} + +export interface ImportSpecifier extends BaseModuleSpecifier { + type: "ImportSpecifier"; + imported: Identifier | Literal; +} + +export interface ImportAttribute extends BaseNode { + type: "ImportAttribute"; + key: Identifier | Literal; + value: Literal; +} + +export interface ImportExpression extends BaseExpression { + type: "ImportExpression"; + source: Expression; + options?: Expression | null | undefined; +} + +export interface ImportDefaultSpecifier extends BaseModuleSpecifier { + type: "ImportDefaultSpecifier"; +} + +export interface ImportNamespaceSpecifier extends BaseModuleSpecifier { + type: "ImportNamespaceSpecifier"; +} + +export interface ExportNamedDeclaration extends BaseModuleDeclaration { + type: "ExportNamedDeclaration"; + declaration?: Declaration | null | undefined; + specifiers: ExportSpecifier[]; + attributes: ImportAttribute[]; + source?: Literal | null | undefined; +} + +export interface ExportSpecifier extends Omit { + type: "ExportSpecifier"; + local: Identifier | Literal; + exported: Identifier | Literal; +} + +export interface ExportDefaultDeclaration extends BaseModuleDeclaration { + type: "ExportDefaultDeclaration"; + declaration: MaybeNamedFunctionDeclaration | MaybeNamedClassDeclaration | Expression; +} + +export interface ExportAllDeclaration extends BaseModuleDeclaration { + type: "ExportAllDeclaration"; + exported: Identifier | Literal | null; + attributes: ImportAttribute[]; + source: Literal; +} + +export interface AwaitExpression extends BaseExpression { + type: "AwaitExpression"; + argument: Expression; +} diff --git a/node_modules/@types/estree/package.json b/node_modules/@types/estree/package.json new file mode 100644 index 0000000..61ce23a --- /dev/null +++ b/node_modules/@types/estree/package.json @@ -0,0 +1,27 @@ +{ + "name": "@types/estree", + "version": "1.0.7", + "description": "TypeScript definitions for estree", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree", + "license": "MIT", + "contributors": [ + { + "name": "RReverser", + "githubUsername": "RReverser", + "url": "https://github.com/RReverser" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/estree" + }, + "scripts": {}, + "dependencies": {}, + "peerDependencies": {}, + "typesPublisherContentHash": "1ab11f4e78319f80655b4ca20a073da0dc035be5f3290fb0bfa1e08a055d0c7d", + "typeScriptVersion": "5.0", + "nonNpm": true +} \ No newline at end of file diff --git a/node_modules/esbuild/LICENSE.md b/node_modules/esbuild/LICENSE.md new file mode 100644 index 0000000..2027e8d --- /dev/null +++ b/node_modules/esbuild/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Evan Wallace + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/esbuild/README.md b/node_modules/esbuild/README.md new file mode 100644 index 0000000..93863d1 --- /dev/null +++ b/node_modules/esbuild/README.md @@ -0,0 +1,3 @@ +# esbuild + +This is a JavaScript bundler and minifier. See https://github.com/evanw/esbuild and the [JavaScript API documentation](https://esbuild.github.io/api/) for details. diff --git a/node_modules/esbuild/bin/esbuild b/node_modules/esbuild/bin/esbuild new file mode 100644 index 0000000..731b8af --- /dev/null +++ b/node_modules/esbuild/bin/esbuild @@ -0,0 +1,222 @@ +#!/usr/bin/env node +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var packageDarwin_arm64 = "@esbuild/darwin-arm64"; +var packageDarwin_x64 = "@esbuild/darwin-x64"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + "linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": "@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd arm64 LE": "@esbuild/netbsd-arm64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd arm64 LE": "@esbuild/openbsd-arm64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM2 = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM2 = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM: isWASM2 }; +} +function pkgForSomeOtherPlatform() { + const libMainJS = require.resolve("esbuild"); + const nodeModulesDirectory = path.dirname(path.dirname(path.dirname(libMainJS))); + if (path.basename(nodeModulesDirectory) === "node_modules") { + for (const unixKey in knownUnixlikePackages) { + try { + const pkg = knownUnixlikePackages[unixKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + for (const windowsKey in knownWindowsPackages) { + try { + const pkg = knownWindowsPackages[windowsKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + } + return null; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} +function generateBinPath() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + return { binPath: ESBUILD_BINARY_PATH, isWASM: false }; + } + } + const { pkg, subpath, isWASM: isWASM2 } = 
pkgAndSubpathForCurrentPlatform(); + let binPath2; + try { + binPath2 = require.resolve(`${pkg}/${subpath}`); + } catch (e) { + binPath2 = downloadedBinPath(pkg, subpath); + if (!fs.existsSync(binPath2)) { + try { + require.resolve(pkg); + } catch { + const otherPkg = pkgForSomeOtherPlatform(); + if (otherPkg) { + let suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild on Windows or macOS and copying "node_modules" +into a Docker image that runs Linux, or by copying "node_modules" between +Windows and WSL environments. + +If you are installing with npm, you can try not copying the "node_modules" +directory when you copy the files over, and running "npm ci" or "npm install" +on the destination platform after the copy. Or you could consider using yarn +instead of npm which has built-in support for installing a package on multiple +platforms simultaneously. + +If you are installing with yarn, you can try listing both this platform and the +other platform in your ".yarnrc.yml" file using the "supportedArchitectures" +feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + if (pkg === packageDarwin_x64 && otherPkg === packageDarwin_arm64 || pkg === packageDarwin_arm64 && otherPkg === packageDarwin_x64) { + suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild with npm running inside of Rosetta 2 and then +trying to use it with node running outside of Rosetta 2, or vice versa (Rosetta +2 is Apple's on-the-fly x86_64-to-arm64 translation service). + +If you are installing with npm, you can try ensuring that both npm and node are +not running under Rosetta 2 and then reinstalling esbuild. This likely involves +changing how you installed npm and/or node. For example, installing node with +the universal installer here should work: https://nodejs.org/en/download/. Or +you could consider using yarn instead of npm which has built-in support for +installing a package on multiple platforms simultaneously. + +If you are installing with yarn, you can try listing both "arm64" and "x64" +in your ".yarnrc.yml" file using the "supportedArchitectures" feature: +https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + } + throw new Error(` +You installed esbuild for another platform than the one you're currently using. +This won't work because esbuild is written with native code and needs to +install a platform-specific binary executable. +${suggestions} +Another alternative is to use the "esbuild-wasm" package instead, which works +the same way on all platforms. But it comes with a heavy performance cost and +can sometimes be 10x slower than the "esbuild" package, so you may also not +want to do that. +`); + } + throw new Error(`The package "${pkg}" could not be found, and is needed by esbuild. + +If you are installing esbuild with npm, make sure that you don't specify the +"--no-optional" or "--omit=optional" flags. 
The "optionalDependencies" feature +of "package.json" is used by esbuild to install the correct binary executable +for your current platform.`); + } + throw e; + } + } + if (/\.zip\//.test(binPath2)) { + let pnpapi; + try { + pnpapi = require("pnpapi"); + } catch (e) { + } + if (pnpapi) { + const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation; + const binTargetPath = path.join( + root, + "node_modules", + ".cache", + "esbuild", + `pnpapi-${pkg.replace("/", "-")}-${"0.25.5"}-${path.basename(subpath)}` + ); + if (!fs.existsSync(binTargetPath)) { + fs.mkdirSync(path.dirname(binTargetPath), { recursive: true }); + fs.copyFileSync(binPath2, binTargetPath); + fs.chmodSync(binTargetPath, 493); + } + return { binPath: binTargetPath, isWASM: isWASM2 }; + } + } + return { binPath: binPath2, isWASM: isWASM2 }; +} + +// lib/npm/node-shim.ts +var { binPath, isWASM } = generateBinPath(); +if (isWASM) { + require("child_process").execFileSync("node", [binPath].concat(process.argv.slice(2)), { stdio: "inherit" }); +} else { + require("child_process").execFileSync(binPath, process.argv.slice(2), { stdio: "inherit" }); +} diff --git a/node_modules/esbuild/install.js b/node_modules/esbuild/install.js new file mode 100644 index 0000000..5954864 --- /dev/null +++ b/node_modules/esbuild/install.js @@ -0,0 +1,287 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + "linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": "@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd arm64 LE": "@esbuild/netbsd-arm64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd arm64 LE": "@esbuild/openbsd-arm64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM }; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} + +// lib/npm/node-install.ts +var fs2 = require("fs"); +var os2 = require("os"); +var path2 = require("path"); +var zlib = require("zlib"); +var https = require("https"); +var child_process = require("child_process"); +var versionFromPackageJSON = require(path2.join(__dirname, "package.json")).version; +var toPath = path2.join(__dirname, "bin", "esbuild"); +var isToPathJS = true; +function validateBinaryVersion(...command) { + command.push("--version"); + let stdout; + try { + stdout = child_process.execFileSync(command.shift(), command, { + // Without this, this install script strangely crashes with the error + // "EACCES: permission denied, write" but only on Ubuntu Linux when node is + // installed from the Snap Store. This is not a problem when you download + // the official version of node. The problem appears to be that stderr + // (i.e. file descriptor 2) isn't writable? 
+ // + // More info: + // - https://snapcraft.io/ (what the Snap Store is) + // - https://nodejs.org/dist/ (download the official version of node) + // - https://github.com/evanw/esbuild/issues/1711#issuecomment-1027554035 + // + stdio: "pipe" + }).toString().trim(); + } catch (err) { + if (os2.platform() === "darwin" && /_SecTrustEvaluateWithError/.test(err + "")) { + let os3 = "this version of macOS"; + try { + os3 = "macOS " + child_process.execFileSync("sw_vers", ["-productVersion"]).toString().trim(); + } catch { + } + throw new Error(`The "esbuild" package cannot be installed because ${os3} is too outdated. + +The Go compiler (which esbuild relies on) no longer supports ${os3}, +which means the "esbuild" binary executable can't be run. You can either: + + * Update your version of macOS to one that the Go compiler supports + * Use the "esbuild-wasm" package instead of the "esbuild" package + * Build esbuild yourself using an older version of the Go compiler +`); + } + throw err; + } + if (stdout !== versionFromPackageJSON) { + throw new Error(`Expected ${JSON.stringify(versionFromPackageJSON)} but got ${JSON.stringify(stdout)}`); + } +} +function isYarn() { + const { npm_config_user_agent } = process.env; + if (npm_config_user_agent) { + return /\byarn\//.test(npm_config_user_agent); + } + return false; +} +function fetch(url) { + return new Promise((resolve, reject) => { + https.get(url, (res) => { + if ((res.statusCode === 301 || res.statusCode === 302) && res.headers.location) + return fetch(res.headers.location).then(resolve, reject); + if (res.statusCode !== 200) + return reject(new Error(`Server responded with ${res.statusCode}`)); + let chunks = []; + res.on("data", (chunk) => chunks.push(chunk)); + res.on("end", () => resolve(Buffer.concat(chunks))); + }).on("error", reject); + }); +} +function extractFileFromTarGzip(buffer, subpath) { + try { + buffer = zlib.unzipSync(buffer); + } catch (err) { + throw new Error(`Invalid gzip data in archive: ${err && err.message || err}`); + } + let str = (i, n) => String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, ""); + let offset = 0; + subpath = `package/${subpath}`; + while (offset < buffer.length) { + let name = str(offset, 100); + let size = parseInt(str(offset + 124, 12), 8); + offset += 512; + if (!isNaN(size)) { + if (name === subpath) return buffer.subarray(offset, offset + size); + offset += size + 511 & ~511; + } + } + throw new Error(`Could not find ${JSON.stringify(subpath)} in archive`); +} +function installUsingNPM(pkg, subpath, binPath) { + const env = { ...process.env, npm_config_global: void 0 }; + const esbuildLibDir = path2.dirname(require.resolve("esbuild")); + const installDir = path2.join(esbuildLibDir, "npm-install"); + fs2.mkdirSync(installDir); + try { + fs2.writeFileSync(path2.join(installDir, "package.json"), "{}"); + child_process.execSync( + `npm install --loglevel=error --prefer-offline --no-audit --progress=false ${pkg}@${versionFromPackageJSON}`, + { cwd: installDir, stdio: "pipe", env } + ); + const installedBinPath = path2.join(installDir, "node_modules", pkg, subpath); + fs2.renameSync(installedBinPath, binPath); + } finally { + try { + removeRecursive(installDir); + } catch { + } + } +} +function removeRecursive(dir) { + for (const entry of fs2.readdirSync(dir)) { + const entryPath = path2.join(dir, entry); + let stats; + try { + stats = fs2.lstatSync(entryPath); + } catch { + continue; + } + if (stats.isDirectory()) removeRecursive(entryPath); + else fs2.unlinkSync(entryPath); + } + 
fs2.rmdirSync(dir); +} +function applyManualBinaryPathOverride(overridePath) { + const pathString = JSON.stringify(overridePath); + fs2.writeFileSync(toPath, `#!/usr/bin/env node +require('child_process').execFileSync(${pathString}, process.argv.slice(2), { stdio: 'inherit' }); +`); + const libMain = path2.join(__dirname, "lib", "main.js"); + const code = fs2.readFileSync(libMain, "utf8"); + fs2.writeFileSync(libMain, `var ESBUILD_BINARY_PATH = ${pathString}; +${code}`); +} +function maybeOptimizePackage(binPath) { + if (os2.platform() !== "win32" && !isYarn()) { + const tempPath = path2.join(__dirname, "bin-esbuild"); + try { + fs2.linkSync(binPath, tempPath); + fs2.renameSync(tempPath, toPath); + isToPathJS = false; + fs2.unlinkSync(tempPath); + } catch { + } + } +} +async function downloadDirectlyFromNPM(pkg, subpath, binPath) { + const url = `https://registry.npmjs.org/${pkg}/-/${pkg.replace("@esbuild/", "")}-${versionFromPackageJSON}.tgz`; + console.error(`[esbuild] Trying to download ${JSON.stringify(url)}`); + try { + fs2.writeFileSync(binPath, extractFileFromTarGzip(await fetch(url), subpath)); + fs2.chmodSync(binPath, 493); + } catch (e) { + console.error(`[esbuild] Failed to download ${JSON.stringify(url)}: ${e && e.message || e}`); + throw e; + } +} +async function checkAndPreparePackage() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs2.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + applyManualBinaryPathOverride(ESBUILD_BINARY_PATH); + return; + } + } + const { pkg, subpath } = pkgAndSubpathForCurrentPlatform(); + let binPath; + try { + binPath = require.resolve(`${pkg}/${subpath}`); + } catch (e) { + console.error(`[esbuild] Failed to find package "${pkg}" on the file system + +This can happen if you use the "--no-optional" flag. The "optionalDependencies" +package.json feature is used by esbuild to install the correct binary executable +for your current platform. This install script will now attempt to work around +this. If that fails, you need to remove the "--no-optional" flag to use esbuild. 
+`); + binPath = downloadedBinPath(pkg, subpath); + try { + console.error(`[esbuild] Trying to install package "${pkg}" using npm`); + installUsingNPM(pkg, subpath, binPath); + } catch (e2) { + console.error(`[esbuild] Failed to install package "${pkg}" using npm: ${e2 && e2.message || e2}`); + try { + await downloadDirectlyFromNPM(pkg, subpath, binPath); + } catch (e3) { + throw new Error(`Failed to install package "${pkg}"`); + } + } + } + maybeOptimizePackage(binPath); +} +checkAndPreparePackage().then(() => { + if (isToPathJS) { + validateBinaryVersion(process.execPath, toPath); + } else { + validateBinaryVersion(toPath); + } +}); diff --git a/node_modules/esbuild/lib/main.d.ts b/node_modules/esbuild/lib/main.d.ts new file mode 100644 index 0000000..424b302 --- /dev/null +++ b/node_modules/esbuild/lib/main.d.ts @@ -0,0 +1,711 @@ +export type Platform = 'browser' | 'node' | 'neutral' +export type Format = 'iife' | 'cjs' | 'esm' +export type Loader = 'base64' | 'binary' | 'copy' | 'css' | 'dataurl' | 'default' | 'empty' | 'file' | 'js' | 'json' | 'jsx' | 'local-css' | 'text' | 'ts' | 'tsx' +export type LogLevel = 'verbose' | 'debug' | 'info' | 'warning' | 'error' | 'silent' +export type Charset = 'ascii' | 'utf8' +export type Drop = 'console' | 'debugger' + +interface CommonOptions { + /** Documentation: https://esbuild.github.io/api/#sourcemap */ + sourcemap?: boolean | 'linked' | 'inline' | 'external' | 'both' + /** Documentation: https://esbuild.github.io/api/#legal-comments */ + legalComments?: 'none' | 'inline' | 'eof' | 'linked' | 'external' + /** Documentation: https://esbuild.github.io/api/#source-root */ + sourceRoot?: string + /** Documentation: https://esbuild.github.io/api/#sources-content */ + sourcesContent?: boolean + + /** Documentation: https://esbuild.github.io/api/#format */ + format?: Format + /** Documentation: https://esbuild.github.io/api/#global-name */ + globalName?: string + /** Documentation: https://esbuild.github.io/api/#target */ + target?: string | string[] + /** Documentation: https://esbuild.github.io/api/#supported */ + supported?: Record + /** Documentation: https://esbuild.github.io/api/#platform */ + platform?: Platform + + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleProps?: RegExp + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + reserveProps?: RegExp + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleQuoted?: boolean + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleCache?: Record + /** Documentation: https://esbuild.github.io/api/#drop */ + drop?: Drop[] + /** Documentation: https://esbuild.github.io/api/#drop-labels */ + dropLabels?: string[] + /** Documentation: https://esbuild.github.io/api/#minify */ + minify?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifyWhitespace?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifyIdentifiers?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifySyntax?: boolean + /** Documentation: https://esbuild.github.io/api/#line-limit */ + lineLimit?: number + /** Documentation: https://esbuild.github.io/api/#charset */ + charset?: Charset + /** Documentation: https://esbuild.github.io/api/#tree-shaking */ + treeShaking?: boolean + /** Documentation: https://esbuild.github.io/api/#ignore-annotations */ + ignoreAnnotations?: boolean + + /** Documentation: https://esbuild.github.io/api/#jsx */ + jsx?: 'transform' | 'preserve' | 
'automatic' + /** Documentation: https://esbuild.github.io/api/#jsx-factory */ + jsxFactory?: string + /** Documentation: https://esbuild.github.io/api/#jsx-fragment */ + jsxFragment?: string + /** Documentation: https://esbuild.github.io/api/#jsx-import-source */ + jsxImportSource?: string + /** Documentation: https://esbuild.github.io/api/#jsx-development */ + jsxDev?: boolean + /** Documentation: https://esbuild.github.io/api/#jsx-side-effects */ + jsxSideEffects?: boolean + + /** Documentation: https://esbuild.github.io/api/#define */ + define?: { [key: string]: string } + /** Documentation: https://esbuild.github.io/api/#pure */ + pure?: string[] + /** Documentation: https://esbuild.github.io/api/#keep-names */ + keepNames?: boolean + + /** Documentation: https://esbuild.github.io/api/#color */ + color?: boolean + /** Documentation: https://esbuild.github.io/api/#log-level */ + logLevel?: LogLevel + /** Documentation: https://esbuild.github.io/api/#log-limit */ + logLimit?: number + /** Documentation: https://esbuild.github.io/api/#log-override */ + logOverride?: Record + + /** Documentation: https://esbuild.github.io/api/#tsconfig-raw */ + tsconfigRaw?: string | TsconfigRaw +} + +export interface TsconfigRaw { + compilerOptions?: { + alwaysStrict?: boolean + baseUrl?: string + experimentalDecorators?: boolean + importsNotUsedAsValues?: 'remove' | 'preserve' | 'error' + jsx?: 'preserve' | 'react-native' | 'react' | 'react-jsx' | 'react-jsxdev' + jsxFactory?: string + jsxFragmentFactory?: string + jsxImportSource?: string + paths?: Record + preserveValueImports?: boolean + strict?: boolean + target?: string + useDefineForClassFields?: boolean + verbatimModuleSyntax?: boolean + } +} + +export interface BuildOptions extends CommonOptions { + /** Documentation: https://esbuild.github.io/api/#bundle */ + bundle?: boolean + /** Documentation: https://esbuild.github.io/api/#splitting */ + splitting?: boolean + /** Documentation: https://esbuild.github.io/api/#preserve-symlinks */ + preserveSymlinks?: boolean + /** Documentation: https://esbuild.github.io/api/#outfile */ + outfile?: string + /** Documentation: https://esbuild.github.io/api/#metafile */ + metafile?: boolean + /** Documentation: https://esbuild.github.io/api/#outdir */ + outdir?: string + /** Documentation: https://esbuild.github.io/api/#outbase */ + outbase?: string + /** Documentation: https://esbuild.github.io/api/#external */ + external?: string[] + /** Documentation: https://esbuild.github.io/api/#packages */ + packages?: 'bundle' | 'external' + /** Documentation: https://esbuild.github.io/api/#alias */ + alias?: Record + /** Documentation: https://esbuild.github.io/api/#loader */ + loader?: { [ext: string]: Loader } + /** Documentation: https://esbuild.github.io/api/#resolve-extensions */ + resolveExtensions?: string[] + /** Documentation: https://esbuild.github.io/api/#main-fields */ + mainFields?: string[] + /** Documentation: https://esbuild.github.io/api/#conditions */ + conditions?: string[] + /** Documentation: https://esbuild.github.io/api/#write */ + write?: boolean + /** Documentation: https://esbuild.github.io/api/#allow-overwrite */ + allowOverwrite?: boolean + /** Documentation: https://esbuild.github.io/api/#tsconfig */ + tsconfig?: string + /** Documentation: https://esbuild.github.io/api/#out-extension */ + outExtension?: { [ext: string]: string } + /** Documentation: https://esbuild.github.io/api/#public-path */ + publicPath?: string + /** Documentation: https://esbuild.github.io/api/#entry-names */ + 
entryNames?: string + /** Documentation: https://esbuild.github.io/api/#chunk-names */ + chunkNames?: string + /** Documentation: https://esbuild.github.io/api/#asset-names */ + assetNames?: string + /** Documentation: https://esbuild.github.io/api/#inject */ + inject?: string[] + /** Documentation: https://esbuild.github.io/api/#banner */ + banner?: { [type: string]: string } + /** Documentation: https://esbuild.github.io/api/#footer */ + footer?: { [type: string]: string } + /** Documentation: https://esbuild.github.io/api/#entry-points */ + entryPoints?: string[] | Record | { in: string, out: string }[] + /** Documentation: https://esbuild.github.io/api/#stdin */ + stdin?: StdinOptions + /** Documentation: https://esbuild.github.io/plugins/ */ + plugins?: Plugin[] + /** Documentation: https://esbuild.github.io/api/#working-directory */ + absWorkingDir?: string + /** Documentation: https://esbuild.github.io/api/#node-paths */ + nodePaths?: string[]; // The "NODE_PATH" variable from Node.js +} + +export interface StdinOptions { + contents: string | Uint8Array + resolveDir?: string + sourcefile?: string + loader?: Loader +} + +export interface Message { + id: string + pluginName: string + text: string + location: Location | null + notes: Note[] + + /** + * Optional user-specified data that is passed through unmodified. You can + * use this to stash the original error, for example. + */ + detail: any +} + +export interface Note { + text: string + location: Location | null +} + +export interface Location { + file: string + namespace: string + /** 1-based */ + line: number + /** 0-based, in bytes */ + column: number + /** in bytes */ + length: number + lineText: string + suggestion: string +} + +export interface OutputFile { + path: string + contents: Uint8Array + hash: string + /** "contents" as text (changes automatically with "contents") */ + readonly text: string +} + +export interface BuildResult { + errors: Message[] + warnings: Message[] + /** Only when "write: false" */ + outputFiles: OutputFile[] | (ProvidedOptions['write'] extends false ? never : undefined) + /** Only when "metafile: true" */ + metafile: Metafile | (ProvidedOptions['metafile'] extends true ? never : undefined) + /** Only when "mangleCache" is present */ + mangleCache: Record | (ProvidedOptions['mangleCache'] extends Object ? 
never : undefined) +} + +export interface BuildFailure extends Error { + errors: Message[] + warnings: Message[] +} + +/** Documentation: https://esbuild.github.io/api/#serve-arguments */ +export interface ServeOptions { + port?: number + host?: string + servedir?: string + keyfile?: string + certfile?: string + fallback?: string + cors?: CORSOptions + onRequest?: (args: ServeOnRequestArgs) => void +} + +/** Documentation: https://esbuild.github.io/api/#cors */ +export interface CORSOptions { + origin?: string | string[] +} + +export interface ServeOnRequestArgs { + remoteAddress: string + method: string + path: string + status: number + /** The time to generate the response, not to send it */ + timeInMS: number +} + +/** Documentation: https://esbuild.github.io/api/#serve-return-values */ +export interface ServeResult { + port: number + hosts: string[] +} + +export interface TransformOptions extends CommonOptions { + /** Documentation: https://esbuild.github.io/api/#sourcefile */ + sourcefile?: string + /** Documentation: https://esbuild.github.io/api/#loader */ + loader?: Loader + /** Documentation: https://esbuild.github.io/api/#banner */ + banner?: string + /** Documentation: https://esbuild.github.io/api/#footer */ + footer?: string +} + +export interface TransformResult { + code: string + map: string + warnings: Message[] + /** Only when "mangleCache" is present */ + mangleCache: Record | (ProvidedOptions['mangleCache'] extends Object ? never : undefined) + /** Only when "legalComments" is "external" */ + legalComments: string | (ProvidedOptions['legalComments'] extends 'external' ? never : undefined) +} + +export interface TransformFailure extends Error { + errors: Message[] + warnings: Message[] +} + +export interface Plugin { + name: string + setup: (build: PluginBuild) => (void | Promise) +} + +export interface PluginBuild { + /** Documentation: https://esbuild.github.io/plugins/#build-options */ + initialOptions: BuildOptions + + /** Documentation: https://esbuild.github.io/plugins/#resolve */ + resolve(path: string, options?: ResolveOptions): Promise + + /** Documentation: https://esbuild.github.io/plugins/#on-start */ + onStart(callback: () => + (OnStartResult | null | void | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-end */ + onEnd(callback: (result: BuildResult) => + (OnEndResult | null | void | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-resolve */ + onResolve(options: OnResolveOptions, callback: (args: OnResolveArgs) => + (OnResolveResult | null | undefined | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-load */ + onLoad(options: OnLoadOptions, callback: (args: OnLoadArgs) => + (OnLoadResult | null | undefined | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-dispose */ + onDispose(callback: () => void): void + + // This is a full copy of the esbuild library in case you need it + esbuild: { + context: typeof context, + build: typeof build, + buildSync: typeof buildSync, + transform: typeof transform, + transformSync: typeof transformSync, + formatMessages: typeof formatMessages, + formatMessagesSync: typeof formatMessagesSync, + analyzeMetafile: typeof analyzeMetafile, + analyzeMetafileSync: typeof analyzeMetafileSync, + initialize: typeof initialize, + version: typeof version, + } +} + +/** Documentation: https://esbuild.github.io/plugins/#resolve-options */ +export interface ResolveOptions { + pluginName?: string + importer?: string + namespace?: 
string + resolveDir?: string + kind?: ImportKind + pluginData?: any + with?: Record +} + +/** Documentation: https://esbuild.github.io/plugins/#resolve-results */ +export interface ResolveResult { + errors: Message[] + warnings: Message[] + + path: string + external: boolean + sideEffects: boolean + namespace: string + suffix: string + pluginData: any +} + +export interface OnStartResult { + errors?: PartialMessage[] + warnings?: PartialMessage[] +} + +export interface OnEndResult { + errors?: PartialMessage[] + warnings?: PartialMessage[] +} + +/** Documentation: https://esbuild.github.io/plugins/#on-resolve-options */ +export interface OnResolveOptions { + filter: RegExp + namespace?: string +} + +/** Documentation: https://esbuild.github.io/plugins/#on-resolve-arguments */ +export interface OnResolveArgs { + path: string + importer: string + namespace: string + resolveDir: string + kind: ImportKind + pluginData: any + with: Record +} + +export type ImportKind = + | 'entry-point' + + // JS + | 'import-statement' + | 'require-call' + | 'dynamic-import' + | 'require-resolve' + + // CSS + | 'import-rule' + | 'composes-from' + | 'url-token' + +/** Documentation: https://esbuild.github.io/plugins/#on-resolve-results */ +export interface OnResolveResult { + pluginName?: string + + errors?: PartialMessage[] + warnings?: PartialMessage[] + + path?: string + external?: boolean + sideEffects?: boolean + namespace?: string + suffix?: string + pluginData?: any + + watchFiles?: string[] + watchDirs?: string[] +} + +/** Documentation: https://esbuild.github.io/plugins/#on-load-options */ +export interface OnLoadOptions { + filter: RegExp + namespace?: string +} + +/** Documentation: https://esbuild.github.io/plugins/#on-load-arguments */ +export interface OnLoadArgs { + path: string + namespace: string + suffix: string + pluginData: any + with: Record +} + +/** Documentation: https://esbuild.github.io/plugins/#on-load-results */ +export interface OnLoadResult { + pluginName?: string + + errors?: PartialMessage[] + warnings?: PartialMessage[] + + contents?: string | Uint8Array + resolveDir?: string + loader?: Loader + pluginData?: any + + watchFiles?: string[] + watchDirs?: string[] +} + +export interface PartialMessage { + id?: string + pluginName?: string + text?: string + location?: Partial | null + notes?: PartialNote[] + detail?: any +} + +export interface PartialNote { + text?: string + location?: Partial | null +} + +/** Documentation: https://esbuild.github.io/api/#metafile */ +export interface Metafile { + inputs: { + [path: string]: { + bytes: number + imports: { + path: string + kind: ImportKind + external?: boolean + original?: string + with?: Record + }[] + format?: 'cjs' | 'esm' + with?: Record + } + } + outputs: { + [path: string]: { + bytes: number + inputs: { + [path: string]: { + bytesInOutput: number + } + } + imports: { + path: string + kind: ImportKind | 'file-loader' + external?: boolean + }[] + exports: string[] + entryPoint?: string + cssBundle?: string + } + } +} + +export interface FormatMessagesOptions { + kind: 'error' | 'warning' + color?: boolean + terminalWidth?: number +} + +export interface AnalyzeMetafileOptions { + color?: boolean + verbose?: boolean +} + +export interface WatchOptions { +} + +export interface BuildContext { + /** Documentation: https://esbuild.github.io/api/#rebuild */ + rebuild(): Promise> + + /** Documentation: https://esbuild.github.io/api/#watch */ + watch(options?: WatchOptions): Promise + + /** Documentation: 
https://esbuild.github.io/api/#serve */
+  serve(options?: ServeOptions): Promise<ServeResult>
+
+  cancel(): Promise<void>
+  dispose(): Promise<void>
+}
+
+// This is a TypeScript type-level function which replaces any keys in "In"
+// that aren't in "Out" with "never". We use this to reject properties with
+// typos in object literals. See: https://stackoverflow.com/questions/49580725
+type SameShape<Out, In extends Out> = In & { [Key in Exclude<keyof In, keyof Out>]: never }
+
+/**
+ * This function invokes the "esbuild" command-line tool for you. It returns a
+ * promise that either resolves with a "BuildResult" object or rejects with a
+ * "BuildFailure" object.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function build<T extends BuildOptions>(options: SameShape<BuildOptions, T>): Promise<BuildResult<T>>
+
+/**
+ * This is the advanced long-running form of "build" that supports additional
+ * features such as watch mode and a local development server.
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function context<T extends BuildOptions>(options: SameShape<BuildOptions, T>): Promise<BuildContext<T>>
+
+/**
+ * This function transforms a single JavaScript file. It can be used to minify
+ * JavaScript, convert TypeScript/JSX to JavaScript, or convert newer JavaScript
+ * to older JavaScript. It returns a promise that is either resolved with a
+ * "TransformResult" object or rejected with a "TransformFailure" object.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#transform
+ */
+export declare function transform<T extends TransformOptions>(input: string | Uint8Array, options?: SameShape<TransformOptions, T>): Promise<TransformResult<T>>
+
+/**
+ * Converts log messages to formatted message strings suitable for printing in
+ * the terminal. This allows you to reuse the built-in behavior of esbuild's
+ * log message formatter. This is a batch-oriented API for efficiency.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ */
+export declare function formatMessages(messages: PartialMessage[], options: FormatMessagesOptions): Promise<string[]>
+
+/**
+ * Pretty-prints an analysis of the metafile JSON to a string. This is just for
+ * convenience to be able to match esbuild's pretty-printing exactly. If you want
+ * to customize it, you can just inspect the data in the metafile yourself.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#analyze
+ */
+export declare function analyzeMetafile(metafile: Metafile | string, options?: AnalyzeMetafileOptions): Promise<string>
+
+/**
+ * A synchronous version of "build".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function buildSync<T extends BuildOptions>(options: SameShape<BuildOptions, T>): BuildResult<T>
+
+/**
+ * A synchronous version of "transform".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#transform
+ */
+export declare function transformSync<T extends TransformOptions>(input: string | Uint8Array, options?: SameShape<TransformOptions, T>): TransformResult<T>
+
+/**
+ * A synchronous version of "formatMessages".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ */
+export declare function formatMessagesSync(messages: PartialMessage[], options: FormatMessagesOptions): string[]
+
+/**
+ * A synchronous version of "analyzeMetafile".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#analyze
+ */
+export declare function analyzeMetafileSync(metafile: Metafile | string, options?: AnalyzeMetafileOptions): string
+
+/**
+ * This configures the browser-based version of esbuild. It is necessary to
+ * call this first and wait for the returned promise to be resolved before
+ * making other API calls when using esbuild in the browser.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes ("options" is required)
+ *
+ * Documentation: https://esbuild.github.io/api/#browser
+ */
+export declare function initialize(options: InitializeOptions): Promise<void>
+
+export interface InitializeOptions {
+  /**
+   * The URL of the "esbuild.wasm" file. This must be provided when running
+   * esbuild in the browser.
+   */
+  wasmURL?: string | URL
+
+  /**
+   * The result of calling "new WebAssembly.Module(buffer)" where "buffer"
+   * is a typed array or ArrayBuffer containing the binary code of the
+   * "esbuild.wasm" file.
+   *
+   * You can use this as an alternative to "wasmURL" for environments where it's
+   * not possible to download the WebAssembly module.
+   */
+  wasmModule?: WebAssembly.Module
+
+  /**
+   * By default esbuild runs the WebAssembly-based browser API in a web worker
+   * to avoid blocking the UI thread. This can be disabled by setting "worker"
+   * to false.
+   */
+  worker?: boolean
+}
+
+export let version: string
+
+// Call this function to terminate esbuild's child process. The child process
+// is not terminated and re-created after each API call because it's more
+// efficient to keep it around when there are multiple API calls.
+//
+// In node this happens automatically before the parent node process exits. So
+// you only need to call this if you know you will not make any more esbuild
+// API calls and you want to clean up resources.
+//
+// Unlike node, Deno lacks the necessary APIs to clean up child processes
+// automatically. You must manually call stop() in Deno when you're done
+// using esbuild or Deno will continue running forever.
+//
+// Another reason you might want to call this is if you are using esbuild from
+// within a Deno test. Deno fails tests that create a child process without
+// killing it before the test ends, so you have to call this function (and
+// await the returned promise) in every Deno test that uses esbuild.
+export declare function stop(): Promise<void>
+
+// Note: These declarations exist to avoid type errors when you omit "dom" from
+// "lib" in your "tsconfig.json" file. TypeScript confusingly declares the
+// global "WebAssembly" type in "lib.dom.d.ts" even though it has nothing to do
+// with the browser DOM and is present in many non-browser JavaScript runtimes
+// (e.g. node and deno). Declaring it here allows esbuild's API to be used in
+// these scenarios.
+// +// There's an open issue about getting this problem corrected (although these +// declarations will need to remain even if this is fixed for backward +// compatibility with older TypeScript versions): +// +// https://github.com/microsoft/TypeScript-DOM-lib-generator/issues/826 +// +declare global { + namespace WebAssembly { + interface Module { + } + } + interface URL { + } +} diff --git a/node_modules/esbuild/lib/main.js b/node_modules/esbuild/lib/main.js new file mode 100644 index 0000000..3f41a57 --- /dev/null +++ b/node_modules/esbuild/lib/main.js @@ -0,0 +1,2237 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// lib/npm/node.ts +var node_exports = {}; +__export(node_exports, { + analyzeMetafile: () => analyzeMetafile, + analyzeMetafileSync: () => analyzeMetafileSync, + build: () => build, + buildSync: () => buildSync, + context: () => context, + default: () => node_default, + formatMessages: () => formatMessages, + formatMessagesSync: () => formatMessagesSync, + initialize: () => initialize, + stop: () => stop, + transform: () => transform, + transformSync: () => transformSync, + version: () => version +}); +module.exports = __toCommonJS(node_exports); + +// lib/shared/stdio_protocol.ts +function encodePacket(packet) { + let visit = (value) => { + if (value === null) { + bb.write8(0); + } else if (typeof value === "boolean") { + bb.write8(1); + bb.write8(+value); + } else if (typeof value === "number") { + bb.write8(2); + bb.write32(value | 0); + } else if (typeof value === "string") { + bb.write8(3); + bb.write(encodeUTF8(value)); + } else if (value instanceof Uint8Array) { + bb.write8(4); + bb.write(value); + } else if (value instanceof Array) { + bb.write8(5); + bb.write32(value.length); + for (let item of value) { + visit(item); + } + } else { + let keys = Object.keys(value); + bb.write8(6); + bb.write32(keys.length); + for (let key of keys) { + bb.write(encodeUTF8(key)); + visit(value[key]); + } + } + }; + let bb = new ByteBuffer(); + bb.write32(0); + bb.write32(packet.id << 1 | +!packet.isRequest); + visit(packet.value); + writeUInt32LE(bb.buf, bb.len - 4, 0); + return bb.buf.subarray(0, bb.len); +} +function decodePacket(bytes) { + let visit = () => { + switch (bb.read8()) { + case 0: + return null; + case 1: + return !!bb.read8(); + case 2: + return bb.read32(); + case 3: + return decodeUTF8(bb.read()); + case 4: + return bb.read(); + case 5: { + let count = bb.read32(); + let value2 = []; + for (let i = 0; i < count; i++) { + value2.push(visit()); + } + return value2; + } + case 6: { + let count = bb.read32(); + let value2 = {}; + for (let i = 0; i < count; i++) { + value2[decodeUTF8(bb.read())] = visit(); + } + return value2; + } + default: + throw new Error("Invalid packet"); + } + }; + let bb = new ByteBuffer(bytes); + let id = bb.read32(); + let isRequest = (id & 1) === 0; + id >>>= 1; + let value = visit(); + if (bb.ptr !== bytes.length) { + throw new Error("Invalid packet"); + } + return { id, isRequest, value }; +} +var ByteBuffer = class { + constructor(buf = new Uint8Array(1024)) { + this.buf = buf; + this.len = 0; + this.ptr = 0; + } + _write(delta) { + if (this.len + delta > this.buf.length) { + let clone = new Uint8Array((this.len + delta) * 2); + clone.set(this.buf); + this.buf = clone; + } + this.len += delta; + return this.len - delta; + } + write8(value) { + let offset = this._write(1); + this.buf[offset] = value; + } + write32(value) { + let offset = this._write(4); + writeUInt32LE(this.buf, value, offset); + } + write(bytes) { + let offset = this._write(4 + bytes.length); + writeUInt32LE(this.buf, bytes.length, offset); + this.buf.set(bytes, offset + 4); + } + _read(delta) { + if (this.ptr + delta > this.buf.length) { + throw new Error("Invalid packet"); + } + this.ptr += delta; + return this.ptr - delta; + } + read8() { + return this.buf[this._read(1)]; + } + read32() { + return readUInt32LE(this.buf, this._read(4)); + } + read() { + let length = this.read32(); + let bytes = new 
Uint8Array(length); + let ptr = this._read(bytes.length); + bytes.set(this.buf.subarray(ptr, ptr + length)); + return bytes; + } +}; +var encodeUTF8; +var decodeUTF8; +var encodeInvariant; +if (typeof TextEncoder !== "undefined" && typeof TextDecoder !== "undefined") { + let encoder = new TextEncoder(); + let decoder = new TextDecoder(); + encodeUTF8 = (text) => encoder.encode(text); + decodeUTF8 = (bytes) => decoder.decode(bytes); + encodeInvariant = 'new TextEncoder().encode("")'; +} else if (typeof Buffer !== "undefined") { + encodeUTF8 = (text) => Buffer.from(text); + decodeUTF8 = (bytes) => { + let { buffer, byteOffset, byteLength } = bytes; + return Buffer.from(buffer, byteOffset, byteLength).toString(); + }; + encodeInvariant = 'Buffer.from("")'; +} else { + throw new Error("No UTF-8 codec found"); +} +if (!(encodeUTF8("") instanceof Uint8Array)) + throw new Error(`Invariant violation: "${encodeInvariant} instanceof Uint8Array" is incorrectly false + +This indicates that your JavaScript environment is broken. You cannot use +esbuild in this environment because esbuild relies on this invariant. This +is not a problem with esbuild. You need to fix your environment instead. +`); +function readUInt32LE(buffer, offset) { + return buffer[offset++] | buffer[offset++] << 8 | buffer[offset++] << 16 | buffer[offset++] << 24; +} +function writeUInt32LE(buffer, value, offset) { + buffer[offset++] = value; + buffer[offset++] = value >> 8; + buffer[offset++] = value >> 16; + buffer[offset++] = value >> 24; +} + +// lib/shared/common.ts +var quote = JSON.stringify; +var buildLogLevelDefault = "warning"; +var transformLogLevelDefault = "silent"; +function validateAndJoinStringArray(values, what) { + const toJoin = []; + for (const value of values) { + validateStringValue(value, what); + if (value.indexOf(",") >= 0) throw new Error(`Invalid ${what}: ${value}`); + toJoin.push(value); + } + return toJoin.join(","); +} +var canBeAnything = () => null; +var mustBeBoolean = (value) => typeof value === "boolean" ? null : "a boolean"; +var mustBeString = (value) => typeof value === "string" ? null : "a string"; +var mustBeRegExp = (value) => value instanceof RegExp ? null : "a RegExp object"; +var mustBeInteger = (value) => typeof value === "number" && value === (value | 0) ? null : "an integer"; +var mustBeValidPortNumber = (value) => typeof value === "number" && value === (value | 0) && value >= 0 && value <= 65535 ? null : "a valid port number"; +var mustBeFunction = (value) => typeof value === "function" ? null : "a function"; +var mustBeArray = (value) => Array.isArray(value) ? null : "an array"; +var mustBeArrayOfStrings = (value) => Array.isArray(value) && value.every((x) => typeof x === "string") ? null : "an array of strings"; +var mustBeObject = (value) => typeof value === "object" && value !== null && !Array.isArray(value) ? null : "an object"; +var mustBeEntryPoints = (value) => typeof value === "object" && value !== null ? null : "an array or an object"; +var mustBeWebAssemblyModule = (value) => value instanceof WebAssembly.Module ? null : "a WebAssembly.Module"; +var mustBeObjectOrNull = (value) => typeof value === "object" && !Array.isArray(value) ? null : "an object or null"; +var mustBeStringOrBoolean = (value) => typeof value === "string" || typeof value === "boolean" ? null : "a string or a boolean"; +var mustBeStringOrObject = (value) => typeof value === "string" || typeof value === "object" && value !== null && !Array.isArray(value) ? 
null : "a string or an object"; +var mustBeStringOrArrayOfStrings = (value) => typeof value === "string" || Array.isArray(value) && value.every((x) => typeof x === "string") ? null : "a string or an array of strings"; +var mustBeStringOrUint8Array = (value) => typeof value === "string" || value instanceof Uint8Array ? null : "a string or a Uint8Array"; +var mustBeStringOrURL = (value) => typeof value === "string" || value instanceof URL ? null : "a string or a URL"; +function getFlag(object, keys, key, mustBeFn) { + let value = object[key]; + keys[key + ""] = true; + if (value === void 0) return void 0; + let mustBe = mustBeFn(value); + if (mustBe !== null) throw new Error(`${quote(key)} must be ${mustBe}`); + return value; +} +function checkForInvalidFlags(object, keys, where) { + for (let key in object) { + if (!(key in keys)) { + throw new Error(`Invalid option ${where}: ${quote(key)}`); + } + } +} +function validateInitializeOptions(options) { + let keys = /* @__PURE__ */ Object.create(null); + let wasmURL = getFlag(options, keys, "wasmURL", mustBeStringOrURL); + let wasmModule = getFlag(options, keys, "wasmModule", mustBeWebAssemblyModule); + let worker = getFlag(options, keys, "worker", mustBeBoolean); + checkForInvalidFlags(options, keys, "in initialize() call"); + return { + wasmURL, + wasmModule, + worker + }; +} +function validateMangleCache(mangleCache) { + let validated; + if (mangleCache !== void 0) { + validated = /* @__PURE__ */ Object.create(null); + for (let key in mangleCache) { + let value = mangleCache[key]; + if (typeof value === "string" || value === false) { + validated[key] = value; + } else { + throw new Error(`Expected ${quote(key)} in mangle cache to map to either a string or false`); + } + } + } + return validated; +} +function pushLogFlags(flags, options, keys, isTTY2, logLevelDefault) { + let color = getFlag(options, keys, "color", mustBeBoolean); + let logLevel = getFlag(options, keys, "logLevel", mustBeString); + let logLimit = getFlag(options, keys, "logLimit", mustBeInteger); + if (color !== void 0) flags.push(`--color=${color}`); + else if (isTTY2) flags.push(`--color=true`); + flags.push(`--log-level=${logLevel || logLevelDefault}`); + flags.push(`--log-limit=${logLimit || 0}`); +} +function validateStringValue(value, what, key) { + if (typeof value !== "string") { + throw new Error(`Expected value for ${what}${key !== void 0 ? 
" " + quote(key) : ""} to be a string, got ${typeof value} instead`); + } + return value; +} +function pushCommonFlags(flags, options, keys) { + let legalComments = getFlag(options, keys, "legalComments", mustBeString); + let sourceRoot = getFlag(options, keys, "sourceRoot", mustBeString); + let sourcesContent = getFlag(options, keys, "sourcesContent", mustBeBoolean); + let target = getFlag(options, keys, "target", mustBeStringOrArrayOfStrings); + let format = getFlag(options, keys, "format", mustBeString); + let globalName = getFlag(options, keys, "globalName", mustBeString); + let mangleProps = getFlag(options, keys, "mangleProps", mustBeRegExp); + let reserveProps = getFlag(options, keys, "reserveProps", mustBeRegExp); + let mangleQuoted = getFlag(options, keys, "mangleQuoted", mustBeBoolean); + let minify = getFlag(options, keys, "minify", mustBeBoolean); + let minifySyntax = getFlag(options, keys, "minifySyntax", mustBeBoolean); + let minifyWhitespace = getFlag(options, keys, "minifyWhitespace", mustBeBoolean); + let minifyIdentifiers = getFlag(options, keys, "minifyIdentifiers", mustBeBoolean); + let lineLimit = getFlag(options, keys, "lineLimit", mustBeInteger); + let drop = getFlag(options, keys, "drop", mustBeArrayOfStrings); + let dropLabels = getFlag(options, keys, "dropLabels", mustBeArrayOfStrings); + let charset = getFlag(options, keys, "charset", mustBeString); + let treeShaking = getFlag(options, keys, "treeShaking", mustBeBoolean); + let ignoreAnnotations = getFlag(options, keys, "ignoreAnnotations", mustBeBoolean); + let jsx = getFlag(options, keys, "jsx", mustBeString); + let jsxFactory = getFlag(options, keys, "jsxFactory", mustBeString); + let jsxFragment = getFlag(options, keys, "jsxFragment", mustBeString); + let jsxImportSource = getFlag(options, keys, "jsxImportSource", mustBeString); + let jsxDev = getFlag(options, keys, "jsxDev", mustBeBoolean); + let jsxSideEffects = getFlag(options, keys, "jsxSideEffects", mustBeBoolean); + let define = getFlag(options, keys, "define", mustBeObject); + let logOverride = getFlag(options, keys, "logOverride", mustBeObject); + let supported = getFlag(options, keys, "supported", mustBeObject); + let pure = getFlag(options, keys, "pure", mustBeArrayOfStrings); + let keepNames = getFlag(options, keys, "keepNames", mustBeBoolean); + let platform = getFlag(options, keys, "platform", mustBeString); + let tsconfigRaw = getFlag(options, keys, "tsconfigRaw", mustBeStringOrObject); + if (legalComments) flags.push(`--legal-comments=${legalComments}`); + if (sourceRoot !== void 0) flags.push(`--source-root=${sourceRoot}`); + if (sourcesContent !== void 0) flags.push(`--sources-content=${sourcesContent}`); + if (target) flags.push(`--target=${validateAndJoinStringArray(Array.isArray(target) ? target : [target], "target")}`); + if (format) flags.push(`--format=${format}`); + if (globalName) flags.push(`--global-name=${globalName}`); + if (platform) flags.push(`--platform=${platform}`); + if (tsconfigRaw) flags.push(`--tsconfig-raw=${typeof tsconfigRaw === "string" ? 
tsconfigRaw : JSON.stringify(tsconfigRaw)}`); + if (minify) flags.push("--minify"); + if (minifySyntax) flags.push("--minify-syntax"); + if (minifyWhitespace) flags.push("--minify-whitespace"); + if (minifyIdentifiers) flags.push("--minify-identifiers"); + if (lineLimit) flags.push(`--line-limit=${lineLimit}`); + if (charset) flags.push(`--charset=${charset}`); + if (treeShaking !== void 0) flags.push(`--tree-shaking=${treeShaking}`); + if (ignoreAnnotations) flags.push(`--ignore-annotations`); + if (drop) for (let what of drop) flags.push(`--drop:${validateStringValue(what, "drop")}`); + if (dropLabels) flags.push(`--drop-labels=${validateAndJoinStringArray(dropLabels, "drop label")}`); + if (mangleProps) flags.push(`--mangle-props=${jsRegExpToGoRegExp(mangleProps)}`); + if (reserveProps) flags.push(`--reserve-props=${jsRegExpToGoRegExp(reserveProps)}`); + if (mangleQuoted !== void 0) flags.push(`--mangle-quoted=${mangleQuoted}`); + if (jsx) flags.push(`--jsx=${jsx}`); + if (jsxFactory) flags.push(`--jsx-factory=${jsxFactory}`); + if (jsxFragment) flags.push(`--jsx-fragment=${jsxFragment}`); + if (jsxImportSource) flags.push(`--jsx-import-source=${jsxImportSource}`); + if (jsxDev) flags.push(`--jsx-dev`); + if (jsxSideEffects) flags.push(`--jsx-side-effects`); + if (define) { + for (let key in define) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid define: ${key}`); + flags.push(`--define:${key}=${validateStringValue(define[key], "define", key)}`); + } + } + if (logOverride) { + for (let key in logOverride) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid log override: ${key}`); + flags.push(`--log-override:${key}=${validateStringValue(logOverride[key], "log override", key)}`); + } + } + if (supported) { + for (let key in supported) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid supported: ${key}`); + const value = supported[key]; + if (typeof value !== "boolean") throw new Error(`Expected value for supported ${quote(key)} to be a boolean, got ${typeof value} instead`); + flags.push(`--supported:${key}=${value}`); + } + } + if (pure) for (let fn of pure) flags.push(`--pure:${validateStringValue(fn, "pure")}`); + if (keepNames) flags.push(`--keep-names`); +} +function flagsForBuildOptions(callName, options, isTTY2, logLevelDefault, writeDefault) { + var _a2; + let flags = []; + let entries = []; + let keys = /* @__PURE__ */ Object.create(null); + let stdinContents = null; + let stdinResolveDir = null; + pushLogFlags(flags, options, keys, isTTY2, logLevelDefault); + pushCommonFlags(flags, options, keys); + let sourcemap = getFlag(options, keys, "sourcemap", mustBeStringOrBoolean); + let bundle = getFlag(options, keys, "bundle", mustBeBoolean); + let splitting = getFlag(options, keys, "splitting", mustBeBoolean); + let preserveSymlinks = getFlag(options, keys, "preserveSymlinks", mustBeBoolean); + let metafile = getFlag(options, keys, "metafile", mustBeBoolean); + let outfile = getFlag(options, keys, "outfile", mustBeString); + let outdir = getFlag(options, keys, "outdir", mustBeString); + let outbase = getFlag(options, keys, "outbase", mustBeString); + let tsconfig = getFlag(options, keys, "tsconfig", mustBeString); + let resolveExtensions = getFlag(options, keys, "resolveExtensions", mustBeArrayOfStrings); + let nodePathsInput = getFlag(options, keys, "nodePaths", mustBeArrayOfStrings); + let mainFields = getFlag(options, keys, "mainFields", mustBeArrayOfStrings); + let conditions = getFlag(options, keys, "conditions", mustBeArrayOfStrings); + let external = 
getFlag(options, keys, "external", mustBeArrayOfStrings); + let packages = getFlag(options, keys, "packages", mustBeString); + let alias = getFlag(options, keys, "alias", mustBeObject); + let loader = getFlag(options, keys, "loader", mustBeObject); + let outExtension = getFlag(options, keys, "outExtension", mustBeObject); + let publicPath = getFlag(options, keys, "publicPath", mustBeString); + let entryNames = getFlag(options, keys, "entryNames", mustBeString); + let chunkNames = getFlag(options, keys, "chunkNames", mustBeString); + let assetNames = getFlag(options, keys, "assetNames", mustBeString); + let inject = getFlag(options, keys, "inject", mustBeArrayOfStrings); + let banner = getFlag(options, keys, "banner", mustBeObject); + let footer = getFlag(options, keys, "footer", mustBeObject); + let entryPoints = getFlag(options, keys, "entryPoints", mustBeEntryPoints); + let absWorkingDir = getFlag(options, keys, "absWorkingDir", mustBeString); + let stdin = getFlag(options, keys, "stdin", mustBeObject); + let write = (_a2 = getFlag(options, keys, "write", mustBeBoolean)) != null ? _a2 : writeDefault; + let allowOverwrite = getFlag(options, keys, "allowOverwrite", mustBeBoolean); + let mangleCache = getFlag(options, keys, "mangleCache", mustBeObject); + keys.plugins = true; + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (sourcemap) flags.push(`--sourcemap${sourcemap === true ? "" : `=${sourcemap}`}`); + if (bundle) flags.push("--bundle"); + if (allowOverwrite) flags.push("--allow-overwrite"); + if (splitting) flags.push("--splitting"); + if (preserveSymlinks) flags.push("--preserve-symlinks"); + if (metafile) flags.push(`--metafile`); + if (outfile) flags.push(`--outfile=${outfile}`); + if (outdir) flags.push(`--outdir=${outdir}`); + if (outbase) flags.push(`--outbase=${outbase}`); + if (tsconfig) flags.push(`--tsconfig=${tsconfig}`); + if (packages) flags.push(`--packages=${packages}`); + if (resolveExtensions) flags.push(`--resolve-extensions=${validateAndJoinStringArray(resolveExtensions, "resolve extension")}`); + if (publicPath) flags.push(`--public-path=${publicPath}`); + if (entryNames) flags.push(`--entry-names=${entryNames}`); + if (chunkNames) flags.push(`--chunk-names=${chunkNames}`); + if (assetNames) flags.push(`--asset-names=${assetNames}`); + if (mainFields) flags.push(`--main-fields=${validateAndJoinStringArray(mainFields, "main field")}`); + if (conditions) flags.push(`--conditions=${validateAndJoinStringArray(conditions, "condition")}`); + if (external) for (let name of external) flags.push(`--external:${validateStringValue(name, "external")}`); + if (alias) { + for (let old in alias) { + if (old.indexOf("=") >= 0) throw new Error(`Invalid package name in alias: ${old}`); + flags.push(`--alias:${old}=${validateStringValue(alias[old], "alias", old)}`); + } + } + if (banner) { + for (let type in banner) { + if (type.indexOf("=") >= 0) throw new Error(`Invalid banner file type: ${type}`); + flags.push(`--banner:${type}=${validateStringValue(banner[type], "banner", type)}`); + } + } + if (footer) { + for (let type in footer) { + if (type.indexOf("=") >= 0) throw new Error(`Invalid footer file type: ${type}`); + flags.push(`--footer:${type}=${validateStringValue(footer[type], "footer", type)}`); + } + } + if (inject) for (let path3 of inject) flags.push(`--inject:${validateStringValue(path3, "inject")}`); + if (loader) { + for (let ext in loader) { + if (ext.indexOf("=") >= 0) throw new Error(`Invalid loader extension: ${ext}`); + 
flags.push(`--loader:${ext}=${validateStringValue(loader[ext], "loader", ext)}`); + } + } + if (outExtension) { + for (let ext in outExtension) { + if (ext.indexOf("=") >= 0) throw new Error(`Invalid out extension: ${ext}`); + flags.push(`--out-extension:${ext}=${validateStringValue(outExtension[ext], "out extension", ext)}`); + } + } + if (entryPoints) { + if (Array.isArray(entryPoints)) { + for (let i = 0, n = entryPoints.length; i < n; i++) { + let entryPoint = entryPoints[i]; + if (typeof entryPoint === "object" && entryPoint !== null) { + let entryPointKeys = /* @__PURE__ */ Object.create(null); + let input = getFlag(entryPoint, entryPointKeys, "in", mustBeString); + let output = getFlag(entryPoint, entryPointKeys, "out", mustBeString); + checkForInvalidFlags(entryPoint, entryPointKeys, "in entry point at index " + i); + if (input === void 0) throw new Error('Missing property "in" for entry point at index ' + i); + if (output === void 0) throw new Error('Missing property "out" for entry point at index ' + i); + entries.push([output, input]); + } else { + entries.push(["", validateStringValue(entryPoint, "entry point at index " + i)]); + } + } + } else { + for (let key in entryPoints) { + entries.push([key, validateStringValue(entryPoints[key], "entry point", key)]); + } + } + } + if (stdin) { + let stdinKeys = /* @__PURE__ */ Object.create(null); + let contents = getFlag(stdin, stdinKeys, "contents", mustBeStringOrUint8Array); + let resolveDir = getFlag(stdin, stdinKeys, "resolveDir", mustBeString); + let sourcefile = getFlag(stdin, stdinKeys, "sourcefile", mustBeString); + let loader2 = getFlag(stdin, stdinKeys, "loader", mustBeString); + checkForInvalidFlags(stdin, stdinKeys, 'in "stdin" object'); + if (sourcefile) flags.push(`--sourcefile=${sourcefile}`); + if (loader2) flags.push(`--loader=${loader2}`); + if (resolveDir) stdinResolveDir = resolveDir; + if (typeof contents === "string") stdinContents = encodeUTF8(contents); + else if (contents instanceof Uint8Array) stdinContents = contents; + } + let nodePaths = []; + if (nodePathsInput) { + for (let value of nodePathsInput) { + value += ""; + nodePaths.push(value); + } + } + return { + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir, + nodePaths, + mangleCache: validateMangleCache(mangleCache) + }; +} +function flagsForTransformOptions(callName, options, isTTY2, logLevelDefault) { + let flags = []; + let keys = /* @__PURE__ */ Object.create(null); + pushLogFlags(flags, options, keys, isTTY2, logLevelDefault); + pushCommonFlags(flags, options, keys); + let sourcemap = getFlag(options, keys, "sourcemap", mustBeStringOrBoolean); + let sourcefile = getFlag(options, keys, "sourcefile", mustBeString); + let loader = getFlag(options, keys, "loader", mustBeString); + let banner = getFlag(options, keys, "banner", mustBeString); + let footer = getFlag(options, keys, "footer", mustBeString); + let mangleCache = getFlag(options, keys, "mangleCache", mustBeObject); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (sourcemap) flags.push(`--sourcemap=${sourcemap === true ? 
"external" : sourcemap}`); + if (sourcefile) flags.push(`--sourcefile=${sourcefile}`); + if (loader) flags.push(`--loader=${loader}`); + if (banner) flags.push(`--banner=${banner}`); + if (footer) flags.push(`--footer=${footer}`); + return { + flags, + mangleCache: validateMangleCache(mangleCache) + }; +} +function createChannel(streamIn) { + const requestCallbacksByKey = {}; + const closeData = { didClose: false, reason: "" }; + let responseCallbacks = {}; + let nextRequestID = 0; + let nextBuildKey = 0; + let stdout = new Uint8Array(16 * 1024); + let stdoutUsed = 0; + let readFromStdout = (chunk) => { + let limit = stdoutUsed + chunk.length; + if (limit > stdout.length) { + let swap = new Uint8Array(limit * 2); + swap.set(stdout); + stdout = swap; + } + stdout.set(chunk, stdoutUsed); + stdoutUsed += chunk.length; + let offset = 0; + while (offset + 4 <= stdoutUsed) { + let length = readUInt32LE(stdout, offset); + if (offset + 4 + length > stdoutUsed) { + break; + } + offset += 4; + handleIncomingPacket(stdout.subarray(offset, offset + length)); + offset += length; + } + if (offset > 0) { + stdout.copyWithin(0, offset, stdoutUsed); + stdoutUsed -= offset; + } + }; + let afterClose = (error) => { + closeData.didClose = true; + if (error) closeData.reason = ": " + (error.message || error); + const text = "The service was stopped" + closeData.reason; + for (let id in responseCallbacks) { + responseCallbacks[id](text, null); + } + responseCallbacks = {}; + }; + let sendRequest = (refs, value, callback) => { + if (closeData.didClose) return callback("The service is no longer running" + closeData.reason, null); + let id = nextRequestID++; + responseCallbacks[id] = (error, response) => { + try { + callback(error, response); + } finally { + if (refs) refs.unref(); + } + }; + if (refs) refs.ref(); + streamIn.writeToStdin(encodePacket({ id, isRequest: true, value })); + }; + let sendResponse = (id, value) => { + if (closeData.didClose) throw new Error("The service is no longer running" + closeData.reason); + streamIn.writeToStdin(encodePacket({ id, isRequest: false, value })); + }; + let handleRequest = async (id, request) => { + try { + if (request.command === "ping") { + sendResponse(id, {}); + return; + } + if (typeof request.key === "number") { + const requestCallbacks = requestCallbacksByKey[request.key]; + if (!requestCallbacks) { + return; + } + const callback = requestCallbacks[request.command]; + if (callback) { + await callback(id, request); + return; + } + } + throw new Error(`Invalid command: ` + request.command); + } catch (e) { + const errors = [extractErrorMessageV8(e, streamIn, null, void 0, "")]; + try { + sendResponse(id, { errors }); + } catch { + } + } + }; + let isFirstPacket = true; + let handleIncomingPacket = (bytes) => { + if (isFirstPacket) { + isFirstPacket = false; + let binaryVersion = String.fromCharCode(...bytes); + if (binaryVersion !== "0.25.5") { + throw new Error(`Cannot start service: Host version "${"0.25.5"}" does not match binary version ${quote(binaryVersion)}`); + } + return; + } + let packet = decodePacket(bytes); + if (packet.isRequest) { + handleRequest(packet.id, packet.value); + } else { + let callback = responseCallbacks[packet.id]; + delete responseCallbacks[packet.id]; + if (packet.value.error) callback(packet.value.error, {}); + else callback(null, packet.value); + } + }; + let buildOrContext = ({ callName, refs, options, isTTY: isTTY2, defaultWD: defaultWD2, callback }) => { + let refCount = 0; + const buildKey = nextBuildKey++; + const 
requestCallbacks = {}; + const buildRefs = { + ref() { + if (++refCount === 1) { + if (refs) refs.ref(); + } + }, + unref() { + if (--refCount === 0) { + delete requestCallbacksByKey[buildKey]; + if (refs) refs.unref(); + } + } + }; + requestCallbacksByKey[buildKey] = requestCallbacks; + buildRefs.ref(); + buildOrContextImpl( + callName, + buildKey, + sendRequest, + sendResponse, + buildRefs, + streamIn, + requestCallbacks, + options, + isTTY2, + defaultWD2, + (err, res) => { + try { + callback(err, res); + } finally { + buildRefs.unref(); + } + } + ); + }; + let transform2 = ({ callName, refs, input, options, isTTY: isTTY2, fs: fs3, callback }) => { + const details = createObjectStash(); + let start = (inputPath) => { + try { + if (typeof input !== "string" && !(input instanceof Uint8Array)) + throw new Error('The input to "transform" must be a string or a Uint8Array'); + let { + flags, + mangleCache + } = flagsForTransformOptions(callName, options, isTTY2, transformLogLevelDefault); + let request = { + command: "transform", + flags, + inputFS: inputPath !== null, + input: inputPath !== null ? encodeUTF8(inputPath) : typeof input === "string" ? encodeUTF8(input) : input + }; + if (mangleCache) request.mangleCache = mangleCache; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + let errors = replaceDetailsInMessages(response.errors, details); + let warnings = replaceDetailsInMessages(response.warnings, details); + let outstanding = 1; + let next = () => { + if (--outstanding === 0) { + let result = { + warnings, + code: response.code, + map: response.map, + mangleCache: void 0, + legalComments: void 0 + }; + if ("legalComments" in response) result.legalComments = response == null ? void 0 : response.legalComments; + if (response.mangleCache) result.mangleCache = response == null ? 
void 0 : response.mangleCache; + callback(null, result); + } + }; + if (errors.length > 0) return callback(failureErrorWithLog("Transform failed", errors, warnings), null); + if (response.codeFS) { + outstanding++; + fs3.readFile(response.code, (err, contents) => { + if (err !== null) { + callback(err, null); + } else { + response.code = contents; + next(); + } + }); + } + if (response.mapFS) { + outstanding++; + fs3.readFile(response.map, (err, contents) => { + if (err !== null) { + callback(err, null); + } else { + response.map = contents; + next(); + } + }); + } + next(); + }); + } catch (e) { + let flags = []; + try { + pushLogFlags(flags, options, {}, isTTY2, transformLogLevelDefault); + } catch { + } + const error = extractErrorMessageV8(e, streamIn, details, void 0, ""); + sendRequest(refs, { command: "error", flags, error }, () => { + error.detail = details.load(error.detail); + callback(failureErrorWithLog("Transform failed", [error], []), null); + }); + } + }; + if ((typeof input === "string" || input instanceof Uint8Array) && input.length > 1024 * 1024) { + let next = start; + start = () => fs3.writeFile(input, next); + } + start(null); + }; + let formatMessages2 = ({ callName, refs, messages, options, callback }) => { + if (!options) throw new Error(`Missing second argument in ${callName}() call`); + let keys = {}; + let kind = getFlag(options, keys, "kind", mustBeString); + let color = getFlag(options, keys, "color", mustBeBoolean); + let terminalWidth = getFlag(options, keys, "terminalWidth", mustBeInteger); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (kind === void 0) throw new Error(`Missing "kind" in ${callName}() call`); + if (kind !== "error" && kind !== "warning") throw new Error(`Expected "kind" to be "error" or "warning" in ${callName}() call`); + let request = { + command: "format-msgs", + messages: sanitizeMessages(messages, "messages", null, "", terminalWidth), + isWarning: kind === "warning" + }; + if (color !== void 0) request.color = color; + if (terminalWidth !== void 0) request.terminalWidth = terminalWidth; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + callback(null, response.messages); + }); + }; + let analyzeMetafile2 = ({ callName, refs, metafile, options, callback }) => { + if (options === void 0) options = {}; + let keys = {}; + let color = getFlag(options, keys, "color", mustBeBoolean); + let verbose = getFlag(options, keys, "verbose", mustBeBoolean); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + let request = { + command: "analyze-metafile", + metafile + }; + if (color !== void 0) request.color = color; + if (verbose !== void 0) request.verbose = verbose; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + callback(null, response.result); + }); + }; + return { + readFromStdout, + afterClose, + service: { + buildOrContext, + transform: transform2, + formatMessages: formatMessages2, + analyzeMetafile: analyzeMetafile2 + } + }; +} +function buildOrContextImpl(callName, buildKey, sendRequest, sendResponse, refs, streamIn, requestCallbacks, options, isTTY2, defaultWD2, callback) { + const details = createObjectStash(); + const isContext = callName === "context"; + const handleError = (e, pluginName) => { + const flags = []; + try { + pushLogFlags(flags, options, {}, isTTY2, buildLogLevelDefault); + } catch { + } + const message = extractErrorMessageV8(e, streamIn, details, void 0, pluginName); + 
sendRequest(refs, { command: "error", flags, error: message }, () => { + message.detail = details.load(message.detail); + callback(failureErrorWithLog(isContext ? "Context failed" : "Build failed", [message], []), null); + }); + }; + let plugins; + if (typeof options === "object") { + const value = options.plugins; + if (value !== void 0) { + if (!Array.isArray(value)) return handleError(new Error(`"plugins" must be an array`), ""); + plugins = value; + } + } + if (plugins && plugins.length > 0) { + if (streamIn.isSync) return handleError(new Error("Cannot use plugins in synchronous API calls"), ""); + handlePlugins( + buildKey, + sendRequest, + sendResponse, + refs, + streamIn, + requestCallbacks, + options, + plugins, + details + ).then( + (result) => { + if (!result.ok) return handleError(result.error, result.pluginName); + try { + buildOrContextContinue(result.requestPlugins, result.runOnEndCallbacks, result.scheduleOnDisposeCallbacks); + } catch (e) { + handleError(e, ""); + } + }, + (e) => handleError(e, "") + ); + return; + } + try { + buildOrContextContinue(null, (result, done) => done([], []), () => { + }); + } catch (e) { + handleError(e, ""); + } + function buildOrContextContinue(requestPlugins, runOnEndCallbacks, scheduleOnDisposeCallbacks) { + const writeDefault = streamIn.hasFS; + const { + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir, + nodePaths, + mangleCache + } = flagsForBuildOptions(callName, options, isTTY2, buildLogLevelDefault, writeDefault); + if (write && !streamIn.hasFS) throw new Error(`The "write" option is unavailable in this environment`); + const request = { + command: "build", + key: buildKey, + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir: absWorkingDir || defaultWD2, + nodePaths, + context: isContext + }; + if (requestPlugins) request.plugins = requestPlugins; + if (mangleCache) request.mangleCache = mangleCache; + const buildResponseToResult = (response, callback2) => { + const result = { + errors: replaceDetailsInMessages(response.errors, details), + warnings: replaceDetailsInMessages(response.warnings, details), + outputFiles: void 0, + metafile: void 0, + mangleCache: void 0 + }; + const originalErrors = result.errors.slice(); + const originalWarnings = result.warnings.slice(); + if (response.outputFiles) result.outputFiles = response.outputFiles.map(convertOutputFiles); + if (response.metafile) result.metafile = JSON.parse(response.metafile); + if (response.mangleCache) result.mangleCache = response.mangleCache; + if (response.writeToStdout !== void 0) console.log(decodeUTF8(response.writeToStdout).replace(/\n$/, "")); + runOnEndCallbacks(result, (onEndErrors, onEndWarnings) => { + if (originalErrors.length > 0 || onEndErrors.length > 0) { + const error = failureErrorWithLog("Build failed", originalErrors.concat(onEndErrors), originalWarnings.concat(onEndWarnings)); + return callback2(error, null, onEndErrors, onEndWarnings); + } + callback2(null, result, onEndErrors, onEndWarnings); + }); + }; + let latestResultPromise; + let provideLatestResult; + if (isContext) + requestCallbacks["on-end"] = (id, request2) => new Promise((resolve) => { + buildResponseToResult(request2, (err, result, onEndErrors, onEndWarnings) => { + const response = { + errors: onEndErrors, + warnings: onEndWarnings + }; + if (provideLatestResult) provideLatestResult(err, result); + latestResultPromise = void 0; + provideLatestResult = void 0; + sendResponse(id, response); + resolve(); + }); + }); + 
sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + if (!isContext) { + return buildResponseToResult(response, (err, res) => { + scheduleOnDisposeCallbacks(); + return callback(err, res); + }); + } + if (response.errors.length > 0) { + return callback(failureErrorWithLog("Context failed", response.errors, response.warnings), null); + } + let didDispose = false; + const result = { + rebuild: () => { + if (!latestResultPromise) latestResultPromise = new Promise((resolve, reject) => { + let settlePromise; + provideLatestResult = (err, result2) => { + if (!settlePromise) settlePromise = () => err ? reject(err) : resolve(result2); + }; + const triggerAnotherBuild = () => { + const request2 = { + command: "rebuild", + key: buildKey + }; + sendRequest(refs, request2, (error2, response2) => { + if (error2) { + reject(new Error(error2)); + } else if (settlePromise) { + settlePromise(); + } else { + triggerAnotherBuild(); + } + }); + }; + triggerAnotherBuild(); + }); + return latestResultPromise; + }, + watch: (options2 = {}) => new Promise((resolve, reject) => { + if (!streamIn.hasFS) throw new Error(`Cannot use the "watch" API in this environment`); + const keys = {}; + checkForInvalidFlags(options2, keys, `in watch() call`); + const request2 = { + command: "watch", + key: buildKey + }; + sendRequest(refs, request2, (error2) => { + if (error2) reject(new Error(error2)); + else resolve(void 0); + }); + }), + serve: (options2 = {}) => new Promise((resolve, reject) => { + if (!streamIn.hasFS) throw new Error(`Cannot use the "serve" API in this environment`); + const keys = {}; + const port = getFlag(options2, keys, "port", mustBeValidPortNumber); + const host = getFlag(options2, keys, "host", mustBeString); + const servedir = getFlag(options2, keys, "servedir", mustBeString); + const keyfile = getFlag(options2, keys, "keyfile", mustBeString); + const certfile = getFlag(options2, keys, "certfile", mustBeString); + const fallback = getFlag(options2, keys, "fallback", mustBeString); + const cors = getFlag(options2, keys, "cors", mustBeObject); + const onRequest = getFlag(options2, keys, "onRequest", mustBeFunction); + checkForInvalidFlags(options2, keys, `in serve() call`); + const request2 = { + command: "serve", + key: buildKey, + onRequest: !!onRequest + }; + if (port !== void 0) request2.port = port; + if (host !== void 0) request2.host = host; + if (servedir !== void 0) request2.servedir = servedir; + if (keyfile !== void 0) request2.keyfile = keyfile; + if (certfile !== void 0) request2.certfile = certfile; + if (fallback !== void 0) request2.fallback = fallback; + if (cors) { + const corsKeys = {}; + const origin = getFlag(cors, corsKeys, "origin", mustBeStringOrArrayOfStrings); + checkForInvalidFlags(cors, corsKeys, `on "cors" object`); + if (Array.isArray(origin)) request2.corsOrigin = origin; + else if (origin !== void 0) request2.corsOrigin = [origin]; + } + sendRequest(refs, request2, (error2, response2) => { + if (error2) return reject(new Error(error2)); + if (onRequest) { + requestCallbacks["serve-request"] = (id, request3) => { + onRequest(request3.args); + sendResponse(id, {}); + }; + } + resolve(response2); + }); + }), + cancel: () => new Promise((resolve) => { + if (didDispose) return resolve(); + const request2 = { + command: "cancel", + key: buildKey + }; + sendRequest(refs, request2, () => { + resolve(); + }); + }), + dispose: () => new Promise((resolve) => { + if (didDispose) return resolve(); + didDispose = true; + const 
request2 = { + command: "dispose", + key: buildKey + }; + sendRequest(refs, request2, () => { + resolve(); + scheduleOnDisposeCallbacks(); + refs.unref(); + }); + }) + }; + refs.ref(); + callback(null, result); + }); + } +} +var handlePlugins = async (buildKey, sendRequest, sendResponse, refs, streamIn, requestCallbacks, initialOptions, plugins, details) => { + let onStartCallbacks = []; + let onEndCallbacks = []; + let onResolveCallbacks = {}; + let onLoadCallbacks = {}; + let onDisposeCallbacks = []; + let nextCallbackID = 0; + let i = 0; + let requestPlugins = []; + let isSetupDone = false; + plugins = [...plugins]; + for (let item of plugins) { + let keys = {}; + if (typeof item !== "object") throw new Error(`Plugin at index ${i} must be an object`); + const name = getFlag(item, keys, "name", mustBeString); + if (typeof name !== "string" || name === "") throw new Error(`Plugin at index ${i} is missing a name`); + try { + let setup = getFlag(item, keys, "setup", mustBeFunction); + if (typeof setup !== "function") throw new Error(`Plugin is missing a setup function`); + checkForInvalidFlags(item, keys, `on plugin ${quote(name)}`); + let plugin = { + name, + onStart: false, + onEnd: false, + onResolve: [], + onLoad: [] + }; + i++; + let resolve = (path3, options = {}) => { + if (!isSetupDone) throw new Error('Cannot call "resolve" before plugin setup has completed'); + if (typeof path3 !== "string") throw new Error(`The path to resolve must be a string`); + let keys2 = /* @__PURE__ */ Object.create(null); + let pluginName = getFlag(options, keys2, "pluginName", mustBeString); + let importer = getFlag(options, keys2, "importer", mustBeString); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + let resolveDir = getFlag(options, keys2, "resolveDir", mustBeString); + let kind = getFlag(options, keys2, "kind", mustBeString); + let pluginData = getFlag(options, keys2, "pluginData", canBeAnything); + let importAttributes = getFlag(options, keys2, "with", mustBeObject); + checkForInvalidFlags(options, keys2, "in resolve() call"); + return new Promise((resolve2, reject) => { + const request = { + command: "resolve", + path: path3, + key: buildKey, + pluginName: name + }; + if (pluginName != null) request.pluginName = pluginName; + if (importer != null) request.importer = importer; + if (namespace != null) request.namespace = namespace; + if (resolveDir != null) request.resolveDir = resolveDir; + if (kind != null) request.kind = kind; + else throw new Error(`Must specify "kind" when calling "resolve"`); + if (pluginData != null) request.pluginData = details.store(pluginData); + if (importAttributes != null) request.with = sanitizeStringMap(importAttributes, "with"); + sendRequest(refs, request, (error, response) => { + if (error !== null) reject(new Error(error)); + else resolve2({ + errors: replaceDetailsInMessages(response.errors, details), + warnings: replaceDetailsInMessages(response.warnings, details), + path: response.path, + external: response.external, + sideEffects: response.sideEffects, + namespace: response.namespace, + suffix: response.suffix, + pluginData: details.load(response.pluginData) + }); + }); + }); + }; + let promise = setup({ + initialOptions, + resolve, + onStart(callback) { + let registeredText = `This error came from the "onStart" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onStart"); + onStartCallbacks.push({ name, callback, note: registeredNote }); + plugin.onStart = true; + }, + 
onEnd(callback) { + let registeredText = `This error came from the "onEnd" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onEnd"); + onEndCallbacks.push({ name, callback, note: registeredNote }); + plugin.onEnd = true; + }, + onResolve(options, callback) { + let registeredText = `This error came from the "onResolve" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onResolve"); + let keys2 = {}; + let filter = getFlag(options, keys2, "filter", mustBeRegExp); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + checkForInvalidFlags(options, keys2, `in onResolve() call for plugin ${quote(name)}`); + if (filter == null) throw new Error(`onResolve() call is missing a filter`); + let id = nextCallbackID++; + onResolveCallbacks[id] = { name, callback, note: registeredNote }; + plugin.onResolve.push({ id, filter: jsRegExpToGoRegExp(filter), namespace: namespace || "" }); + }, + onLoad(options, callback) { + let registeredText = `This error came from the "onLoad" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onLoad"); + let keys2 = {}; + let filter = getFlag(options, keys2, "filter", mustBeRegExp); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + checkForInvalidFlags(options, keys2, `in onLoad() call for plugin ${quote(name)}`); + if (filter == null) throw new Error(`onLoad() call is missing a filter`); + let id = nextCallbackID++; + onLoadCallbacks[id] = { name, callback, note: registeredNote }; + plugin.onLoad.push({ id, filter: jsRegExpToGoRegExp(filter), namespace: namespace || "" }); + }, + onDispose(callback) { + onDisposeCallbacks.push(callback); + }, + esbuild: streamIn.esbuild + }); + if (promise) await promise; + requestPlugins.push(plugin); + } catch (e) { + return { ok: false, error: e, pluginName: name }; + } + } + requestCallbacks["on-start"] = async (id, request) => { + details.clear(); + let response = { errors: [], warnings: [] }; + await Promise.all(onStartCallbacks.map(async ({ name, callback, note }) => { + try { + let result = await callback(); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onStart() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + checkForInvalidFlags(result, keys, `from onStart() callback in plugin ${quote(name)}`); + if (errors != null) response.errors.push(...sanitizeMessages(errors, "errors", details, name, void 0)); + if (warnings != null) response.warnings.push(...sanitizeMessages(warnings, "warnings", details, name, void 0)); + } + } catch (e) { + response.errors.push(extractErrorMessageV8(e, streamIn, details, note && note(), name)); + } + })); + sendResponse(id, response); + }; + requestCallbacks["on-resolve"] = async (id, request) => { + let response = {}, name = "", callback, note; + for (let id2 of request.ids) { + try { + ({ name, callback, note } = onResolveCallbacks[id2]); + let result = await callback({ + path: request.path, + importer: request.importer, + namespace: request.namespace, + resolveDir: request.resolveDir, + kind: request.kind, + pluginData: details.load(request.pluginData), + with: request.with + }); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onResolve() callback in plugin ${quote(name)} to 
return an object`); + let keys = {}; + let pluginName = getFlag(result, keys, "pluginName", mustBeString); + let path3 = getFlag(result, keys, "path", mustBeString); + let namespace = getFlag(result, keys, "namespace", mustBeString); + let suffix = getFlag(result, keys, "suffix", mustBeString); + let external = getFlag(result, keys, "external", mustBeBoolean); + let sideEffects = getFlag(result, keys, "sideEffects", mustBeBoolean); + let pluginData = getFlag(result, keys, "pluginData", canBeAnything); + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + let watchFiles = getFlag(result, keys, "watchFiles", mustBeArrayOfStrings); + let watchDirs = getFlag(result, keys, "watchDirs", mustBeArrayOfStrings); + checkForInvalidFlags(result, keys, `from onResolve() callback in plugin ${quote(name)}`); + response.id = id2; + if (pluginName != null) response.pluginName = pluginName; + if (path3 != null) response.path = path3; + if (namespace != null) response.namespace = namespace; + if (suffix != null) response.suffix = suffix; + if (external != null) response.external = external; + if (sideEffects != null) response.sideEffects = sideEffects; + if (pluginData != null) response.pluginData = details.store(pluginData); + if (errors != null) response.errors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) response.warnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + if (watchFiles != null) response.watchFiles = sanitizeStringArray(watchFiles, "watchFiles"); + if (watchDirs != null) response.watchDirs = sanitizeStringArray(watchDirs, "watchDirs"); + break; + } + } catch (e) { + response = { id: id2, errors: [extractErrorMessageV8(e, streamIn, details, note && note(), name)] }; + break; + } + } + sendResponse(id, response); + }; + requestCallbacks["on-load"] = async (id, request) => { + let response = {}, name = "", callback, note; + for (let id2 of request.ids) { + try { + ({ name, callback, note } = onLoadCallbacks[id2]); + let result = await callback({ + path: request.path, + namespace: request.namespace, + suffix: request.suffix, + pluginData: details.load(request.pluginData), + with: request.with + }); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onLoad() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let pluginName = getFlag(result, keys, "pluginName", mustBeString); + let contents = getFlag(result, keys, "contents", mustBeStringOrUint8Array); + let resolveDir = getFlag(result, keys, "resolveDir", mustBeString); + let pluginData = getFlag(result, keys, "pluginData", canBeAnything); + let loader = getFlag(result, keys, "loader", mustBeString); + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + let watchFiles = getFlag(result, keys, "watchFiles", mustBeArrayOfStrings); + let watchDirs = getFlag(result, keys, "watchDirs", mustBeArrayOfStrings); + checkForInvalidFlags(result, keys, `from onLoad() callback in plugin ${quote(name)}`); + response.id = id2; + if (pluginName != null) response.pluginName = pluginName; + if (contents instanceof Uint8Array) response.contents = contents; + else if (contents != null) response.contents = encodeUTF8(contents); + if (resolveDir != null) response.resolveDir = resolveDir; + if (pluginData != null) response.pluginData = details.store(pluginData); + if (loader != null) response.loader = loader; + 
if (errors != null) response.errors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) response.warnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + if (watchFiles != null) response.watchFiles = sanitizeStringArray(watchFiles, "watchFiles"); + if (watchDirs != null) response.watchDirs = sanitizeStringArray(watchDirs, "watchDirs"); + break; + } + } catch (e) { + response = { id: id2, errors: [extractErrorMessageV8(e, streamIn, details, note && note(), name)] }; + break; + } + } + sendResponse(id, response); + }; + let runOnEndCallbacks = (result, done) => done([], []); + if (onEndCallbacks.length > 0) { + runOnEndCallbacks = (result, done) => { + (async () => { + const onEndErrors = []; + const onEndWarnings = []; + for (const { name, callback, note } of onEndCallbacks) { + let newErrors; + let newWarnings; + try { + const value = await callback(result); + if (value != null) { + if (typeof value !== "object") throw new Error(`Expected onEnd() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let errors = getFlag(value, keys, "errors", mustBeArray); + let warnings = getFlag(value, keys, "warnings", mustBeArray); + checkForInvalidFlags(value, keys, `from onEnd() callback in plugin ${quote(name)}`); + if (errors != null) newErrors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) newWarnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + } + } catch (e) { + newErrors = [extractErrorMessageV8(e, streamIn, details, note && note(), name)]; + } + if (newErrors) { + onEndErrors.push(...newErrors); + try { + result.errors.push(...newErrors); + } catch { + } + } + if (newWarnings) { + onEndWarnings.push(...newWarnings); + try { + result.warnings.push(...newWarnings); + } catch { + } + } + } + done(onEndErrors, onEndWarnings); + })(); + }; + } + let scheduleOnDisposeCallbacks = () => { + for (const cb of onDisposeCallbacks) { + setTimeout(() => cb(), 0); + } + }; + isSetupDone = true; + return { + ok: true, + requestPlugins, + runOnEndCallbacks, + scheduleOnDisposeCallbacks + }; +}; +function createObjectStash() { + const map = /* @__PURE__ */ new Map(); + let nextID = 0; + return { + clear() { + map.clear(); + }, + load(id) { + return map.get(id); + }, + store(value) { + if (value === void 0) return -1; + const id = nextID++; + map.set(id, value); + return id; + } + }; +} +function extractCallerV8(e, streamIn, ident) { + let note; + let tried = false; + return () => { + if (tried) return note; + tried = true; + try { + let lines = (e.stack + "").split("\n"); + lines.splice(1, 1); + let location = parseStackLinesV8(streamIn, lines, ident); + if (location) { + note = { text: e.message, location }; + return note; + } + } catch { + } + }; +} +function extractErrorMessageV8(e, streamIn, stash, note, pluginName) { + let text = "Internal error"; + let location = null; + try { + text = (e && e.message || e) + ""; + } catch { + } + try { + location = parseStackLinesV8(streamIn, (e.stack + "").split("\n"), ""); + } catch { + } + return { id: "", pluginName, text, location, notes: note ? [note] : [], detail: stash ? 
stash.store(e) : -1 }; +} +function parseStackLinesV8(streamIn, lines, ident) { + let at = " at "; + if (streamIn.readFileSync && !lines[0].startsWith(at) && lines[1].startsWith(at)) { + for (let i = 1; i < lines.length; i++) { + let line = lines[i]; + if (!line.startsWith(at)) continue; + line = line.slice(at.length); + while (true) { + let match = /^(?:new |async )?\S+ \((.*)\)$/.exec(line); + if (match) { + line = match[1]; + continue; + } + match = /^eval at \S+ \((.*)\)(?:, \S+:\d+:\d+)?$/.exec(line); + if (match) { + line = match[1]; + continue; + } + match = /^(\S+):(\d+):(\d+)$/.exec(line); + if (match) { + let contents; + try { + contents = streamIn.readFileSync(match[1], "utf8"); + } catch { + break; + } + let lineText = contents.split(/\r\n|\r|\n|\u2028|\u2029/)[+match[2] - 1] || ""; + let column = +match[3] - 1; + let length = lineText.slice(column, column + ident.length) === ident ? ident.length : 0; + return { + file: match[1], + namespace: "file", + line: +match[2], + column: encodeUTF8(lineText.slice(0, column)).length, + length: encodeUTF8(lineText.slice(column, column + length)).length, + lineText: lineText + "\n" + lines.slice(1).join("\n"), + suggestion: "" + }; + } + break; + } + } + } + return null; +} +function failureErrorWithLog(text, errors, warnings) { + let limit = 5; + text += errors.length < 1 ? "" : ` with ${errors.length} error${errors.length < 2 ? "" : "s"}:` + errors.slice(0, limit + 1).map((e, i) => { + if (i === limit) return "\n..."; + if (!e.location) return ` +error: ${e.text}`; + let { file, line, column } = e.location; + let pluginText = e.pluginName ? `[plugin: ${e.pluginName}] ` : ""; + return ` +${file}:${line}:${column}: ERROR: ${pluginText}${e.text}`; + }).join(""); + let error = new Error(text); + for (const [key, value] of [["errors", errors], ["warnings", warnings]]) { + Object.defineProperty(error, key, { + configurable: true, + enumerable: true, + get: () => value, + set: (value2) => Object.defineProperty(error, key, { + configurable: true, + enumerable: true, + value: value2 + }) + }); + } + return error; +} +function replaceDetailsInMessages(messages, stash) { + for (const message of messages) { + message.detail = stash.load(message.detail); + } + return messages; +} +function sanitizeLocation(location, where, terminalWidth) { + if (location == null) return null; + let keys = {}; + let file = getFlag(location, keys, "file", mustBeString); + let namespace = getFlag(location, keys, "namespace", mustBeString); + let line = getFlag(location, keys, "line", mustBeInteger); + let column = getFlag(location, keys, "column", mustBeInteger); + let length = getFlag(location, keys, "length", mustBeInteger); + let lineText = getFlag(location, keys, "lineText", mustBeString); + let suggestion = getFlag(location, keys, "suggestion", mustBeString); + checkForInvalidFlags(location, keys, where); + if (lineText) { + const relevantASCII = lineText.slice( + 0, + (column && column > 0 ? column : 0) + (length && length > 0 ? length : 0) + (terminalWidth && terminalWidth > 0 ? 
terminalWidth : 80) + ); + if (!/[\x7F-\uFFFF]/.test(relevantASCII) && !/\n/.test(lineText)) { + lineText = relevantASCII; + } + } + return { + file: file || "", + namespace: namespace || "", + line: line || 0, + column: column || 0, + length: length || 0, + lineText: lineText || "", + suggestion: suggestion || "" + }; +} +function sanitizeMessages(messages, property, stash, fallbackPluginName, terminalWidth) { + let messagesClone = []; + let index = 0; + for (const message of messages) { + let keys = {}; + let id = getFlag(message, keys, "id", mustBeString); + let pluginName = getFlag(message, keys, "pluginName", mustBeString); + let text = getFlag(message, keys, "text", mustBeString); + let location = getFlag(message, keys, "location", mustBeObjectOrNull); + let notes = getFlag(message, keys, "notes", mustBeArray); + let detail = getFlag(message, keys, "detail", canBeAnything); + let where = `in element ${index} of "${property}"`; + checkForInvalidFlags(message, keys, where); + let notesClone = []; + if (notes) { + for (const note of notes) { + let noteKeys = {}; + let noteText = getFlag(note, noteKeys, "text", mustBeString); + let noteLocation = getFlag(note, noteKeys, "location", mustBeObjectOrNull); + checkForInvalidFlags(note, noteKeys, where); + notesClone.push({ + text: noteText || "", + location: sanitizeLocation(noteLocation, where, terminalWidth) + }); + } + } + messagesClone.push({ + id: id || "", + pluginName: pluginName || fallbackPluginName, + text: text || "", + location: sanitizeLocation(location, where, terminalWidth), + notes: notesClone, + detail: stash ? stash.store(detail) : -1 + }); + index++; + } + return messagesClone; +} +function sanitizeStringArray(values, property) { + const result = []; + for (const value of values) { + if (typeof value !== "string") throw new Error(`${quote(property)} must be an array of strings`); + result.push(value); + } + return result; +} +function sanitizeStringMap(map, property) { + const result = /* @__PURE__ */ Object.create(null); + for (const key in map) { + const value = map[key]; + if (typeof value !== "string") throw new Error(`key ${quote(key)} in object ${quote(property)} must be a string`); + result[key] = value; + } + return result; +} +function convertOutputFiles({ path: path3, contents, hash }) { + let text = null; + return { + path: path3, + contents, + hash, + get text() { + const binary = this.contents; + if (text === null || binary !== contents) { + contents = binary; + text = decodeUTF8(binary); + } + return text; + } + }; +} +function jsRegExpToGoRegExp(regexp) { + let result = regexp.source; + if (regexp.flags) result = `(?${regexp.flags})${result}`; + return result; +} + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var packageDarwin_arm64 = "@esbuild/darwin-arm64"; +var packageDarwin_x64 = "@esbuild/darwin-x64"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + 
"linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": "@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd arm64 LE": "@esbuild/netbsd-arm64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd arm64 LE": "@esbuild/openbsd-arm64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM }; +} +function pkgForSomeOtherPlatform() { + const libMainJS = require.resolve("esbuild"); + const nodeModulesDirectory = path.dirname(path.dirname(path.dirname(libMainJS))); + if (path.basename(nodeModulesDirectory) === "node_modules") { + for (const unixKey in knownUnixlikePackages) { + try { + const pkg = knownUnixlikePackages[unixKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + for (const windowsKey in knownWindowsPackages) { + try { + const pkg = knownWindowsPackages[windowsKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + } + return null; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} +function generateBinPath() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + return { binPath: ESBUILD_BINARY_PATH, isWASM: false }; + } + } + const { pkg, subpath, isWASM } = pkgAndSubpathForCurrentPlatform(); + let binPath; + try { + binPath = require.resolve(`${pkg}/${subpath}`); + } catch (e) { + binPath = downloadedBinPath(pkg, subpath); + if (!fs.existsSync(binPath)) { + try { + require.resolve(pkg); + } catch { + const otherPkg = pkgForSomeOtherPlatform(); + if (otherPkg) { + let suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild on Windows or macOS and copying "node_modules" +into a Docker image that runs Linux, or by copying "node_modules" between +Windows and WSL environments. + +If you are installing with npm, you can try not copying the "node_modules" +directory when you copy the files over, and running "npm ci" or "npm install" +on the destination platform after the copy. 
Or you could consider using yarn +instead of npm which has built-in support for installing a package on multiple +platforms simultaneously. + +If you are installing with yarn, you can try listing both this platform and the +other platform in your ".yarnrc.yml" file using the "supportedArchitectures" +feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + if (pkg === packageDarwin_x64 && otherPkg === packageDarwin_arm64 || pkg === packageDarwin_arm64 && otherPkg === packageDarwin_x64) { + suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild with npm running inside of Rosetta 2 and then +trying to use it with node running outside of Rosetta 2, or vice versa (Rosetta +2 is Apple's on-the-fly x86_64-to-arm64 translation service). + +If you are installing with npm, you can try ensuring that both npm and node are +not running under Rosetta 2 and then reinstalling esbuild. This likely involves +changing how you installed npm and/or node. For example, installing node with +the universal installer here should work: https://nodejs.org/en/download/. Or +you could consider using yarn instead of npm which has built-in support for +installing a package on multiple platforms simultaneously. + +If you are installing with yarn, you can try listing both "arm64" and "x64" +in your ".yarnrc.yml" file using the "supportedArchitectures" feature: +https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + } + throw new Error(` +You installed esbuild for another platform than the one you're currently using. +This won't work because esbuild is written with native code and needs to +install a platform-specific binary executable. +${suggestions} +Another alternative is to use the "esbuild-wasm" package instead, which works +the same way on all platforms. But it comes with a heavy performance cost and +can sometimes be 10x slower than the "esbuild" package, so you may also not +want to do that. +`); + } + throw new Error(`The package "${pkg}" could not be found, and is needed by esbuild. + +If you are installing esbuild with npm, make sure that you don't specify the +"--no-optional" or "--omit=optional" flags. 
The "optionalDependencies" feature +of "package.json" is used by esbuild to install the correct binary executable +for your current platform.`); + } + throw e; + } + } + if (/\.zip\//.test(binPath)) { + let pnpapi; + try { + pnpapi = require("pnpapi"); + } catch (e) { + } + if (pnpapi) { + const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation; + const binTargetPath = path.join( + root, + "node_modules", + ".cache", + "esbuild", + `pnpapi-${pkg.replace("/", "-")}-${"0.25.5"}-${path.basename(subpath)}` + ); + if (!fs.existsSync(binTargetPath)) { + fs.mkdirSync(path.dirname(binTargetPath), { recursive: true }); + fs.copyFileSync(binPath, binTargetPath); + fs.chmodSync(binTargetPath, 493); + } + return { binPath: binTargetPath, isWASM }; + } + } + return { binPath, isWASM }; +} + +// lib/npm/node.ts +var child_process = require("child_process"); +var crypto = require("crypto"); +var path2 = require("path"); +var fs2 = require("fs"); +var os2 = require("os"); +var tty = require("tty"); +var worker_threads; +if (process.env.ESBUILD_WORKER_THREADS !== "0") { + try { + worker_threads = require("worker_threads"); + } catch { + } + let [major, minor] = process.versions.node.split("."); + if ( + // { + if ((!ESBUILD_BINARY_PATH || false) && (path2.basename(__filename) !== "main.js" || path2.basename(__dirname) !== "lib")) { + throw new Error( + `The esbuild JavaScript API cannot be bundled. Please mark the "esbuild" package as external so it's not included in the bundle. + +More information: The file containing the code for esbuild's JavaScript API (${__filename}) does not appear to be inside the esbuild package on the file system, which usually means that the esbuild package was bundled into another file. This is problematic because the API needs to run a binary executable inside the esbuild package which is located using a relative path from the API code to the executable. If the esbuild package is bundled, the relative path will be incorrect and the executable won't be found.` + ); + } + if (false) { + return ["node", [path2.join(__dirname, "..", "bin", "esbuild")]]; + } else { + const { binPath, isWASM } = generateBinPath(); + if (isWASM) { + return ["node", [binPath]]; + } else { + return [binPath, []]; + } + } +}; +var isTTY = () => tty.isatty(2); +var fsSync = { + readFile(tempFile, callback) { + try { + let contents = fs2.readFileSync(tempFile, "utf8"); + try { + fs2.unlinkSync(tempFile); + } catch { + } + callback(null, contents); + } catch (err) { + callback(err, null); + } + }, + writeFile(contents, callback) { + try { + let tempFile = randomFileName(); + fs2.writeFileSync(tempFile, contents); + callback(tempFile); + } catch { + callback(null); + } + } +}; +var fsAsync = { + readFile(tempFile, callback) { + try { + fs2.readFile(tempFile, "utf8", (err, contents) => { + try { + fs2.unlink(tempFile, () => callback(err, contents)); + } catch { + callback(err, contents); + } + }); + } catch (err) { + callback(err, null); + } + }, + writeFile(contents, callback) { + try { + let tempFile = randomFileName(); + fs2.writeFile(tempFile, contents, (err) => err !== null ? 
callback(null) : callback(tempFile)); + } catch { + callback(null); + } + } +}; +var version = "0.25.5"; +var build = (options) => ensureServiceIsRunning().build(options); +var context = (buildOptions) => ensureServiceIsRunning().context(buildOptions); +var transform = (input, options) => ensureServiceIsRunning().transform(input, options); +var formatMessages = (messages, options) => ensureServiceIsRunning().formatMessages(messages, options); +var analyzeMetafile = (messages, options) => ensureServiceIsRunning().analyzeMetafile(messages, options); +var buildSync = (options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.buildSync(options); + } + let result; + runServiceSync((service) => service.buildOrContext({ + callName: "buildSync", + refs: null, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var transformSync = (input, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.transformSync(input, options); + } + let result; + runServiceSync((service) => service.transform({ + callName: "transformSync", + refs: null, + input, + options: options || {}, + isTTY: isTTY(), + fs: fsSync, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var formatMessagesSync = (messages, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.formatMessagesSync(messages, options); + } + let result; + runServiceSync((service) => service.formatMessages({ + callName: "formatMessagesSync", + refs: null, + messages, + options, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var analyzeMetafileSync = (metafile, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.analyzeMetafileSync(metafile, options); + } + let result; + runServiceSync((service) => service.analyzeMetafile({ + callName: "analyzeMetafileSync", + refs: null, + metafile: typeof metafile === "string" ? 
metafile : JSON.stringify(metafile), + options, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var stop = () => { + if (stopService) stopService(); + if (workerThreadService) workerThreadService.stop(); + return Promise.resolve(); +}; +var initializeWasCalled = false; +var initialize = (options) => { + options = validateInitializeOptions(options || {}); + if (options.wasmURL) throw new Error(`The "wasmURL" option only works in the browser`); + if (options.wasmModule) throw new Error(`The "wasmModule" option only works in the browser`); + if (options.worker) throw new Error(`The "worker" option only works in the browser`); + if (initializeWasCalled) throw new Error('Cannot call "initialize" more than once'); + ensureServiceIsRunning(); + initializeWasCalled = true; + return Promise.resolve(); +}; +var defaultWD = process.cwd(); +var longLivedService; +var stopService; +var ensureServiceIsRunning = () => { + if (longLivedService) return longLivedService; + let [command, args] = esbuildCommandAndArgs(); + let child = child_process.spawn(command, args.concat(`--service=${"0.25.5"}`, "--ping"), { + windowsHide: true, + stdio: ["pipe", "pipe", "inherit"], + cwd: defaultWD + }); + let { readFromStdout, afterClose, service } = createChannel({ + writeToStdin(bytes) { + child.stdin.write(bytes, (err) => { + if (err) afterClose(err); + }); + }, + readFileSync: fs2.readFileSync, + isSync: false, + hasFS: true, + esbuild: node_exports + }); + child.stdin.on("error", afterClose); + child.on("error", afterClose); + const stdin = child.stdin; + const stdout = child.stdout; + stdout.on("data", readFromStdout); + stdout.on("end", afterClose); + stopService = () => { + stdin.destroy(); + stdout.destroy(); + child.kill(); + initializeWasCalled = false; + longLivedService = void 0; + stopService = void 0; + }; + let refCount = 0; + child.unref(); + if (stdin.unref) { + stdin.unref(); + } + if (stdout.unref) { + stdout.unref(); + } + const refs = { + ref() { + if (++refCount === 1) child.ref(); + }, + unref() { + if (--refCount === 0) child.unref(); + } + }; + longLivedService = { + build: (options) => new Promise((resolve, reject) => { + service.buildOrContext({ + callName: "build", + refs, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => err ? reject(err) : resolve(res) + }); + }), + context: (options) => new Promise((resolve, reject) => service.buildOrContext({ + callName: "context", + refs, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + transform: (input, options) => new Promise((resolve, reject) => service.transform({ + callName: "transform", + refs, + input, + options: options || {}, + isTTY: isTTY(), + fs: fsAsync, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + formatMessages: (messages, options) => new Promise((resolve, reject) => service.formatMessages({ + callName: "formatMessages", + refs, + messages, + options, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + analyzeMetafile: (metafile, options) => new Promise((resolve, reject) => service.analyzeMetafile({ + callName: "analyzeMetafile", + refs, + metafile: typeof metafile === "string" ? metafile : JSON.stringify(metafile), + options, + callback: (err, res) => err ? 
reject(err) : resolve(res) + })) + }; + return longLivedService; +}; +var runServiceSync = (callback) => { + let [command, args] = esbuildCommandAndArgs(); + let stdin = new Uint8Array(); + let { readFromStdout, afterClose, service } = createChannel({ + writeToStdin(bytes) { + if (stdin.length !== 0) throw new Error("Must run at most one command"); + stdin = bytes; + }, + isSync: true, + hasFS: true, + esbuild: node_exports + }); + callback(service); + let stdout = child_process.execFileSync(command, args.concat(`--service=${"0.25.5"}`), { + cwd: defaultWD, + windowsHide: true, + input: stdin, + // We don't know how large the output could be. If it's too large, the + // command will fail with ENOBUFS. Reserve 16mb for now since that feels + // like it should be enough. Also allow overriding this with an environment + // variable. + maxBuffer: +process.env.ESBUILD_MAX_BUFFER || 16 * 1024 * 1024 + }); + readFromStdout(stdout); + afterClose(null); +}; +var randomFileName = () => { + return path2.join(os2.tmpdir(), `esbuild-${crypto.randomBytes(32).toString("hex")}`); +}; +var workerThreadService = null; +var startWorkerThreadService = (worker_threads2) => { + let { port1: mainPort, port2: workerPort } = new worker_threads2.MessageChannel(); + let worker = new worker_threads2.Worker(__filename, { + workerData: { workerPort, defaultWD, esbuildVersion: "0.25.5" }, + transferList: [workerPort], + // From node's documentation: https://nodejs.org/api/worker_threads.html + // + // Take care when launching worker threads from preload scripts (scripts loaded + // and run using the `-r` command line flag). Unless the `execArgv` option is + // explicitly set, new Worker threads automatically inherit the command line flags + // from the running process and will preload the same preload scripts as the main + // thread. If the preload script unconditionally launches a worker thread, every + // thread spawned will spawn another until the application crashes. 
+ // + execArgv: [] + }); + let nextID = 0; + let fakeBuildError = (text) => { + let error = new Error(`Build failed with 1 error: +error: ${text}`); + let errors = [{ id: "", pluginName: "", text, location: null, notes: [], detail: void 0 }]; + error.errors = errors; + error.warnings = []; + return error; + }; + let validateBuildSyncOptions = (options) => { + if (!options) return; + let plugins = options.plugins; + if (plugins && plugins.length > 0) throw fakeBuildError(`Cannot use plugins in synchronous API calls`); + }; + let applyProperties = (object, properties) => { + for (let key in properties) { + object[key] = properties[key]; + } + }; + let runCallSync = (command, args) => { + let id = nextID++; + let sharedBuffer = new SharedArrayBuffer(8); + let sharedBufferView = new Int32Array(sharedBuffer); + let msg = { sharedBuffer, id, command, args }; + worker.postMessage(msg); + let status = Atomics.wait(sharedBufferView, 0, 0); + if (status !== "ok" && status !== "not-equal") throw new Error("Internal error: Atomics.wait() failed: " + status); + let { message: { id: id2, resolve, reject, properties } } = worker_threads2.receiveMessageOnPort(mainPort); + if (id !== id2) throw new Error(`Internal error: Expected id ${id} but got id ${id2}`); + if (reject) { + applyProperties(reject, properties); + throw reject; + } + return resolve; + }; + worker.unref(); + return { + buildSync(options) { + validateBuildSyncOptions(options); + return runCallSync("build", [options]); + }, + transformSync(input, options) { + return runCallSync("transform", [input, options]); + }, + formatMessagesSync(messages, options) { + return runCallSync("formatMessages", [messages, options]); + }, + analyzeMetafileSync(metafile, options) { + return runCallSync("analyzeMetafile", [metafile, options]); + }, + stop() { + worker.terminate(); + workerThreadService = null; + } + }; +}; +var startSyncServiceWorker = () => { + let workerPort = worker_threads.workerData.workerPort; + let parentPort = worker_threads.parentPort; + let extractProperties = (object) => { + let properties = {}; + if (object && typeof object === "object") { + for (let key in object) { + properties[key] = object[key]; + } + } + return properties; + }; + try { + let service = ensureServiceIsRunning(); + defaultWD = worker_threads.workerData.defaultWD; + parentPort.on("message", (msg) => { + (async () => { + let { sharedBuffer, id, command, args } = msg; + let sharedBufferView = new Int32Array(sharedBuffer); + try { + switch (command) { + case "build": + workerPort.postMessage({ id, resolve: await service.build(args[0]) }); + break; + case "transform": + workerPort.postMessage({ id, resolve: await service.transform(args[0], args[1]) }); + break; + case "formatMessages": + workerPort.postMessage({ id, resolve: await service.formatMessages(args[0], args[1]) }); + break; + case "analyzeMetafile": + workerPort.postMessage({ id, resolve: await service.analyzeMetafile(args[0], args[1]) }); + break; + default: + throw new Error(`Invalid command: ${command}`); + } + } catch (reject) { + workerPort.postMessage({ id, reject, properties: extractProperties(reject) }); + } + Atomics.add(sharedBufferView, 0, 1); + Atomics.notify(sharedBufferView, 0, Infinity); + })(); + }); + } catch (reject) { + parentPort.on("message", (msg) => { + let { sharedBuffer, id } = msg; + let sharedBufferView = new Int32Array(sharedBuffer); + workerPort.postMessage({ id, reject, properties: extractProperties(reject) }); + Atomics.add(sharedBufferView, 0, 1); + 
Atomics.notify(sharedBufferView, 0, Infinity); + }); + } +}; +if (isInternalWorkerThread) { + startSyncServiceWorker(); +} +var node_default = node_exports; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + analyzeMetafile, + analyzeMetafileSync, + build, + buildSync, + context, + formatMessages, + formatMessagesSync, + initialize, + stop, + transform, + transformSync, + version +}); diff --git a/node_modules/esbuild/package.json b/node_modules/esbuild/package.json new file mode 100644 index 0000000..3e00b4e --- /dev/null +++ b/node_modules/esbuild/package.json @@ -0,0 +1,48 @@ +{ + "name": "esbuild", + "version": "0.25.5", + "description": "An extremely fast JavaScript and CSS bundler and minifier.", + "repository": { + "type": "git", + "url": "git+https://github.com/evanw/esbuild.git" + }, + "scripts": { + "postinstall": "node install.js" + }, + "main": "lib/main.js", + "types": "lib/main.d.ts", + "engines": { + "node": ">=18" + }, + "bin": { + "esbuild": "bin/esbuild" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": "0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": "0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + "@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + "@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" + }, + "license": "MIT" +} diff --git a/node_modules/fdir/LICENSE b/node_modules/fdir/LICENSE new file mode 100644 index 0000000..bb7fdee --- /dev/null +++ b/node_modules/fdir/LICENSE @@ -0,0 +1,7 @@ +Copyright 2023 Abdullah Atta + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fdir/README.md b/node_modules/fdir/README.md new file mode 100644 index 0000000..5c70530 --- /dev/null +++ b/node_modules/fdir/README.md @@ -0,0 +1,91 @@ +

+The Fastest Directory Crawler & Globber for NodeJS
+
+
+⚡ **The Fastest:** Nothing similar (in the NodeJS world) beats `fdir` in speed. It can easily crawl a directory containing **1 million files in < 1 second.**
+
+💡 **Stupidly Easy:** `fdir` uses an expressive Builder pattern to build the crawler, increasing code readability.
+
+🤖 **Zero Dependencies\*:** `fdir` only uses NodeJS `fs` & `path` modules.
+
+🕺 **Astonishingly Small:** < 2KB in size gzipped & minified.
+
+🖮 **Hackable:** Extending `fdir` is extremely simple now that the new Builder API is here. Feel free to experiment around.
+
+_\* `picomatch` must be installed manually by the user to support globbing._
+
+## 🚄 Quickstart
+
+### Installation
+
+You can install using `npm`:
+
+```sh
+$ npm i fdir
+```
+
+or Yarn:
+
+```sh
+$ yarn add fdir
+```
+
+### Usage
+
+```ts
+import { fdir } from "fdir";
+
+// create the builder
+const api = new fdir().withFullPaths().crawl("path/to/dir");
+
+// get all files in a directory synchronously
+const files = api.sync();
+
+// or asynchronously
+api.withPromise().then((files) => {
+  // do something with the result here.
+});
+```
+
+## Documentation:
+
+Documentation for all methods is available [here](/documentation.md).
+
+## 📊 Benchmarks:
+
+Please check the benchmark against the latest version [here](/BENCHMARKS.md).
+
+## 🙏 Used by:
+
+`fdir` is downloaded 200k+ times a week by projects around the world. Here's a list of some notable projects using `fdir` in production:
+
+> Note: if you think your project should be here, feel free to open an issue. Notable is anything with a considerable amount of GitHub stars.
+
+1. [rollup/plugins](https://github.com/rollup/plugins)
+2. [SuperchupuDev/tinyglobby](https://github.com/SuperchupuDev/tinyglobby)
+3. [pulumi/pulumi](https://github.com/pulumi/pulumi)
+4. [dotenvx/dotenvx](https://github.com/dotenvx/dotenvx)
+5. [mdn/yari](https://github.com/mdn/yari)
+6. [streetwriters/notesnook](https://github.com/streetwriters/notesnook)
+7. [imba/imba](https://github.com/imba/imba)
+8. [moroshko/react-scanner](https://github.com/moroshko/react-scanner)
+9. [netlify/build](https://github.com/netlify/build)
+10. [yassinedoghri/astro-i18next](https://github.com/yassinedoghri/astro-i18next)
+11. [selfrefactor/rambda](https://github.com/selfrefactor/rambda)
+12. [whyboris/Video-Hub-App](https://github.com/whyboris/Video-Hub-App)
+
+## 🦮 LICENSE
+
+Copyright © 2024 Abdullah Atta under MIT.
[Read full text here.](https://github.com/thecodrr/fdir/raw/master/LICENSE) diff --git a/node_modules/fdir/dist/api/async.d.ts b/node_modules/fdir/dist/api/async.d.ts new file mode 100644 index 0000000..f236ae4 --- /dev/null +++ b/node_modules/fdir/dist/api/async.d.ts @@ -0,0 +1,3 @@ +import { Output, Options, ResultCallback } from "../types"; +export declare function promise(root: string, options: Options): Promise; +export declare function callback(root: string, options: Options, callback: ResultCallback): void; diff --git a/node_modules/fdir/dist/api/async.js b/node_modules/fdir/dist/api/async.js new file mode 100644 index 0000000..efc6649 --- /dev/null +++ b/node_modules/fdir/dist/api/async.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.callback = exports.promise = void 0; +const walker_1 = require("./walker"); +function promise(root, options) { + return new Promise((resolve, reject) => { + callback(root, options, (err, output) => { + if (err) + return reject(err); + resolve(output); + }); + }); +} +exports.promise = promise; +function callback(root, options, callback) { + let walker = new walker_1.Walker(root, options, callback); + walker.start(); +} +exports.callback = callback; diff --git a/node_modules/fdir/dist/api/counter.d.ts b/node_modules/fdir/dist/api/counter.d.ts new file mode 100644 index 0000000..6982d0c --- /dev/null +++ b/node_modules/fdir/dist/api/counter.d.ts @@ -0,0 +1,12 @@ +export declare class Counter { + private _files; + private _directories; + set files(num: number); + get files(): number; + set directories(num: number); + get directories(): number; + /** + * @deprecated use `directories` instead + */ + get dirs(): number; +} diff --git a/node_modules/fdir/dist/api/counter.js b/node_modules/fdir/dist/api/counter.js new file mode 100644 index 0000000..685cb27 --- /dev/null +++ b/node_modules/fdir/dist/api/counter.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Counter = void 0; +class Counter { + _files = 0; + _directories = 0; + set files(num) { + this._files = num; + } + get files() { + return this._files; + } + set directories(num) { + this._directories = num; + } + get directories() { + return this._directories; + } + /** + * @deprecated use `directories` instead + */ + /* c8 ignore next 3 */ + get dirs() { + return this._directories; + } +} +exports.Counter = Counter; diff --git a/node_modules/fdir/dist/api/functions/get-array.d.ts b/node_modules/fdir/dist/api/functions/get-array.d.ts new file mode 100644 index 0000000..958437f --- /dev/null +++ b/node_modules/fdir/dist/api/functions/get-array.d.ts @@ -0,0 +1,3 @@ +import { Options } from "../../types"; +export type GetArrayFunction = (paths: string[]) => string[]; +export declare function build(options: Options): GetArrayFunction; diff --git a/node_modules/fdir/dist/api/functions/get-array.js b/node_modules/fdir/dist/api/functions/get-array.js new file mode 100644 index 0000000..1e02308 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/get-array.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const getArray = (paths) => { + return paths; +}; +const getArrayGroup = () => { + return [""].slice(0, 0); +}; +function build(options) { + return options.group ? 
getArrayGroup : getArray; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/group-files.d.ts b/node_modules/fdir/dist/api/functions/group-files.d.ts new file mode 100644 index 0000000..da691f7 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/group-files.d.ts @@ -0,0 +1,3 @@ +import { Group, Options } from "../../types"; +export type GroupFilesFunction = (groups: Group[], directory: string, files: string[]) => void; +export declare function build(options: Options): GroupFilesFunction; diff --git a/node_modules/fdir/dist/api/functions/group-files.js b/node_modules/fdir/dist/api/functions/group-files.js new file mode 100644 index 0000000..4ccaa1a --- /dev/null +++ b/node_modules/fdir/dist/api/functions/group-files.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const groupFiles = (groups, directory, files) => { + groups.push({ directory, files, dir: directory }); +}; +const empty = () => { }; +function build(options) { + return options.group ? groupFiles : empty; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/invoke-callback.d.ts b/node_modules/fdir/dist/api/functions/invoke-callback.d.ts new file mode 100644 index 0000000..968c429 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/invoke-callback.d.ts @@ -0,0 +1,3 @@ +import { Output, ResultCallback, WalkerState, Options } from "../../types"; +export type InvokeCallbackFunction = (state: WalkerState, error: Error | null, callback?: ResultCallback) => null | TOutput; +export declare function build(options: Options, isSynchronous: boolean): InvokeCallbackFunction; diff --git a/node_modules/fdir/dist/api/functions/invoke-callback.js b/node_modules/fdir/dist/api/functions/invoke-callback.js new file mode 100644 index 0000000..ed59ca2 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/invoke-callback.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const onlyCountsSync = (state) => { + return state.counts; +}; +const groupsSync = (state) => { + return state.groups; +}; +const defaultSync = (state) => { + return state.paths; +}; +const limitFilesSync = (state) => { + return state.paths.slice(0, state.options.maxFiles); +}; +const onlyCountsAsync = (state, error, callback) => { + report(error, callback, state.counts, state.options.suppressErrors); + return null; +}; +const defaultAsync = (state, error, callback) => { + report(error, callback, state.paths, state.options.suppressErrors); + return null; +}; +const limitFilesAsync = (state, error, callback) => { + report(error, callback, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors); + return null; +}; +const groupsAsync = (state, error, callback) => { + report(error, callback, state.groups, state.options.suppressErrors); + return null; +}; +function report(error, callback, output, suppressErrors) { + if (error && !suppressErrors) + callback(error, output); + else + callback(null, output); +} +function build(options, isSynchronous) { + const { onlyCounts, group, maxFiles } = options; + if (onlyCounts) + return isSynchronous + ? onlyCountsSync + : onlyCountsAsync; + else if (group) + return isSynchronous + ? groupsSync + : groupsAsync; + else if (maxFiles) + return isSynchronous + ? limitFilesSync + : limitFilesAsync; + else + return isSynchronous + ? 
defaultSync + : defaultAsync; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/join-path.d.ts b/node_modules/fdir/dist/api/functions/join-path.d.ts new file mode 100644 index 0000000..63e33e8 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/join-path.d.ts @@ -0,0 +1,5 @@ +import { Options, PathSeparator } from "../../types"; +export declare function joinPathWithBasePath(filename: string, directoryPath: string): string; +export declare function joinDirectoryPath(filename: string, directoryPath: string, separator: PathSeparator): string; +export type JoinPathFunction = (filename: string, directoryPath: string) => string; +export declare function build(root: string, options: Options): JoinPathFunction; diff --git a/node_modules/fdir/dist/api/functions/join-path.js b/node_modules/fdir/dist/api/functions/join-path.js new file mode 100644 index 0000000..e84faf6 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/join-path.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = exports.joinDirectoryPath = exports.joinPathWithBasePath = void 0; +const path_1 = require("path"); +const utils_1 = require("../../utils"); +function joinPathWithBasePath(filename, directoryPath) { + return directoryPath + filename; +} +exports.joinPathWithBasePath = joinPathWithBasePath; +function joinPathWithRelativePath(root, options) { + return function (filename, directoryPath) { + const sameRoot = directoryPath.startsWith(root); + if (sameRoot) + return directoryPath.replace(root, "") + filename; + else + return ((0, utils_1.convertSlashes)((0, path_1.relative)(root, directoryPath), options.pathSeparator) + + options.pathSeparator + + filename); + }; +} +function joinPath(filename) { + return filename; +} +function joinDirectoryPath(filename, directoryPath, separator) { + return directoryPath + filename + separator; +} +exports.joinDirectoryPath = joinDirectoryPath; +function build(root, options) { + const { relativePaths, includeBasePath } = options; + return relativePaths && root + ? joinPathWithRelativePath(root, options) + : includeBasePath + ? 
joinPathWithBasePath + : joinPath; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/push-directory.d.ts b/node_modules/fdir/dist/api/functions/push-directory.d.ts new file mode 100644 index 0000000..1527920 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/push-directory.d.ts @@ -0,0 +1,3 @@ +import { FilterPredicate, Options } from "../../types"; +export type PushDirectoryFunction = (directoryPath: string, paths: string[], filters?: FilterPredicate[]) => void; +export declare function build(root: string, options: Options): PushDirectoryFunction; diff --git a/node_modules/fdir/dist/api/functions/push-directory.js b/node_modules/fdir/dist/api/functions/push-directory.js new file mode 100644 index 0000000..6858cb6 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/push-directory.js @@ -0,0 +1,37 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +function pushDirectoryWithRelativePath(root) { + return function (directoryPath, paths) { + paths.push(directoryPath.substring(root.length) || "."); + }; +} +function pushDirectoryFilterWithRelativePath(root) { + return function (directoryPath, paths, filters) { + const relativePath = directoryPath.substring(root.length) || "."; + if (filters.every((filter) => filter(relativePath, true))) { + paths.push(relativePath); + } + }; +} +const pushDirectory = (directoryPath, paths) => { + paths.push(directoryPath || "."); +}; +const pushDirectoryFilter = (directoryPath, paths, filters) => { + const path = directoryPath || "."; + if (filters.every((filter) => filter(path, true))) { + paths.push(path); + } +}; +const empty = () => { }; +function build(root, options) { + const { includeDirs, filters, relativePaths } = options; + if (!includeDirs) + return empty; + if (relativePaths) + return filters && filters.length + ? pushDirectoryFilterWithRelativePath(root) + : pushDirectoryWithRelativePath(root); + return filters && filters.length ? 
pushDirectoryFilter : pushDirectory; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/push-file.d.ts b/node_modules/fdir/dist/api/functions/push-file.d.ts new file mode 100644 index 0000000..6a4f2f6 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/push-file.d.ts @@ -0,0 +1,3 @@ +import { FilterPredicate, Options, Counts } from "../../types"; +export type PushFileFunction = (directoryPath: string, paths: string[], counts: Counts, filters?: FilterPredicate[]) => void; +export declare function build(options: Options): PushFileFunction; diff --git a/node_modules/fdir/dist/api/functions/push-file.js b/node_modules/fdir/dist/api/functions/push-file.js new file mode 100644 index 0000000..8884395 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/push-file.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const pushFileFilterAndCount = (filename, _paths, counts, filters) => { + if (filters.every((filter) => filter(filename, false))) + counts.files++; +}; +const pushFileFilter = (filename, paths, _counts, filters) => { + if (filters.every((filter) => filter(filename, false))) + paths.push(filename); +}; +const pushFileCount = (_filename, _paths, counts, _filters) => { + counts.files++; +}; +const pushFile = (filename, paths) => { + paths.push(filename); +}; +const empty = () => { }; +function build(options) { + const { excludeFiles, filters, onlyCounts } = options; + if (excludeFiles) + return empty; + if (filters && filters.length) { + return onlyCounts ? pushFileFilterAndCount : pushFileFilter; + } + else if (onlyCounts) { + return pushFileCount; + } + else { + return pushFile; + } +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/functions/resolve-symlink.d.ts b/node_modules/fdir/dist/api/functions/resolve-symlink.d.ts new file mode 100644 index 0000000..303342d --- /dev/null +++ b/node_modules/fdir/dist/api/functions/resolve-symlink.d.ts @@ -0,0 +1,5 @@ +/// +import fs from "fs"; +import { WalkerState, Options } from "../../types"; +export type ResolveSymlinkFunction = (path: string, state: WalkerState, callback: (stat: fs.Stats, path: string) => void) => void; +export declare function build(options: Options, isSynchronous: boolean): ResolveSymlinkFunction | null; diff --git a/node_modules/fdir/dist/api/functions/resolve-symlink.js b/node_modules/fdir/dist/api/functions/resolve-symlink.js new file mode 100644 index 0000000..dbf0720 --- /dev/null +++ b/node_modules/fdir/dist/api/functions/resolve-symlink.js @@ -0,0 +1,67 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const fs_1 = __importDefault(require("fs")); +const path_1 = require("path"); +const resolveSymlinksAsync = function (path, state, callback) { + const { queue, options: { suppressErrors }, } = state; + queue.enqueue(); + fs_1.default.realpath(path, (error, resolvedPath) => { + if (error) + return queue.dequeue(suppressErrors ? null : error, state); + fs_1.default.stat(resolvedPath, (error, stat) => { + if (error) + return queue.dequeue(suppressErrors ? 
null : error, state); + if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) + return queue.dequeue(null, state); + callback(stat, resolvedPath); + queue.dequeue(null, state); + }); + }); +}; +const resolveSymlinks = function (path, state, callback) { + const { queue, options: { suppressErrors }, } = state; + queue.enqueue(); + try { + const resolvedPath = fs_1.default.realpathSync(path); + const stat = fs_1.default.statSync(resolvedPath); + if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) + return; + callback(stat, resolvedPath); + } + catch (e) { + if (!suppressErrors) + throw e; + } +}; +function build(options, isSynchronous) { + if (!options.resolveSymlinks || options.excludeSymlinks) + return null; + return isSynchronous ? resolveSymlinks : resolveSymlinksAsync; +} +exports.build = build; +function isRecursive(path, resolved, state) { + if (state.options.useRealPaths) + return isRecursiveUsingRealPaths(resolved, state); + let parent = (0, path_1.dirname)(path); + let depth = 1; + while (parent !== state.root && depth < 2) { + const resolvedPath = state.symlinks.get(parent); + const isSameRoot = !!resolvedPath && + (resolvedPath === resolved || + resolvedPath.startsWith(resolved) || + resolved.startsWith(resolvedPath)); + if (isSameRoot) + depth++; + else + parent = (0, path_1.dirname)(parent); + } + state.symlinks.set(path, resolved); + return depth > 1; +} +function isRecursiveUsingRealPaths(resolved, state) { + return state.visited.includes(resolved + state.options.pathSeparator); +} diff --git a/node_modules/fdir/dist/api/functions/walk-directory.d.ts b/node_modules/fdir/dist/api/functions/walk-directory.d.ts new file mode 100644 index 0000000..af23d1d --- /dev/null +++ b/node_modules/fdir/dist/api/functions/walk-directory.d.ts @@ -0,0 +1,5 @@ +/// +import { WalkerState } from "../../types"; +import fs from "fs"; +export type WalkDirectoryFunction = (state: WalkerState, crawlPath: string, directoryPath: string, depth: number, callback: (entries: fs.Dirent[], directoryPath: string, depth: number) => void) => void; +export declare function build(isSynchronous: boolean): WalkDirectoryFunction; diff --git a/node_modules/fdir/dist/api/functions/walk-directory.js b/node_modules/fdir/dist/api/functions/walk-directory.js new file mode 100644 index 0000000..424302b --- /dev/null +++ b/node_modules/fdir/dist/api/functions/walk-directory.js @@ -0,0 +1,40 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.build = void 0; +const fs_1 = __importDefault(require("fs")); +const readdirOpts = { withFileTypes: true }; +const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback) => { + state.queue.enqueue(); + if (currentDepth < 0) + return state.queue.dequeue(null, state); + state.visited.push(crawlPath); + state.counts.directories++; + // Perf: Node >= 10 introduced withFileTypes that helps us + // skip an extra fs.stat call. + fs_1.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => { + callback(entries, directoryPath, currentDepth); + state.queue.dequeue(state.options.suppressErrors ? 
null : error, state); + }); +}; +const walkSync = (state, crawlPath, directoryPath, currentDepth, callback) => { + if (currentDepth < 0) + return; + state.visited.push(crawlPath); + state.counts.directories++; + let entries = []; + try { + entries = fs_1.default.readdirSync(crawlPath || ".", readdirOpts); + } + catch (e) { + if (!state.options.suppressErrors) + throw e; + } + callback(entries, directoryPath, currentDepth); +}; +function build(isSynchronous) { + return isSynchronous ? walkSync : walkAsync; +} +exports.build = build; diff --git a/node_modules/fdir/dist/api/queue.d.ts b/node_modules/fdir/dist/api/queue.d.ts new file mode 100644 index 0000000..68ab44a --- /dev/null +++ b/node_modules/fdir/dist/api/queue.d.ts @@ -0,0 +1,15 @@ +import { WalkerState } from "../types"; +type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void; +/** + * This is a custom stateless queue to track concurrent async fs calls. + * It increments a counter whenever a call is queued and decrements it + * as soon as it completes. When the counter hits 0, it calls onQueueEmpty. + */ +export declare class Queue { + private onQueueEmpty?; + count: number; + constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined); + enqueue(): number; + dequeue(error: Error | null, output: WalkerState): void; +} +export {}; diff --git a/node_modules/fdir/dist/api/queue.js b/node_modules/fdir/dist/api/queue.js new file mode 100644 index 0000000..4708d42 --- /dev/null +++ b/node_modules/fdir/dist/api/queue.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Queue = void 0; +/** + * This is a custom stateless queue to track concurrent async fs calls. + * It increments a counter whenever a call is queued and decrements it + * as soon as it completes. When the counter hits 0, it calls onQueueEmpty. 
+ */ +class Queue { + onQueueEmpty; + count = 0; + constructor(onQueueEmpty) { + this.onQueueEmpty = onQueueEmpty; + } + enqueue() { + this.count++; + return this.count; + } + dequeue(error, output) { + if (this.onQueueEmpty && (--this.count <= 0 || error)) { + this.onQueueEmpty(error, output); + if (error) { + output.controller.abort(); + this.onQueueEmpty = undefined; + } + } + } +} +exports.Queue = Queue; diff --git a/node_modules/fdir/dist/api/sync.d.ts b/node_modules/fdir/dist/api/sync.d.ts new file mode 100644 index 0000000..2190cc2 --- /dev/null +++ b/node_modules/fdir/dist/api/sync.d.ts @@ -0,0 +1,2 @@ +import { Output, Options } from "../types"; +export declare function sync(root: string, options: Options): TOutput; diff --git a/node_modules/fdir/dist/api/sync.js b/node_modules/fdir/dist/api/sync.js new file mode 100644 index 0000000..073bc88 --- /dev/null +++ b/node_modules/fdir/dist/api/sync.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = void 0; +const walker_1 = require("./walker"); +function sync(root, options) { + const walker = new walker_1.Walker(root, options); + return walker.start(); +} +exports.sync = sync; diff --git a/node_modules/fdir/dist/api/walker.d.ts b/node_modules/fdir/dist/api/walker.d.ts new file mode 100644 index 0000000..cb6e1c8 --- /dev/null +++ b/node_modules/fdir/dist/api/walker.d.ts @@ -0,0 +1,18 @@ +import { ResultCallback, Options } from "../types"; +import { Output } from "../types"; +export declare class Walker { + private readonly root; + private readonly isSynchronous; + private readonly state; + private readonly joinPath; + private readonly pushDirectory; + private readonly pushFile; + private readonly getArray; + private readonly groupFiles; + private readonly resolveSymlink; + private readonly walkDirectory; + private readonly callbackInvoker; + constructor(root: string, options: Options, callback?: ResultCallback); + start(): TOutput | null; + private walk; +} diff --git a/node_modules/fdir/dist/api/walker.js b/node_modules/fdir/dist/api/walker.js new file mode 100644 index 0000000..19e9137 --- /dev/null +++ b/node_modules/fdir/dist/api/walker.js @@ -0,0 +1,129 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Walker = void 0; +const path_1 = require("path"); +const utils_1 = require("../utils"); +const joinPath = __importStar(require("./functions/join-path")); +const pushDirectory = __importStar(require("./functions/push-directory")); +const pushFile = __importStar(require("./functions/push-file")); +const getArray = __importStar(require("./functions/get-array")); +const groupFiles = __importStar(require("./functions/group-files")); +const resolveSymlink = __importStar(require("./functions/resolve-symlink")); +const invokeCallback = __importStar(require("./functions/invoke-callback")); +const walkDirectory = __importStar(require("./functions/walk-directory")); +const queue_1 = require("./queue"); +const counter_1 = require("./counter"); +class Walker { + root; + isSynchronous; + state; + joinPath; + pushDirectory; + pushFile; + getArray; + groupFiles; + resolveSymlink; + walkDirectory; + callbackInvoker; + constructor(root, options, callback) { + this.isSynchronous = !callback; + this.callbackInvoker = invokeCallback.build(options, this.isSynchronous); + this.root = (0, utils_1.normalizePath)(root, options); + this.state = { + root: (0, utils_1.isRootDirectory)(this.root) ? this.root : this.root.slice(0, -1), + // Perf: we explicitly tell the compiler to optimize for String arrays + paths: [""].slice(0, 0), + groups: [], + counts: new counter_1.Counter(), + options, + queue: new queue_1.Queue((error, state) => this.callbackInvoker(state, error, callback)), + symlinks: new Map(), + visited: [""].slice(0, 0), + controller: new AbortController(), + }; + /* + * Perf: We conditionally change functions according to options. This gives a slight + * performance boost. Since these functions are so small, they are automatically inlined + * by the javascript engine so there's no function call overhead (in most cases). + */ + this.joinPath = joinPath.build(this.root, options); + this.pushDirectory = pushDirectory.build(this.root, options); + this.pushFile = pushFile.build(options); + this.getArray = getArray.build(options); + this.groupFiles = groupFiles.build(options); + this.resolveSymlink = resolveSymlink.build(options, this.isSynchronous); + this.walkDirectory = walkDirectory.build(this.isSynchronous); + } + start() { + this.pushDirectory(this.root, this.state.paths, this.state.options.filters); + this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk); + return this.isSynchronous ? 
this.callbackInvoker(this.state, null) : null; + } + walk = (entries, directoryPath, depth) => { + const { paths, options: { filters, resolveSymlinks, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator, }, controller, } = this.state; + if (controller.signal.aborted || + (signal && signal.aborted) || + (maxFiles && paths.length > maxFiles)) + return; + const files = this.getArray(this.state.paths); + for (let i = 0; i < entries.length; ++i) { + const entry = entries[i]; + if (entry.isFile() || + (entry.isSymbolicLink() && !resolveSymlinks && !excludeSymlinks)) { + const filename = this.joinPath(entry.name, directoryPath); + this.pushFile(filename, files, this.state.counts, filters); + } + else if (entry.isDirectory()) { + let path = joinPath.joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator); + if (exclude && exclude(entry.name, path)) + continue; + this.pushDirectory(path, paths, filters); + this.walkDirectory(this.state, path, path, depth - 1, this.walk); + } + else if (this.resolveSymlink && entry.isSymbolicLink()) { + let path = joinPath.joinPathWithBasePath(entry.name, directoryPath); + this.resolveSymlink(path, this.state, (stat, resolvedPath) => { + if (stat.isDirectory()) { + resolvedPath = (0, utils_1.normalizePath)(resolvedPath, this.state.options); + if (exclude && + exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator)) + return; + this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk); + } + else { + resolvedPath = useRealPaths ? resolvedPath : path; + const filename = (0, path_1.basename)(resolvedPath); + const directoryPath = (0, utils_1.normalizePath)((0, path_1.dirname)(resolvedPath), this.state.options); + resolvedPath = this.joinPath(filename, directoryPath); + this.pushFile(resolvedPath, files, this.state.counts, filters); + } + }); + } + } + this.groupFiles(this.state.groups, directoryPath, files); + }; +} +exports.Walker = Walker; diff --git a/node_modules/fdir/dist/builder/api-builder.d.ts b/node_modules/fdir/dist/builder/api-builder.d.ts new file mode 100644 index 0000000..690c88f --- /dev/null +++ b/node_modules/fdir/dist/builder/api-builder.d.ts @@ -0,0 +1,9 @@ +import { Options, Output, ResultCallback } from "../types"; +export declare class APIBuilder { + private readonly root; + private readonly options; + constructor(root: string, options: Options); + withPromise(): Promise; + withCallback(cb: ResultCallback): void; + sync(): TReturnType; +} diff --git a/node_modules/fdir/dist/builder/api-builder.js b/node_modules/fdir/dist/builder/api-builder.js new file mode 100644 index 0000000..0538e6f --- /dev/null +++ b/node_modules/fdir/dist/builder/api-builder.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.APIBuilder = void 0; +const async_1 = require("../api/async"); +const sync_1 = require("../api/sync"); +class APIBuilder { + root; + options; + constructor(root, options) { + this.root = root; + this.options = options; + } + withPromise() { + return (0, async_1.promise)(this.root, this.options); + } + withCallback(cb) { + (0, async_1.callback)(this.root, this.options, cb); + } + sync() { + return (0, sync_1.sync)(this.root, this.options); + } +} +exports.APIBuilder = APIBuilder; diff --git a/node_modules/fdir/dist/builder/index.d.ts b/node_modules/fdir/dist/builder/index.d.ts new file mode 100644 index 0000000..5fefafd --- /dev/null +++ b/node_modules/fdir/dist/builder/index.d.ts 
@@ -0,0 +1,41 @@ +/// +import { Output, OnlyCountsOutput, GroupOutput, PathsOutput, Options, FilterPredicate, ExcludePredicate, GlobParams } from "../types"; +import { APIBuilder } from "./api-builder"; +import type picomatch from "picomatch"; +export declare class Builder { + private readonly globCache; + private options; + private globFunction?; + constructor(options?: Partial>); + group(): Builder; + withPathSeparator(separator: "/" | "\\"): this; + withBasePath(): this; + withRelativePaths(): this; + withDirs(): this; + withMaxDepth(depth: number): this; + withMaxFiles(limit: number): this; + withFullPaths(): this; + withErrors(): this; + withSymlinks({ resolvePaths }?: { + resolvePaths?: boolean | undefined; + }): this; + withAbortSignal(signal: AbortSignal): this; + normalize(): this; + filter(predicate: FilterPredicate): this; + onlyDirs(): this; + exclude(predicate: ExcludePredicate): this; + onlyCounts(): Builder; + crawl(root?: string): APIBuilder; + withGlobFunction(fn: TFunc): Builder; + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + crawlWithOptions(root: string, options: Partial>): APIBuilder; + glob(...patterns: string[]): Builder; + globWithOptions(patterns: string[]): Builder; + globWithOptions(patterns: string[], ...options: GlobParams): Builder; +} diff --git a/node_modules/fdir/dist/builder/index.js b/node_modules/fdir/dist/builder/index.js new file mode 100644 index 0000000..7f99aec --- /dev/null +++ b/node_modules/fdir/dist/builder/index.js @@ -0,0 +1,136 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Builder = void 0; +const path_1 = require("path"); +const api_builder_1 = require("./api-builder"); +var pm = null; +/* c8 ignore next 6 */ +try { + require.resolve("picomatch"); + pm = require("picomatch"); +} +catch (_e) { + // do nothing +} +class Builder { + globCache = {}; + options = { + maxDepth: Infinity, + suppressErrors: true, + pathSeparator: path_1.sep, + filters: [], + }; + globFunction; + constructor(options) { + this.options = { ...this.options, ...options }; + this.globFunction = this.options.globFunction; + } + group() { + this.options.group = true; + return this; + } + withPathSeparator(separator) { + this.options.pathSeparator = separator; + return this; + } + withBasePath() { + this.options.includeBasePath = true; + return this; + } + withRelativePaths() { + this.options.relativePaths = true; + return this; + } + withDirs() { + this.options.includeDirs = true; + return this; + } + withMaxDepth(depth) { + this.options.maxDepth = depth; + return this; + } + withMaxFiles(limit) { + this.options.maxFiles = limit; + return this; + } + withFullPaths() { + this.options.resolvePaths = true; + this.options.includeBasePath = true; + return this; + } + withErrors() { + this.options.suppressErrors = false; + return this; + } + withSymlinks({ resolvePaths = true } = {}) { + this.options.resolveSymlinks = true; + this.options.useRealPaths = resolvePaths; + return this.withFullPaths(); + } + withAbortSignal(signal) { + this.options.signal = signal; + return this; + } + normalize() { + this.options.normalizePath = true; + return this; + } + filter(predicate) { + this.options.filters.push(predicate); + return this; + } + onlyDirs() { + this.options.excludeFiles = true; + this.options.includeDirs = true; + return this; + } + exclude(predicate) { + this.options.exclude = predicate; + return this; + } 
+ onlyCounts() { + this.options.onlyCounts = true; + return this; + } + crawl(root) { + return new api_builder_1.APIBuilder(root || ".", this.options); + } + withGlobFunction(fn) { + // cast this since we don't have the new type params yet + this.globFunction = fn; + return this; + } + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + /* c8 ignore next 4 */ + crawlWithOptions(root, options) { + this.options = { ...this.options, ...options }; + return new api_builder_1.APIBuilder(root || ".", this.options); + } + glob(...patterns) { + if (this.globFunction) { + return this.globWithOptions(patterns); + } + return this.globWithOptions(patterns, ...[{ dot: true }]); + } + globWithOptions(patterns, ...options) { + const globFn = (this.globFunction || pm); + /* c8 ignore next 5 */ + if (!globFn) { + throw new Error("Please specify a glob function to use glob matching."); + } + var isMatch = this.globCache[patterns.join("\0")]; + if (!isMatch) { + isMatch = globFn(patterns, ...options); + this.globCache[patterns.join("\0")] = isMatch; + } + this.options.filters.push((path) => isMatch(path)); + return this; + } +} +exports.Builder = Builder; diff --git a/node_modules/fdir/dist/index.cjs b/node_modules/fdir/dist/index.cjs new file mode 100644 index 0000000..83e7248 --- /dev/null +++ b/node_modules/fdir/dist/index.cjs @@ -0,0 +1,572 @@ +//#region rolldown:runtime +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) { + key = keys[i]; + if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { + get: ((k) => from[k]).bind(null, key), + enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable + }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { + value: mod, + enumerable: true +}) : target, mod)); + +//#endregion +const path = __toESM(require("path")); +const fs = __toESM(require("fs")); + +//#region src/utils.ts +function cleanPath(path$1) { + let normalized = (0, path.normalize)(path$1); + if (normalized.length > 1 && normalized[normalized.length - 1] === path.sep) normalized = normalized.substring(0, normalized.length - 1); + return normalized; +} +const SLASHES_REGEX = /[\\/]/g; +function convertSlashes(path$1, separator) { + return path$1.replace(SLASHES_REGEX, separator); +} +const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i; +function isRootDirectory(path$1) { + return path$1 === "/" || WINDOWS_ROOT_DIR_REGEX.test(path$1); +} +function normalizePath(path$1, options) { + const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options; + const pathNeedsCleaning = process.platform === "win32" && path$1.includes("/") || path$1.startsWith("."); + if (resolvePaths) path$1 = (0, path.resolve)(path$1); + if (normalizePath$1 || pathNeedsCleaning) path$1 = cleanPath(path$1); + if (path$1 === ".") return ""; + const needsSeperator = path$1[path$1.length - 1] !== pathSeparator; + return convertSlashes(needsSeperator ? path$1 + pathSeparator : path$1, pathSeparator); +} + +//#endregion +//#region src/api/functions/join-path.ts +function joinPathWithBasePath(filename, directoryPath) { + return directoryPath + filename; +} +function joinPathWithRelativePath(root, options) { + return function(filename, directoryPath) { + const sameRoot = directoryPath.startsWith(root); + if (sameRoot) return directoryPath.replace(root, "") + filename; + else return convertSlashes((0, path.relative)(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename; + }; +} +function joinPath(filename) { + return filename; +} +function joinDirectoryPath(filename, directoryPath, separator) { + return directoryPath + filename + separator; +} +function build$7(root, options) { + const { relativePaths, includeBasePath } = options; + return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath; +} + +//#endregion +//#region src/api/functions/push-directory.ts +function pushDirectoryWithRelativePath(root) { + return function(directoryPath, paths) { + paths.push(directoryPath.substring(root.length) || "."); + }; +} +function pushDirectoryFilterWithRelativePath(root) { + return function(directoryPath, paths, filters) { + const relativePath = directoryPath.substring(root.length) || "."; + if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath); + }; +} +const pushDirectory = (directoryPath, paths) => { + paths.push(directoryPath || "."); +}; +const pushDirectoryFilter = (directoryPath, paths, filters) => { + const path$1 = directoryPath || "."; + if (filters.every((filter) => filter(path$1, true))) paths.push(path$1); +}; +const empty$2 = () => {}; +function build$6(root, options) { + const { includeDirs, filters, relativePaths } = options; + if (!includeDirs) return empty$2; + if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root); + return filters && filters.length ? 
pushDirectoryFilter : pushDirectory; +} + +//#endregion +//#region src/api/functions/push-file.ts +const pushFileFilterAndCount = (filename, _paths, counts, filters) => { + if (filters.every((filter) => filter(filename, false))) counts.files++; +}; +const pushFileFilter = (filename, paths, _counts, filters) => { + if (filters.every((filter) => filter(filename, false))) paths.push(filename); +}; +const pushFileCount = (_filename, _paths, counts, _filters) => { + counts.files++; +}; +const pushFile = (filename, paths) => { + paths.push(filename); +}; +const empty$1 = () => {}; +function build$5(options) { + const { excludeFiles, filters, onlyCounts } = options; + if (excludeFiles) return empty$1; + if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter; + else if (onlyCounts) return pushFileCount; + else return pushFile; +} + +//#endregion +//#region src/api/functions/get-array.ts +const getArray = (paths) => { + return paths; +}; +const getArrayGroup = () => { + return [""].slice(0, 0); +}; +function build$4(options) { + return options.group ? getArrayGroup : getArray; +} + +//#endregion +//#region src/api/functions/group-files.ts +const groupFiles = (groups, directory, files) => { + groups.push({ + directory, + files, + dir: directory + }); +}; +const empty = () => {}; +function build$3(options) { + return options.group ? groupFiles : empty; +} + +//#endregion +//#region src/api/functions/resolve-symlink.ts +const resolveSymlinksAsync = function(path$1, state, callback$1) { + const { queue, options: { suppressErrors } } = state; + queue.enqueue(); + fs.default.realpath(path$1, (error, resolvedPath) => { + if (error) return queue.dequeue(suppressErrors ? null : error, state); + fs.default.stat(resolvedPath, (error$1, stat) => { + if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state); + if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state); + callback$1(stat, resolvedPath); + queue.dequeue(null, state); + }); + }); +}; +const resolveSymlinks = function(path$1, state, callback$1) { + const { queue, options: { suppressErrors } } = state; + queue.enqueue(); + try { + const resolvedPath = fs.default.realpathSync(path$1); + const stat = fs.default.statSync(resolvedPath); + if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return; + callback$1(stat, resolvedPath); + } catch (e) { + if (!suppressErrors) throw e; + } +}; +function build$2(options, isSynchronous) { + if (!options.resolveSymlinks || options.excludeSymlinks) return null; + return isSynchronous ? 
resolveSymlinks : resolveSymlinksAsync; +} +function isRecursive(path$1, resolved, state) { + if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state); + let parent = (0, path.dirname)(path$1); + let depth = 1; + while (parent !== state.root && depth < 2) { + const resolvedPath = state.symlinks.get(parent); + const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath)); + if (isSameRoot) depth++; + else parent = (0, path.dirname)(parent); + } + state.symlinks.set(path$1, resolved); + return depth > 1; +} +function isRecursiveUsingRealPaths(resolved, state) { + return state.visited.includes(resolved + state.options.pathSeparator); +} + +//#endregion +//#region src/api/functions/invoke-callback.ts +const onlyCountsSync = (state) => { + return state.counts; +}; +const groupsSync = (state) => { + return state.groups; +}; +const defaultSync = (state) => { + return state.paths; +}; +const limitFilesSync = (state) => { + return state.paths.slice(0, state.options.maxFiles); +}; +const onlyCountsAsync = (state, error, callback$1) => { + report(error, callback$1, state.counts, state.options.suppressErrors); + return null; +}; +const defaultAsync = (state, error, callback$1) => { + report(error, callback$1, state.paths, state.options.suppressErrors); + return null; +}; +const limitFilesAsync = (state, error, callback$1) => { + report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors); + return null; +}; +const groupsAsync = (state, error, callback$1) => { + report(error, callback$1, state.groups, state.options.suppressErrors); + return null; +}; +function report(error, callback$1, output, suppressErrors) { + if (error && !suppressErrors) callback$1(error, output); + else callback$1(null, output); +} +function build$1(options, isSynchronous) { + const { onlyCounts, group, maxFiles } = options; + if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync; + else if (group) return isSynchronous ? groupsSync : groupsAsync; + else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync; + else return isSynchronous ? defaultSync : defaultAsync; +} + +//#endregion +//#region src/api/functions/walk-directory.ts +const readdirOpts = { withFileTypes: true }; +const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => { + state.queue.enqueue(); + if (currentDepth <= 0) return state.queue.dequeue(null, state); + state.visited.push(crawlPath); + state.counts.directories++; + fs.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => { + callback$1(entries, directoryPath, currentDepth); + state.queue.dequeue(state.options.suppressErrors ? null : error, state); + }); +}; +const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => { + if (currentDepth <= 0) return; + state.visited.push(crawlPath); + state.counts.directories++; + let entries = []; + try { + entries = fs.default.readdirSync(crawlPath || ".", readdirOpts); + } catch (e) { + if (!state.options.suppressErrors) throw e; + } + callback$1(entries, directoryPath, currentDepth); +}; +function build(isSynchronous) { + return isSynchronous ? walkSync : walkAsync; +} + +//#endregion +//#region src/api/queue.ts +/** +* This is a custom stateless queue to track concurrent async fs calls. +* It increments a counter whenever a call is queued and decrements it +* as soon as it completes. When the counter hits 0, it calls onQueueEmpty. 
+*/ +var Queue = class { + count = 0; + constructor(onQueueEmpty) { + this.onQueueEmpty = onQueueEmpty; + } + enqueue() { + this.count++; + return this.count; + } + dequeue(error, output) { + if (this.onQueueEmpty && (--this.count <= 0 || error)) { + this.onQueueEmpty(error, output); + if (error) { + output.controller.abort(); + this.onQueueEmpty = void 0; + } + } + } +}; + +//#endregion +//#region src/api/counter.ts +var Counter = class { + _files = 0; + _directories = 0; + set files(num) { + this._files = num; + } + get files() { + return this._files; + } + set directories(num) { + this._directories = num; + } + get directories() { + return this._directories; + } + /** + * @deprecated use `directories` instead + */ + /* c8 ignore next 3 */ + get dirs() { + return this._directories; + } +}; + +//#endregion +//#region src/api/walker.ts +var Walker = class { + root; + isSynchronous; + state; + joinPath; + pushDirectory; + pushFile; + getArray; + groupFiles; + resolveSymlink; + walkDirectory; + callbackInvoker; + constructor(root, options, callback$1) { + this.isSynchronous = !callback$1; + this.callbackInvoker = build$1(options, this.isSynchronous); + this.root = normalizePath(root, options); + this.state = { + root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1), + paths: [""].slice(0, 0), + groups: [], + counts: new Counter(), + options, + queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)), + symlinks: /* @__PURE__ */ new Map(), + visited: [""].slice(0, 0), + controller: new AbortController() + }; + this.joinPath = build$7(this.root, options); + this.pushDirectory = build$6(this.root, options); + this.pushFile = build$5(options); + this.getArray = build$4(options); + this.groupFiles = build$3(options); + this.resolveSymlink = build$2(options, this.isSynchronous); + this.walkDirectory = build(this.isSynchronous); + } + start() { + this.pushDirectory(this.root, this.state.paths, this.state.options.filters); + this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk); + return this.isSynchronous ? this.callbackInvoker(this.state, null) : null; + } + walk = (entries, directoryPath, depth) => { + const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state; + if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return; + const files = this.getArray(this.state.paths); + for (let i = 0; i < entries.length; ++i) { + const entry = entries[i]; + if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) { + const filename = this.joinPath(entry.name, directoryPath); + this.pushFile(filename, files, this.state.counts, filters); + } else if (entry.isDirectory()) { + let path$1 = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator); + if (exclude && exclude(entry.name, path$1)) continue; + this.pushDirectory(path$1, paths, filters); + this.walkDirectory(this.state, path$1, path$1, depth - 1, this.walk); + } else if (this.resolveSymlink && entry.isSymbolicLink()) { + let path$1 = joinPathWithBasePath(entry.name, directoryPath); + this.resolveSymlink(path$1, this.state, (stat, resolvedPath) => { + if (stat.isDirectory()) { + resolvedPath = normalizePath(resolvedPath, this.state.options); + if (exclude && exclude(entry.name, useRealPaths ? 
resolvedPath : path$1 + pathSeparator)) return; + this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path$1 + pathSeparator, depth - 1, this.walk); + } else { + resolvedPath = useRealPaths ? resolvedPath : path$1; + const filename = (0, path.basename)(resolvedPath); + const directoryPath$1 = normalizePath((0, path.dirname)(resolvedPath), this.state.options); + resolvedPath = this.joinPath(filename, directoryPath$1); + this.pushFile(resolvedPath, files, this.state.counts, filters); + } + }); + } + } + this.groupFiles(this.state.groups, directoryPath, files); + }; +}; + +//#endregion +//#region src/api/async.ts +function promise(root, options) { + return new Promise((resolve$1, reject) => { + callback(root, options, (err, output) => { + if (err) return reject(err); + resolve$1(output); + }); + }); +} +function callback(root, options, callback$1) { + let walker = new Walker(root, options, callback$1); + walker.start(); +} + +//#endregion +//#region src/api/sync.ts +function sync(root, options) { + const walker = new Walker(root, options); + return walker.start(); +} + +//#endregion +//#region src/builder/api-builder.ts +var APIBuilder = class { + constructor(root, options) { + this.root = root; + this.options = options; + } + withPromise() { + return promise(this.root, this.options); + } + withCallback(cb) { + callback(this.root, this.options, cb); + } + sync() { + return sync(this.root, this.options); + } +}; + +//#endregion +//#region src/builder/index.ts +var pm = null; +/* c8 ignore next 6 */ +try { + require.resolve("picomatch"); + pm = require("picomatch"); +} catch (_e) {} +var Builder = class { + globCache = {}; + options = { + maxDepth: Infinity, + suppressErrors: true, + pathSeparator: path.sep, + filters: [] + }; + globFunction; + constructor(options) { + this.options = { + ...this.options, + ...options + }; + this.globFunction = this.options.globFunction; + } + group() { + this.options.group = true; + return this; + } + withPathSeparator(separator) { + this.options.pathSeparator = separator; + return this; + } + withBasePath() { + this.options.includeBasePath = true; + return this; + } + withRelativePaths() { + this.options.relativePaths = true; + return this; + } + withDirs() { + this.options.includeDirs = true; + return this; + } + withMaxDepth(depth) { + this.options.maxDepth = depth; + return this; + } + withMaxFiles(limit) { + this.options.maxFiles = limit; + return this; + } + withFullPaths() { + this.options.resolvePaths = true; + this.options.includeBasePath = true; + return this; + } + withErrors() { + this.options.suppressErrors = false; + return this; + } + withSymlinks({ resolvePaths = true } = {}) { + this.options.resolveSymlinks = true; + this.options.useRealPaths = resolvePaths; + return this.withFullPaths(); + } + withAbortSignal(signal) { + this.options.signal = signal; + return this; + } + normalize() { + this.options.normalizePath = true; + return this; + } + filter(predicate) { + this.options.filters.push(predicate); + return this; + } + onlyDirs() { + this.options.excludeFiles = true; + this.options.includeDirs = true; + return this; + } + exclude(predicate) { + this.options.exclude = predicate; + return this; + } + onlyCounts() { + this.options.onlyCounts = true; + return this; + } + crawl(root) { + return new APIBuilder(root || ".", this.options); + } + withGlobFunction(fn) { + this.globFunction = fn; + return this; + } + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new 
fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + /* c8 ignore next 4 */ + crawlWithOptions(root, options) { + this.options = { + ...this.options, + ...options + }; + return new APIBuilder(root || ".", this.options); + } + glob(...patterns) { + if (this.globFunction) return this.globWithOptions(patterns); + return this.globWithOptions(patterns, ...[{ dot: true }]); + } + globWithOptions(patterns, ...options) { + const globFn = this.globFunction || pm; + /* c8 ignore next 5 */ + if (!globFn) throw new Error("Please specify a glob function to use glob matching."); + var isMatch = this.globCache[patterns.join("\0")]; + if (!isMatch) { + isMatch = globFn(patterns, ...options); + this.globCache[patterns.join("\0")] = isMatch; + } + this.options.filters.push((path$1) => isMatch(path$1)); + return this; + } +}; + +//#endregion +exports.fdir = Builder; \ No newline at end of file diff --git a/node_modules/fdir/dist/index.d.cts b/node_modules/fdir/dist/index.d.cts new file mode 100644 index 0000000..8eb36bc --- /dev/null +++ b/node_modules/fdir/dist/index.d.cts @@ -0,0 +1,134 @@ +/// +import picomatch from "picomatch"; + +//#region src/api/queue.d.ts +type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void; +/** + * This is a custom stateless queue to track concurrent async fs calls. + * It increments a counter whenever a call is queued and decrements it + * as soon as it completes. When the counter hits 0, it calls onQueueEmpty. + */ +declare class Queue { + private onQueueEmpty?; + count: number; + constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined); + enqueue(): number; + dequeue(error: Error | null, output: WalkerState): void; +} +//#endregion +//#region src/types.d.ts +type Counts = { + files: number; + directories: number; + /** + * @deprecated use `directories` instead. Will be removed in v7.0. + */ + dirs: number; +}; +type Group = { + directory: string; + files: string[]; + /** + * @deprecated use `directory` instead. Will be removed in v7.0. + */ + dir: string; +}; +type GroupOutput = Group[]; +type OnlyCountsOutput = Counts; +type PathsOutput = string[]; +type Output = OnlyCountsOutput | PathsOutput | GroupOutput; +type WalkerState = { + root: string; + paths: string[]; + groups: Group[]; + counts: Counts; + options: Options; + queue: Queue; + controller: AbortController; + symlinks: Map; + visited: string[]; +}; +type ResultCallback = (error: Error | null, output: TOutput) => void; +type FilterPredicate = (path: string, isDirectory: boolean) => boolean; +type ExcludePredicate = (dirName: string, dirPath: string) => boolean; +type PathSeparator = "/" | "\\"; +type Options = { + includeBasePath?: boolean; + includeDirs?: boolean; + normalizePath?: boolean; + maxDepth: number; + maxFiles?: number; + resolvePaths?: boolean; + suppressErrors: boolean; + group?: boolean; + onlyCounts?: boolean; + filters: FilterPredicate[]; + resolveSymlinks?: boolean; + useRealPaths?: boolean; + excludeFiles?: boolean; + excludeSymlinks?: boolean; + exclude?: ExcludePredicate; + relativePaths?: boolean; + pathSeparator: PathSeparator; + signal?: AbortSignal; + globFunction?: TGlobFunction; +}; +type GlobMatcher = (test: string) => boolean; +type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher; +type GlobParams = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? 
TParams : []; +//#endregion +//#region src/builder/api-builder.d.ts +declare class APIBuilder { + private readonly root; + private readonly options; + constructor(root: string, options: Options); + withPromise(): Promise; + withCallback(cb: ResultCallback): void; + sync(): TReturnType; +} +//#endregion +//#region src/builder/index.d.ts +declare class Builder { + private readonly globCache; + private options; + private globFunction?; + constructor(options?: Partial>); + group(): Builder; + withPathSeparator(separator: "/" | "\\"): this; + withBasePath(): this; + withRelativePaths(): this; + withDirs(): this; + withMaxDepth(depth: number): this; + withMaxFiles(limit: number): this; + withFullPaths(): this; + withErrors(): this; + withSymlinks({ + resolvePaths + }?: { + resolvePaths?: boolean | undefined; + }): this; + withAbortSignal(signal: AbortSignal): this; + normalize(): this; + filter(predicate: FilterPredicate): this; + onlyDirs(): this; + exclude(predicate: ExcludePredicate): this; + onlyCounts(): Builder; + crawl(root?: string): APIBuilder; + withGlobFunction(fn: TFunc): Builder; + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + crawlWithOptions(root: string, options: Partial>): APIBuilder; + glob(...patterns: string[]): Builder; + globWithOptions(patterns: string[]): Builder; + globWithOptions(patterns: string[], ...options: GlobParams): Builder; +} +//#endregion +//#region src/index.d.ts +type Fdir = typeof Builder; +//#endregion +export { Counts, ExcludePredicate, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir }; \ No newline at end of file diff --git a/node_modules/fdir/dist/index.d.mts b/node_modules/fdir/dist/index.d.mts new file mode 100644 index 0000000..8eb36bc --- /dev/null +++ b/node_modules/fdir/dist/index.d.mts @@ -0,0 +1,134 @@ +/// +import picomatch from "picomatch"; + +//#region src/api/queue.d.ts +type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void; +/** + * This is a custom stateless queue to track concurrent async fs calls. + * It increments a counter whenever a call is queued and decrements it + * as soon as it completes. When the counter hits 0, it calls onQueueEmpty. + */ +declare class Queue { + private onQueueEmpty?; + count: number; + constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined); + enqueue(): number; + dequeue(error: Error | null, output: WalkerState): void; +} +//#endregion +//#region src/types.d.ts +type Counts = { + files: number; + directories: number; + /** + * @deprecated use `directories` instead. Will be removed in v7.0. + */ + dirs: number; +}; +type Group = { + directory: string; + files: string[]; + /** + * @deprecated use `directory` instead. Will be removed in v7.0. 
+ */ + dir: string; +}; +type GroupOutput = Group[]; +type OnlyCountsOutput = Counts; +type PathsOutput = string[]; +type Output = OnlyCountsOutput | PathsOutput | GroupOutput; +type WalkerState = { + root: string; + paths: string[]; + groups: Group[]; + counts: Counts; + options: Options; + queue: Queue; + controller: AbortController; + symlinks: Map; + visited: string[]; +}; +type ResultCallback = (error: Error | null, output: TOutput) => void; +type FilterPredicate = (path: string, isDirectory: boolean) => boolean; +type ExcludePredicate = (dirName: string, dirPath: string) => boolean; +type PathSeparator = "/" | "\\"; +type Options = { + includeBasePath?: boolean; + includeDirs?: boolean; + normalizePath?: boolean; + maxDepth: number; + maxFiles?: number; + resolvePaths?: boolean; + suppressErrors: boolean; + group?: boolean; + onlyCounts?: boolean; + filters: FilterPredicate[]; + resolveSymlinks?: boolean; + useRealPaths?: boolean; + excludeFiles?: boolean; + excludeSymlinks?: boolean; + exclude?: ExcludePredicate; + relativePaths?: boolean; + pathSeparator: PathSeparator; + signal?: AbortSignal; + globFunction?: TGlobFunction; +}; +type GlobMatcher = (test: string) => boolean; +type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher; +type GlobParams = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : []; +//#endregion +//#region src/builder/api-builder.d.ts +declare class APIBuilder { + private readonly root; + private readonly options; + constructor(root: string, options: Options); + withPromise(): Promise; + withCallback(cb: ResultCallback): void; + sync(): TReturnType; +} +//#endregion +//#region src/builder/index.d.ts +declare class Builder { + private readonly globCache; + private options; + private globFunction?; + constructor(options?: Partial>); + group(): Builder; + withPathSeparator(separator: "/" | "\\"): this; + withBasePath(): this; + withRelativePaths(): this; + withDirs(): this; + withMaxDepth(depth: number): this; + withMaxFiles(limit: number): this; + withFullPaths(): this; + withErrors(): this; + withSymlinks({ + resolvePaths + }?: { + resolvePaths?: boolean | undefined; + }): this; + withAbortSignal(signal: AbortSignal): this; + normalize(): this; + filter(predicate: FilterPredicate): this; + onlyDirs(): this; + exclude(predicate: ExcludePredicate): this; + onlyCounts(): Builder; + crawl(root?: string): APIBuilder; + withGlobFunction(fn: TFunc): Builder; + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + crawlWithOptions(root: string, options: Partial>): APIBuilder; + glob(...patterns: string[]): Builder; + globWithOptions(patterns: string[]): Builder; + globWithOptions(patterns: string[], ...options: GlobParams): Builder; +} +//#endregion +//#region src/index.d.ts +type Fdir = typeof Builder; +//#endregion +export { Counts, ExcludePredicate, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir }; \ No newline at end of file diff --git a/node_modules/fdir/dist/index.d.ts b/node_modules/fdir/dist/index.d.ts new file mode 100644 index 0000000..3596376 --- /dev/null +++ b/node_modules/fdir/dist/index.d.ts @@ -0,0 +1,4 @@ +import { Builder } from "./builder"; +export { Builder as fdir }; +export type Fdir = typeof Builder; 
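As an aside, the declaration files above spell out fdir's whole chainable API, so a minimal usage sketch may help tie the pieces together. This is illustrative only: the CommonJS import matches the `exports.fdir = Builder` line earlier in the diff, while the `./src` root, the option choices, and the glob pattern are assumptions for the example, not taken from the vendored files.

```js
// Sketch only — not part of the vendored files. Exercises the Builder /
// APIBuilder API declared above; "./src" and the options are assumed examples.
const { fdir } = require("fdir");

const crawler = new fdir()
  .withBasePath()      // include the crawl root in each returned path
  .withMaxDepth(5)     // stop descending after five directory levels
  .glob("**/*.js")     // requires the optional picomatch peer dependency
  .crawl("./src");     // returns an APIBuilder

// APIBuilder exposes three ways to consume the result:
crawler.withPromise().then((paths) => console.log(paths.length, "files"));
// crawler.withCallback((error, paths) => { /* ... */ });
// const paths = crawler.sync();
```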
+export * from "./types"; diff --git a/node_modules/fdir/dist/index.js b/node_modules/fdir/dist/index.js new file mode 100644 index 0000000..b907a8b --- /dev/null +++ b/node_modules/fdir/dist/index.js @@ -0,0 +1,20 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fdir = void 0; +const builder_1 = require("./builder"); +Object.defineProperty(exports, "fdir", { enumerable: true, get: function () { return builder_1.Builder; } }); +__exportStar(require("./types"), exports); diff --git a/node_modules/fdir/dist/index.mjs b/node_modules/fdir/dist/index.mjs new file mode 100644 index 0000000..2714090 --- /dev/null +++ b/node_modules/fdir/dist/index.mjs @@ -0,0 +1,554 @@ +import { createRequire } from "module"; +import { basename, dirname, normalize, relative, resolve, sep } from "path"; +import fs from "fs"; + +//#region rolldown:runtime +var __require = /* @__PURE__ */ createRequire(import.meta.url); + +//#endregion +//#region src/utils.ts +function cleanPath(path) { + let normalized = normalize(path); + if (normalized.length > 1 && normalized[normalized.length - 1] === sep) normalized = normalized.substring(0, normalized.length - 1); + return normalized; +} +const SLASHES_REGEX = /[\\/]/g; +function convertSlashes(path, separator) { + return path.replace(SLASHES_REGEX, separator); +} +const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i; +function isRootDirectory(path) { + return path === "/" || WINDOWS_ROOT_DIR_REGEX.test(path); +} +function normalizePath(path, options) { + const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options; + const pathNeedsCleaning = process.platform === "win32" && path.includes("/") || path.startsWith("."); + if (resolvePaths) path = resolve(path); + if (normalizePath$1 || pathNeedsCleaning) path = cleanPath(path); + if (path === ".") return ""; + const needsSeperator = path[path.length - 1] !== pathSeparator; + return convertSlashes(needsSeperator ? path + pathSeparator : path, pathSeparator); +} + +//#endregion +//#region src/api/functions/join-path.ts +function joinPathWithBasePath(filename, directoryPath) { + return directoryPath + filename; +} +function joinPathWithRelativePath(root, options) { + return function(filename, directoryPath) { + const sameRoot = directoryPath.startsWith(root); + if (sameRoot) return directoryPath.replace(root, "") + filename; + else return convertSlashes(relative(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename; + }; +} +function joinPath(filename) { + return filename; +} +function joinDirectoryPath(filename, directoryPath, separator) { + return directoryPath + filename + separator; +} +function build$7(root, options) { + const { relativePaths, includeBasePath } = options; + return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? 
joinPathWithBasePath : joinPath; +} + +//#endregion +//#region src/api/functions/push-directory.ts +function pushDirectoryWithRelativePath(root) { + return function(directoryPath, paths) { + paths.push(directoryPath.substring(root.length) || "."); + }; +} +function pushDirectoryFilterWithRelativePath(root) { + return function(directoryPath, paths, filters) { + const relativePath = directoryPath.substring(root.length) || "."; + if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath); + }; +} +const pushDirectory = (directoryPath, paths) => { + paths.push(directoryPath || "."); +}; +const pushDirectoryFilter = (directoryPath, paths, filters) => { + const path = directoryPath || "."; + if (filters.every((filter) => filter(path, true))) paths.push(path); +}; +const empty$2 = () => {}; +function build$6(root, options) { + const { includeDirs, filters, relativePaths } = options; + if (!includeDirs) return empty$2; + if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root); + return filters && filters.length ? pushDirectoryFilter : pushDirectory; +} + +//#endregion +//#region src/api/functions/push-file.ts +const pushFileFilterAndCount = (filename, _paths, counts, filters) => { + if (filters.every((filter) => filter(filename, false))) counts.files++; +}; +const pushFileFilter = (filename, paths, _counts, filters) => { + if (filters.every((filter) => filter(filename, false))) paths.push(filename); +}; +const pushFileCount = (_filename, _paths, counts, _filters) => { + counts.files++; +}; +const pushFile = (filename, paths) => { + paths.push(filename); +}; +const empty$1 = () => {}; +function build$5(options) { + const { excludeFiles, filters, onlyCounts } = options; + if (excludeFiles) return empty$1; + if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter; + else if (onlyCounts) return pushFileCount; + else return pushFile; +} + +//#endregion +//#region src/api/functions/get-array.ts +const getArray = (paths) => { + return paths; +}; +const getArrayGroup = () => { + return [""].slice(0, 0); +}; +function build$4(options) { + return options.group ? getArrayGroup : getArray; +} + +//#endregion +//#region src/api/functions/group-files.ts +const groupFiles = (groups, directory, files) => { + groups.push({ + directory, + files, + dir: directory + }); +}; +const empty = () => {}; +function build$3(options) { + return options.group ? groupFiles : empty; +} + +//#endregion +//#region src/api/functions/resolve-symlink.ts +const resolveSymlinksAsync = function(path, state, callback$1) { + const { queue, options: { suppressErrors } } = state; + queue.enqueue(); + fs.realpath(path, (error, resolvedPath) => { + if (error) return queue.dequeue(suppressErrors ? null : error, state); + fs.stat(resolvedPath, (error$1, stat) => { + if (error$1) return queue.dequeue(suppressErrors ? 
null : error$1, state); + if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return queue.dequeue(null, state); + callback$1(stat, resolvedPath); + queue.dequeue(null, state); + }); + }); +}; +const resolveSymlinks = function(path, state, callback$1) { + const { queue, options: { suppressErrors } } = state; + queue.enqueue(); + try { + const resolvedPath = fs.realpathSync(path); + const stat = fs.statSync(resolvedPath); + if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return; + callback$1(stat, resolvedPath); + } catch (e) { + if (!suppressErrors) throw e; + } +}; +function build$2(options, isSynchronous) { + if (!options.resolveSymlinks || options.excludeSymlinks) return null; + return isSynchronous ? resolveSymlinks : resolveSymlinksAsync; +} +function isRecursive(path, resolved, state) { + if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state); + let parent = dirname(path); + let depth = 1; + while (parent !== state.root && depth < 2) { + const resolvedPath = state.symlinks.get(parent); + const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath)); + if (isSameRoot) depth++; + else parent = dirname(parent); + } + state.symlinks.set(path, resolved); + return depth > 1; +} +function isRecursiveUsingRealPaths(resolved, state) { + return state.visited.includes(resolved + state.options.pathSeparator); +} + +//#endregion +//#region src/api/functions/invoke-callback.ts +const onlyCountsSync = (state) => { + return state.counts; +}; +const groupsSync = (state) => { + return state.groups; +}; +const defaultSync = (state) => { + return state.paths; +}; +const limitFilesSync = (state) => { + return state.paths.slice(0, state.options.maxFiles); +}; +const onlyCountsAsync = (state, error, callback$1) => { + report(error, callback$1, state.counts, state.options.suppressErrors); + return null; +}; +const defaultAsync = (state, error, callback$1) => { + report(error, callback$1, state.paths, state.options.suppressErrors); + return null; +}; +const limitFilesAsync = (state, error, callback$1) => { + report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors); + return null; +}; +const groupsAsync = (state, error, callback$1) => { + report(error, callback$1, state.groups, state.options.suppressErrors); + return null; +}; +function report(error, callback$1, output, suppressErrors) { + if (error && !suppressErrors) callback$1(error, output); + else callback$1(null, output); +} +function build$1(options, isSynchronous) { + const { onlyCounts, group, maxFiles } = options; + if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync; + else if (group) return isSynchronous ? groupsSync : groupsAsync; + else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync; + else return isSynchronous ? defaultSync : defaultAsync; +} + +//#endregion +//#region src/api/functions/walk-directory.ts +const readdirOpts = { withFileTypes: true }; +const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => { + state.queue.enqueue(); + if (currentDepth <= 0) return state.queue.dequeue(null, state); + state.visited.push(crawlPath); + state.counts.directories++; + fs.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => { + callback$1(entries, directoryPath, currentDepth); + state.queue.dequeue(state.options.suppressErrors ? 
null : error, state); + }); +}; +const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => { + if (currentDepth <= 0) return; + state.visited.push(crawlPath); + state.counts.directories++; + let entries = []; + try { + entries = fs.readdirSync(crawlPath || ".", readdirOpts); + } catch (e) { + if (!state.options.suppressErrors) throw e; + } + callback$1(entries, directoryPath, currentDepth); +}; +function build(isSynchronous) { + return isSynchronous ? walkSync : walkAsync; +} + +//#endregion +//#region src/api/queue.ts +/** +* This is a custom stateless queue to track concurrent async fs calls. +* It increments a counter whenever a call is queued and decrements it +* as soon as it completes. When the counter hits 0, it calls onQueueEmpty. +*/ +var Queue = class { + count = 0; + constructor(onQueueEmpty) { + this.onQueueEmpty = onQueueEmpty; + } + enqueue() { + this.count++; + return this.count; + } + dequeue(error, output) { + if (this.onQueueEmpty && (--this.count <= 0 || error)) { + this.onQueueEmpty(error, output); + if (error) { + output.controller.abort(); + this.onQueueEmpty = void 0; + } + } + } +}; + +//#endregion +//#region src/api/counter.ts +var Counter = class { + _files = 0; + _directories = 0; + set files(num) { + this._files = num; + } + get files() { + return this._files; + } + set directories(num) { + this._directories = num; + } + get directories() { + return this._directories; + } + /** + * @deprecated use `directories` instead + */ + /* c8 ignore next 3 */ + get dirs() { + return this._directories; + } +}; + +//#endregion +//#region src/api/walker.ts +var Walker = class { + root; + isSynchronous; + state; + joinPath; + pushDirectory; + pushFile; + getArray; + groupFiles; + resolveSymlink; + walkDirectory; + callbackInvoker; + constructor(root, options, callback$1) { + this.isSynchronous = !callback$1; + this.callbackInvoker = build$1(options, this.isSynchronous); + this.root = normalizePath(root, options); + this.state = { + root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1), + paths: [""].slice(0, 0), + groups: [], + counts: new Counter(), + options, + queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)), + symlinks: /* @__PURE__ */ new Map(), + visited: [""].slice(0, 0), + controller: new AbortController() + }; + this.joinPath = build$7(this.root, options); + this.pushDirectory = build$6(this.root, options); + this.pushFile = build$5(options); + this.getArray = build$4(options); + this.groupFiles = build$3(options); + this.resolveSymlink = build$2(options, this.isSynchronous); + this.walkDirectory = build(this.isSynchronous); + } + start() { + this.pushDirectory(this.root, this.state.paths, this.state.options.filters); + this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk); + return this.isSynchronous ? 
this.callbackInvoker(this.state, null) : null; + } + walk = (entries, directoryPath, depth) => { + const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state; + if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return; + const files = this.getArray(this.state.paths); + for (let i = 0; i < entries.length; ++i) { + const entry = entries[i]; + if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) { + const filename = this.joinPath(entry.name, directoryPath); + this.pushFile(filename, files, this.state.counts, filters); + } else if (entry.isDirectory()) { + let path = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator); + if (exclude && exclude(entry.name, path)) continue; + this.pushDirectory(path, paths, filters); + this.walkDirectory(this.state, path, path, depth - 1, this.walk); + } else if (this.resolveSymlink && entry.isSymbolicLink()) { + let path = joinPathWithBasePath(entry.name, directoryPath); + this.resolveSymlink(path, this.state, (stat, resolvedPath) => { + if (stat.isDirectory()) { + resolvedPath = normalizePath(resolvedPath, this.state.options); + if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator)) return; + this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk); + } else { + resolvedPath = useRealPaths ? resolvedPath : path; + const filename = basename(resolvedPath); + const directoryPath$1 = normalizePath(dirname(resolvedPath), this.state.options); + resolvedPath = this.joinPath(filename, directoryPath$1); + this.pushFile(resolvedPath, files, this.state.counts, filters); + } + }); + } + } + this.groupFiles(this.state.groups, directoryPath, files); + }; +}; + +//#endregion +//#region src/api/async.ts +function promise(root, options) { + return new Promise((resolve$1, reject) => { + callback(root, options, (err, output) => { + if (err) return reject(err); + resolve$1(output); + }); + }); +} +function callback(root, options, callback$1) { + let walker = new Walker(root, options, callback$1); + walker.start(); +} + +//#endregion +//#region src/api/sync.ts +function sync(root, options) { + const walker = new Walker(root, options); + return walker.start(); +} + +//#endregion +//#region src/builder/api-builder.ts +var APIBuilder = class { + constructor(root, options) { + this.root = root; + this.options = options; + } + withPromise() { + return promise(this.root, this.options); + } + withCallback(cb) { + callback(this.root, this.options, cb); + } + sync() { + return sync(this.root, this.options); + } +}; + +//#endregion +//#region src/builder/index.ts +var pm = null; +/* c8 ignore next 6 */ +try { + __require.resolve("picomatch"); + pm = __require("picomatch"); +} catch (_e) {} +var Builder = class { + globCache = {}; + options = { + maxDepth: Infinity, + suppressErrors: true, + pathSeparator: sep, + filters: [] + }; + globFunction; + constructor(options) { + this.options = { + ...this.options, + ...options + }; + this.globFunction = this.options.globFunction; + } + group() { + this.options.group = true; + return this; + } + withPathSeparator(separator) { + this.options.pathSeparator = separator; + return this; + } + withBasePath() { + this.options.includeBasePath = true; + return this; + } + withRelativePaths() { + this.options.relativePaths = true; + return this; + 
} + withDirs() { + this.options.includeDirs = true; + return this; + } + withMaxDepth(depth) { + this.options.maxDepth = depth; + return this; + } + withMaxFiles(limit) { + this.options.maxFiles = limit; + return this; + } + withFullPaths() { + this.options.resolvePaths = true; + this.options.includeBasePath = true; + return this; + } + withErrors() { + this.options.suppressErrors = false; + return this; + } + withSymlinks({ resolvePaths = true } = {}) { + this.options.resolveSymlinks = true; + this.options.useRealPaths = resolvePaths; + return this.withFullPaths(); + } + withAbortSignal(signal) { + this.options.signal = signal; + return this; + } + normalize() { + this.options.normalizePath = true; + return this; + } + filter(predicate) { + this.options.filters.push(predicate); + return this; + } + onlyDirs() { + this.options.excludeFiles = true; + this.options.includeDirs = true; + return this; + } + exclude(predicate) { + this.options.exclude = predicate; + return this; + } + onlyCounts() { + this.options.onlyCounts = true; + return this; + } + crawl(root) { + return new APIBuilder(root || ".", this.options); + } + withGlobFunction(fn) { + this.globFunction = fn; + return this; + } + /** + * @deprecated Pass options using the constructor instead: + * ```ts + * new fdir(options).crawl("/path/to/root"); + * ``` + * This method will be removed in v7.0 + */ + /* c8 ignore next 4 */ + crawlWithOptions(root, options) { + this.options = { + ...this.options, + ...options + }; + return new APIBuilder(root || ".", this.options); + } + glob(...patterns) { + if (this.globFunction) return this.globWithOptions(patterns); + return this.globWithOptions(patterns, ...[{ dot: true }]); + } + globWithOptions(patterns, ...options) { + const globFn = this.globFunction || pm; + /* c8 ignore next 5 */ + if (!globFn) throw new Error("Please specify a glob function to use glob matching."); + var isMatch = this.globCache[patterns.join("\0")]; + if (!isMatch) { + isMatch = globFn(patterns, ...options); + this.globCache[patterns.join("\0")] = isMatch; + } + this.options.filters.push((path) => isMatch(path)); + return this; + } +}; + +//#endregion +export { Builder as fdir }; \ No newline at end of file diff --git a/node_modules/fdir/dist/types.d.ts b/node_modules/fdir/dist/types.d.ts new file mode 100644 index 0000000..bc9b108 --- /dev/null +++ b/node_modules/fdir/dist/types.d.ts @@ -0,0 +1,61 @@ +/// +import { Queue } from "./api/queue"; +export type Counts = { + files: number; + directories: number; + /** + * @deprecated use `directories` instead. Will be removed in v7.0. + */ + dirs: number; +}; +export type Group = { + directory: string; + files: string[]; + /** + * @deprecated use `directory` instead. Will be removed in v7.0. 
+ */ + dir: string; +}; +export type GroupOutput = Group[]; +export type OnlyCountsOutput = Counts; +export type PathsOutput = string[]; +export type Output = OnlyCountsOutput | PathsOutput | GroupOutput; +export type WalkerState = { + root: string; + paths: string[]; + groups: Group[]; + counts: Counts; + options: Options; + queue: Queue; + controller: AbortController; + symlinks: Map; + visited: string[]; +}; +export type ResultCallback = (error: Error | null, output: TOutput) => void; +export type FilterPredicate = (path: string, isDirectory: boolean) => boolean; +export type ExcludePredicate = (dirName: string, dirPath: string) => boolean; +export type PathSeparator = "/" | "\\"; +export type Options = { + includeBasePath?: boolean; + includeDirs?: boolean; + normalizePath?: boolean; + maxDepth: number; + maxFiles?: number; + resolvePaths?: boolean; + suppressErrors: boolean; + group?: boolean; + onlyCounts?: boolean; + filters: FilterPredicate[]; + resolveSymlinks?: boolean; + useRealPaths?: boolean; + excludeFiles?: boolean; + excludeSymlinks?: boolean; + exclude?: ExcludePredicate; + relativePaths?: boolean; + pathSeparator: PathSeparator; + signal?: AbortSignal; + globFunction?: TGlobFunction; +}; +export type GlobMatcher = (test: string) => boolean; +export type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher; +export type GlobParams = T extends (globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher ? TParams : []; diff --git a/node_modules/fdir/dist/types.js b/node_modules/fdir/dist/types.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/fdir/dist/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/fdir/dist/utils.d.ts b/node_modules/fdir/dist/utils.d.ts new file mode 100644 index 0000000..5614917 --- /dev/null +++ b/node_modules/fdir/dist/utils.d.ts @@ -0,0 +1,9 @@ +import { PathSeparator } from "./types"; +export declare function cleanPath(path: string): string; +export declare function convertSlashes(path: string, separator: PathSeparator): string; +export declare function isRootDirectory(path: string): boolean; +export declare function normalizePath(path: string, options: { + resolvePaths?: boolean; + normalizePath?: boolean; + pathSeparator: PathSeparator; +}): string; diff --git a/node_modules/fdir/dist/utils.js b/node_modules/fdir/dist/utils.js new file mode 100644 index 0000000..539b2a0 --- /dev/null +++ b/node_modules/fdir/dist/utils.js @@ -0,0 +1,37 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizePath = exports.isRootDirectory = exports.convertSlashes = exports.cleanPath = void 0; +const path_1 = require("path"); +function cleanPath(path) { + let normalized = (0, path_1.normalize)(path); + // we have to remove the last path separator + // to account for / root path + if (normalized.length > 1 && normalized[normalized.length - 1] === path_1.sep) + normalized = normalized.substring(0, normalized.length - 1); + return normalized; +} +exports.cleanPath = cleanPath; +const SLASHES_REGEX = /[\\/]/g; +function convertSlashes(path, separator) { + return path.replace(SLASHES_REGEX, separator); +} +exports.convertSlashes = convertSlashes; +const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i; +function isRootDirectory(path) { + return path === "/" || WINDOWS_ROOT_DIR_REGEX.test(path); +} +exports.isRootDirectory = isRootDirectory; +function normalizePath(path, 
options) { + const { resolvePaths, normalizePath, pathSeparator } = options; + const pathNeedsCleaning = (process.platform === "win32" && path.includes("/")) || + path.startsWith("."); + if (resolvePaths) + path = (0, path_1.resolve)(path); + if (normalizePath || pathNeedsCleaning) + path = cleanPath(path); + if (path === ".") + return ""; + const needsSeperator = path[path.length - 1] !== pathSeparator; + return convertSlashes(needsSeperator ? path + pathSeparator : path, pathSeparator); +} +exports.normalizePath = normalizePath; diff --git a/node_modules/fdir/package.json b/node_modules/fdir/package.json new file mode 100644 index 0000000..f766381 --- /dev/null +++ b/node_modules/fdir/package.json @@ -0,0 +1,90 @@ +{ + "name": "fdir", + "version": "6.4.6", + "description": "The fastest directory crawler & globbing alternative to glob, fast-glob, & tiny-glob. Crawls 1m files in < 1s", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "prepublishOnly": "npm run test && npm run build", + "build": "tsc", + "format": "prettier --write src __tests__ benchmarks", + "test": "vitest run __tests__/", + "test:coverage": "vitest run --coverage __tests__/", + "test:watch": "vitest __tests__/", + "bench": "ts-node benchmarks/benchmark.js", + "bench:glob": "ts-node benchmarks/glob-benchmark.ts", + "bench:fdir": "ts-node benchmarks/fdir-benchmark.ts", + "release": "./scripts/release.sh" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/thecodrr/fdir.git" + }, + "keywords": [ + "util", + "os", + "sys", + "fs", + "walk", + "crawler", + "directory", + "files", + "io", + "tiny-glob", + "glob", + "fast-glob", + "speed", + "javascript", + "nodejs" + ], + "author": "thecodrr ", + "license": "MIT", + "bugs": { + "url": "https://github.com/thecodrr/fdir/issues" + }, + "homepage": "https://github.com/thecodrr/fdir#readme", + "devDependencies": { + "@types/glob": "^8.1.0", + "@types/mock-fs": "^4.13.4", + "@types/node": "^20.9.4", + "@types/picomatch": "^3.0.0", + "@types/tap": "^15.0.11", + "@vitest/coverage-v8": "^0.34.6", + "all-files-in-tree": "^1.1.2", + "benny": "^3.7.1", + "csv-to-markdown-table": "^1.3.1", + "expect": "^29.7.0", + "fast-glob": "^3.3.2", + "fdir1": "npm:fdir@1.2.0", + "fdir2": "npm:fdir@2.1.0", + "fdir3": "npm:fdir@3.4.2", + "fdir4": "npm:fdir@4.1.0", + "fdir5": "npm:fdir@5.0.0", + "fs-readdir-recursive": "^1.1.0", + "get-all-files": "^4.1.0", + "glob": "^10.3.10", + "klaw-sync": "^6.0.0", + "mock-fs": "^5.2.0", + "picomatch": "^4.0.2", + "prettier": "^3.5.3", + "recur-readdir": "0.0.1", + "recursive-files": "^1.0.2", + "recursive-fs": "^2.1.0", + "recursive-readdir": "^2.2.3", + "rrdir": "^12.1.0", + "systeminformation": "^5.21.17", + "tiny-glob": "^0.2.9", + "ts-node": "^10.9.1", + "typescript": "^5.3.2", + "vitest": "^0.34.6", + "walk-sync": "^3.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } +} diff --git a/node_modules/nanoid/LICENSE b/node_modules/nanoid/LICENSE new file mode 100644 index 0000000..37f56aa --- /dev/null +++ b/node_modules/nanoid/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2017 Andrey Sitnik + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the 
Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/nanoid/README.md b/node_modules/nanoid/README.md new file mode 100644 index 0000000..35abb57 --- /dev/null +++ b/node_modules/nanoid/README.md @@ -0,0 +1,39 @@ +# Nano ID + +Nano ID logo by Anton Lovchikov + +**English** | [ะ ัƒััะบะธะน](./README.ru.md) | [็ฎ€ไฝ“ไธญๆ–‡](./README.zh-CN.md) | [Bahasa Indonesia](./README.id-ID.md) + +A tiny, secure, URL-friendly, uniqueย string IDย generator forย JavaScript. + +> โ€œAn amazing level of senseless perfectionism, +> which is simply impossible not to respect.โ€ + +* **Small.** 130 bytes (minified and gzipped). Noย dependencies. + [Size Limit] controls the size. +* **Fast.** It is 2 times faster than UUID. +* **Safe.** It uses hardware random generator. Can be used in clusters. +* **Short IDs.** It uses aย larger alphabet than UUID (`A-Za-z0-9_-`). + So ID size was reduced fromย 36ย toย 21ย symbols. +* **Portable.** Nano ID was ported + to [20 programming languages](#other-programming-languages). + +```js +import { nanoid } from 'nanoid' +model.id = nanoid() //=> "V1StGXR8_Z5jdHi6B-myT" +``` + +Supports modern browsers, IE [with Babel], Node.js and React Native. + +[online tool]: https://gitpod.io/#https://github.com/ai/nanoid/ +[with Babel]: https://developer.epages.com/blog/coding/how-to-transpile-node-modules-with-babel-and-webpack-in-a-monorepo/ +[Size Limit]: https://github.com/ai/size-limit + + + Sponsored by Evil Martians + + +## Docs +Read full docs **[here](https://github.com/ai/nanoid#readme)**. diff --git a/node_modules/nanoid/async/index.browser.cjs b/node_modules/nanoid/async/index.browser.cjs new file mode 100644 index 0000000..80d1871 --- /dev/null +++ b/node_modules/nanoid/async/index.browser.cjs @@ -0,0 +1,69 @@ +let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) + +let customAlphabet = (alphabet, defaultSize = 21) => { + // First, a bitmask is necessary to generate the ID. The bitmask makes bytes + // values closer to the alphabet size. The bitmask calculates the closest + // `2^31 - 1` number, which exceeds the alphabet size. + // For example, the bitmask for the alphabet size 30 is 31 (00011111). + // `Math.clz32` is not used, because it is not available in browsers. + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + // Though, the bitmask solution is not perfect since the bytes exceeding + // the alphabet size are refused. Therefore, to reliably generate the ID, + // the random bytes redundancy has to be satisfied. + + // Note: every hardware random generator call is performance expensive, + // because the system call for entropy collection takes a lot of time. + // So, to avoid additional system calls, extra bytes are requested in advance. + + // Next, a step determines how many random bytes to generate. 
+ // The number of random bytes gets decided upon the ID size, mask, + // alphabet size, and magic number 1.6 (using 1.6 peaks at performance + // according to benchmarks). + + // `-~f => Math.ceil(f)` if f is a float + // `-~i => i + 1` if i is an integer + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + + return async (size = defaultSize) => { + let id = '' + while (true) { + let bytes = crypto.getRandomValues(new Uint8Array(step)) + // A compact alternative for `for (var i = 0; i < step; i++)`. + let i = step | 0 + while (i--) { + // Adding `|| ''` refuses a random byte that exceeds the alphabet size. + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} + +let nanoid = async (size = 21) => { + let id = '' + let bytes = crypto.getRandomValues(new Uint8Array((size |= 0))) + + // A compact alternative for `for (var i = 0; i < step; i++)`. + while (size--) { + // It is incorrect to use bytes exceeding the alphabet size. + // The following mask reduces the random byte in the 0-255 value + // range to the 0-63 value range. Therefore, adding hacks, such + // as empty string fallback or magic numbers, is unneccessary because + // the bitmask trims bytes down to the alphabet size. + let byte = bytes[size] & 63 + if (byte < 36) { + // `0-9a-z` + id += byte.toString(36) + } else if (byte < 62) { + // `A-Z` + id += (byte - 26).toString(36).toUpperCase() + } else if (byte < 63) { + id += '_' + } else { + id += '-' + } + } + return id +} + +module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.browser.js b/node_modules/nanoid/async/index.browser.js new file mode 100644 index 0000000..fbaa230 --- /dev/null +++ b/node_modules/nanoid/async/index.browser.js @@ -0,0 +1,34 @@ +let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return async (size = defaultSize) => { + let id = '' + while (true) { + let bytes = crypto.getRandomValues(new Uint8Array(step)) + let i = step | 0 + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let nanoid = async (size = 21) => { + let id = '' + let bytes = crypto.getRandomValues(new Uint8Array((size |= 0))) + while (size--) { + let byte = bytes[size] & 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte < 63) { + id += '_' + } else { + id += '-' + } + } + return id +} +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.cjs b/node_modules/nanoid/async/index.cjs new file mode 100644 index 0000000..f1b0ad0 --- /dev/null +++ b/node_modules/nanoid/async/index.cjs @@ -0,0 +1,71 @@ +let crypto = require('crypto') + +let { urlAlphabet } = require('../url-alphabet/index.cjs') + +// `crypto.randomFill()` is a little faster than `crypto.randomBytes()`, +// because it is possible to use in combination with `Buffer.allocUnsafe()`. +let random = bytes => + new Promise((resolve, reject) => { + // `Buffer.allocUnsafe()` is faster because it doesnโ€™t flush the memory. + // Memory flushing is unnecessary since the buffer allocation itself resets + // the memory with the new bytes. 
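As a quick aside, the bitmask and step comments in the browser build above can be checked with a few lines of standalone arithmetic. This is a sketch, not part of the vendored files; the 30-symbol alphabet is the example the comments themselves cite.

```js
// Standalone check of the numbers quoted in the comments above (sketch only).
// For a 30-symbol alphabet the mask should come out as 31 (binary 00011111).
const alphabet = "0123456789abcdefghijklmnopqrst"; // 30 symbols, assumed example
const defaultSize = 21;

// Browser-build formulas, as in customAlphabet above; the Node builds further
// down compute the same values with Math.clz32 and Math.ceil.
const mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1;
const step = -~((1.6 * mask * defaultSize) / alphabet.length);

console.log(mask); //=> 31
console.log(step); //=> 35  (i.e. Math.ceil(1.6 * 31 * 21 / 30))
```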
+ crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) + +let customAlphabet = (alphabet, defaultSize = 21) => { + // First, a bitmask is necessary to generate the ID. The bitmask makes bytes + // values closer to the alphabet size. The bitmask calculates the closest + // `2^31 - 1` number, which exceeds the alphabet size. + // For example, the bitmask for the alphabet size 30 is 31 (00011111). + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + // Though, the bitmask solution is not perfect since the bytes exceeding + // the alphabet size are refused. Therefore, to reliably generate the ID, + // the random bytes redundancy has to be satisfied. + + // Note: every hardware random generator call is performance expensive, + // because the system call for entropy collection takes a lot of time. + // So, to avoid additional system calls, extra bytes are requested in advance. + + // Next, a step determines how many random bytes to generate. + // The number of random bytes gets decided upon the ID size, mask, + // alphabet size, and magic number 1.6 (using 1.6 peaks at performance + // according to benchmarks). + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + // A compact alternative for `for (var i = 0; i < step; i++)`. + let i = step + while (i--) { + // Adding `|| ''` refuses a random byte that exceeds the alphabet size. + id += alphabet[bytes[i] & mask] || '' + if (id.length >= size) return id + } + return tick(id, size) + }) + + return size => tick('', size) +} + +let nanoid = (size = 21) => + random((size |= 0)).then(bytes => { + let id = '' + // A compact alternative for `for (var i = 0; i < step; i++)`. + while (size--) { + // It is incorrect to use bytes exceeding the alphabet size. + // The following mask reduces the random byte in the 0-255 value + // range to the 0-63 value range. Therefore, adding hacks, such + // as empty string fallback or magic numbers, is unneccessary because + // the bitmask trims bytes down to the alphabet size. + id += urlAlphabet[bytes[size] & 63] + } + return id + }) + +module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.d.ts b/node_modules/nanoid/async/index.d.ts new file mode 100644 index 0000000..9e91965 --- /dev/null +++ b/node_modules/nanoid/async/index.d.ts @@ -0,0 +1,56 @@ +/** + * Generate secure URL-friendly unique ID. The non-blocking version. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid/async' + * nanoid().then(id => { + * model.id = id + * }) + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A promise with a random string. + */ +export function nanoid(size?: number): Promise + +/** + * A low-level function. + * Generate secure unique ID with custom alphabet. The non-blocking version. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A function that returns a promise with a random string. 
+ * + * ```js + * import { customAlphabet } from 'nanoid/async' + * const nanoid = customAlphabet('0123456789ะฐะฑะฒะณะดะตั‘', 5) + * nanoid().then(id => { + * model.id = id //=> "8ั‘56ะฐ" + * }) + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => Promise + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { random } from 'nanoid/async' + * random(5).then(bytes => { + * bytes //=> [10, 67, 212, 67, 89] + * }) + * ``` + * + * @param bytes Size of the array. + * @returns A promise with a random bytes array. + */ +export function random(bytes: number): Promise diff --git a/node_modules/nanoid/async/index.js b/node_modules/nanoid/async/index.js new file mode 100644 index 0000000..cec454a --- /dev/null +++ b/node_modules/nanoid/async/index.js @@ -0,0 +1,35 @@ +import crypto from 'crypto' +import { urlAlphabet } from '../url-alphabet/index.js' +let random = bytes => + new Promise((resolve, reject) => { + crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length >= size) return id + } + return tick(id, size) + }) + return size => tick('', size) +} +let nanoid = (size = 21) => + random((size |= 0)).then(bytes => { + let id = '' + while (size--) { + id += urlAlphabet[bytes[size] & 63] + } + return id + }) +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.native.js b/node_modules/nanoid/async/index.native.js new file mode 100644 index 0000000..7c1d6f3 --- /dev/null +++ b/node_modules/nanoid/async/index.native.js @@ -0,0 +1,26 @@ +import { getRandomBytesAsync } from 'expo-random' +import { urlAlphabet } from '../url-alphabet/index.js' +let random = getRandomBytesAsync +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length >= size) return id + } + return tick(id, size) + }) + return size => tick('', size) +} +let nanoid = (size = 21) => + random((size |= 0)).then(bytes => { + let id = '' + while (size--) { + id += urlAlphabet[bytes[size] & 63] + } + return id + }) +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/package.json b/node_modules/nanoid/async/package.json new file mode 100644 index 0000000..578cdb4 --- /dev/null +++ b/node_modules/nanoid/async/package.json @@ -0,0 +1,12 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": { + "./index.js": "./index.native.js" + }, + "browser": { + "./index.js": "./index.browser.js", + "./index.cjs": "./index.browser.cjs" + } +} \ No newline at end of file diff --git a/node_modules/nanoid/bin/nanoid.cjs b/node_modules/nanoid/bin/nanoid.cjs new file mode 100644 index 0000000..c76db0f --- /dev/null +++ b/node_modules/nanoid/bin/nanoid.cjs @@ -0,0 +1,55 @@ +#!/usr/bin/env node + +let { nanoid, customAlphabet } = require('..') + +function 
print(msg) { + process.stdout.write(msg + '\n') +} + +function error(msg) { + process.stderr.write(msg + '\n') + process.exit(1) +} + +if (process.argv.includes('--help') || process.argv.includes('-h')) { + print(` + Usage + $ nanoid [options] + + Options + -s, --size Generated ID size + -a, --alphabet Alphabet to use + -h, --help Show this help + + Examples + $ nanoid --s 15 + S9sBF77U6sDB8Yg + + $ nanoid --size 10 --alphabet abc + bcabababca`) + process.exit() +} + +let alphabet, size +for (let i = 2; i < process.argv.length; i++) { + let arg = process.argv[i] + if (arg === '--size' || arg === '-s') { + size = Number(process.argv[i + 1]) + i += 1 + if (Number.isNaN(size) || size <= 0) { + error('Size must be positive integer') + } + } else if (arg === '--alphabet' || arg === '-a') { + alphabet = process.argv[i + 1] + i += 1 + } else { + error('Unknown argument ' + arg) + } +} + +if (alphabet) { + let customNanoid = customAlphabet(alphabet, size) + print(customNanoid()) +} else { + print(nanoid(size)) +} diff --git a/node_modules/nanoid/index.browser.cjs b/node_modules/nanoid/index.browser.cjs new file mode 100644 index 0000000..d21a91f --- /dev/null +++ b/node_modules/nanoid/index.browser.cjs @@ -0,0 +1,72 @@ +// This file replaces `index.js` in bundlers like webpack or Rollup, +// according to `browser` config in `package.json`. + +let { urlAlphabet } = require('./url-alphabet/index.cjs') + +let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) + +let customRandom = (alphabet, defaultSize, getRandom) => { + // First, a bitmask is necessary to generate the ID. The bitmask makes bytes + // values closer to the alphabet size. The bitmask calculates the closest + // `2^31 - 1` number, which exceeds the alphabet size. + // For example, the bitmask for the alphabet size 30 is 31 (00011111). + // `Math.clz32` is not used, because it is not available in browsers. + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + // Though, the bitmask solution is not perfect since the bytes exceeding + // the alphabet size are refused. Therefore, to reliably generate the ID, + // the random bytes redundancy has to be satisfied. + + // Note: every hardware random generator call is performance expensive, + // because the system call for entropy collection takes a lot of time. + // So, to avoid additional system calls, extra bytes are requested in advance. + + // Next, a step determines how many random bytes to generate. + // The number of random bytes gets decided upon the ID size, mask, + // alphabet size, and magic number 1.6 (using 1.6 peaks at performance + // according to benchmarks). + + // `-~f => Math.ceil(f)` if f is a float + // `-~i => i + 1` if i is an integer + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + // A compact alternative for `for (var i = 0; i < step; i++)`. + let j = step | 0 + while (j--) { + // Adding `|| ''` refuses a random byte that exceeds the alphabet size. + id += alphabet[bytes[j] & mask] || '' + if (id.length === size) return id + } + } + } +} + +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) + +let nanoid = (size = 21) => + crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { + // It is incorrect to use bytes exceeding the alphabet size. + // The following mask reduces the random byte in the 0-255 value + // range to the 0-63 value range. 
Therefore, adding hacks, such + // as empty string fallback or magic numbers, is unneccessary because + // the bitmask trims bytes down to the alphabet size. + byte &= 63 + if (byte < 36) { + // `0-9a-z` + id += byte.toString(36) + } else if (byte < 62) { + // `A-Z` + id += (byte - 26).toString(36).toUpperCase() + } else if (byte > 62) { + id += '-' + } else { + id += '_' + } + return id + }, '') + +module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.browser.js b/node_modules/nanoid/index.browser.js new file mode 100644 index 0000000..7d3b876 --- /dev/null +++ b/node_modules/nanoid/index.browser.js @@ -0,0 +1,34 @@ +import { urlAlphabet } from './url-alphabet/index.js' +let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customRandom = (alphabet, defaultSize, getRandom) => { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let j = step | 0 + while (j--) { + id += alphabet[bytes[j] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => + crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { + byte &= 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte > 62) { + id += '-' + } else { + id += '_' + } + return id + }, '') +export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.cjs b/node_modules/nanoid/index.cjs new file mode 100644 index 0000000..c20e374 --- /dev/null +++ b/node_modules/nanoid/index.cjs @@ -0,0 +1,85 @@ +let crypto = require('crypto') + +let { urlAlphabet } = require('./url-alphabet/index.cjs') + +// It is best to make fewer, larger requests to the crypto module to +// avoid system call overhead. So, random numbers are generated in a +// pool. The pool is a Buffer that is larger than the initial random +// request size by this multiplier. The pool is enlarged if subsequent +// requests exceed the maximum buffer size. +const POOL_SIZE_MULTIPLIER = 128 +let pool, poolOffset + +let fillPool = bytes => { + if (!pool || pool.length < bytes) { + pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) + crypto.randomFillSync(pool) + poolOffset = 0 + } else if (poolOffset + bytes > pool.length) { + crypto.randomFillSync(pool) + poolOffset = 0 + } + poolOffset += bytes +} + +let random = bytes => { + // `|=` convert `bytes` to number to prevent `valueOf` abusing and pool pollution + fillPool((bytes |= 0)) + return pool.subarray(poolOffset - bytes, poolOffset) +} + +let customRandom = (alphabet, defaultSize, getRandom) => { + // First, a bitmask is necessary to generate the ID. The bitmask makes bytes + // values closer to the alphabet size. The bitmask calculates the closest + // `2^31 - 1` number, which exceeds the alphabet size. + // For example, the bitmask for the alphabet size 30 is 31 (00011111). + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + // Though, the bitmask solution is not perfect since the bytes exceeding + // the alphabet size are refused. Therefore, to reliably generate the ID, + // the random bytes redundancy has to be satisfied. 
+ + // Note: every hardware random generator call is performance expensive, + // because the system call for entropy collection takes a lot of time. + // So, to avoid additional system calls, extra bytes are requested in advance. + + // Next, a step determines how many random bytes to generate. + // The number of random bytes gets decided upon the ID size, mask, + // alphabet size, and magic number 1.6 (using 1.6 peaks at performance + // according to benchmarks). + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + // A compact alternative for `for (let i = 0; i < step; i++)`. + let i = step + while (i--) { + // Adding `|| ''` refuses a random byte that exceeds the alphabet size. + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} + +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) + +let nanoid = (size = 21) => { + // `|=` convert `size` to number to prevent `valueOf` abusing and pool pollution + fillPool((size |= 0)) + let id = '' + // We are reading directly from the random pool to avoid creating new array + for (let i = poolOffset - size; i < poolOffset; i++) { + // It is incorrect to use bytes exceeding the alphabet size. + // The following mask reduces the random byte in the 0-255 value + // range to the 0-63 value range. Therefore, adding hacks, such + // as empty string fallback or magic numbers, is unneccessary because + // the bitmask trims bytes down to the alphabet size. + id += urlAlphabet[pool[i] & 63] + } + return id +} + +module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.d.cts b/node_modules/nanoid/index.d.cts new file mode 100644 index 0000000..3e111a3 --- /dev/null +++ b/node_modules/nanoid/index.d.cts @@ -0,0 +1,91 @@ +/** + * Generate secure URL-friendly unique ID. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate secure unique ID with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. + * + * ```js + * const { customAlphabet } = require('nanoid') + * const nanoid = customAlphabet('0123456789ะฐะฑะฒะณะดะตั‘', 5) + * nanoid() //=> "8ั‘56ะฐ" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string + +/** + * Generate unique ID with custom random generator and alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * ```js + * import { customRandom } from 'nanoid/format' + * + * const nanoid = customRandom('abcdef', 5, size => { + * const random = [] + * for (let i = 0; i < size; i++) { + * random.push(randomByte()) + * } + * return random + * }) + * + * nanoid() //=> "fbaef" + * ``` + * + * @param alphabet Alphabet used to generate a random string. + * @param size Size of the random string. + * @param random A random bytes generator. 
+ * @returns A random string generator. + */ +export function customRandom( + alphabet: string, + size: number, + random: (bytes: number) => Uint8Array +): () => string + +/** + * URL safe symbols. + * + * ```js + * import { urlAlphabet } from 'nanoid' + * const nanoid = customAlphabet(urlAlphabet, 10) + * nanoid() //=> "Uakgb_J5m9" + * ``` + */ +export const urlAlphabet: string + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { customRandom, random } from 'nanoid' + * const nanoid = customRandom("abcdef", 5, random) + * ``` + * + * @param bytes Size of the array. + * @returns An array of random bytes. + */ +export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.d.ts b/node_modules/nanoid/index.d.ts new file mode 100644 index 0000000..3e111a3 --- /dev/null +++ b/node_modules/nanoid/index.d.ts @@ -0,0 +1,91 @@ +/** + * Generate secure URL-friendly unique ID. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate secure unique ID with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. + * + * ```js + * const { customAlphabet } = require('nanoid') + * const nanoid = customAlphabet('0123456789ะฐะฑะฒะณะดะตั‘', 5) + * nanoid() //=> "8ั‘56ะฐ" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string + +/** + * Generate unique ID with custom random generator and alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * ```js + * import { customRandom } from 'nanoid/format' + * + * const nanoid = customRandom('abcdef', 5, size => { + * const random = [] + * for (let i = 0; i < size; i++) { + * random.push(randomByte()) + * } + * return random + * }) + * + * nanoid() //=> "fbaef" + * ``` + * + * @param alphabet Alphabet used to generate a random string. + * @param size Size of the random string. + * @param random A random bytes generator. + * @returns A random string generator. + */ +export function customRandom( + alphabet: string, + size: number, + random: (bytes: number) => Uint8Array +): () => string + +/** + * URL safe symbols. + * + * ```js + * import { urlAlphabet } from 'nanoid' + * const nanoid = customAlphabet(urlAlphabet, 10) + * nanoid() //=> "Uakgb_J5m9" + * ``` + */ +export const urlAlphabet: string + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { customRandom, random } from 'nanoid' + * const nanoid = customRandom("abcdef", 5, random) + * ``` + * + * @param bytes Size of the array. + * @returns An array of random bytes. 
+ */ +export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.js b/node_modules/nanoid/index.js new file mode 100644 index 0000000..9bc909d --- /dev/null +++ b/node_modules/nanoid/index.js @@ -0,0 +1,45 @@ +import crypto from 'crypto' +import { urlAlphabet } from './url-alphabet/index.js' +const POOL_SIZE_MULTIPLIER = 128 +let pool, poolOffset +let fillPool = bytes => { + if (!pool || pool.length < bytes) { + pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) + crypto.randomFillSync(pool) + poolOffset = 0 + } else if (poolOffset + bytes > pool.length) { + crypto.randomFillSync(pool) + poolOffset = 0 + } + poolOffset += bytes +} +let random = bytes => { + fillPool((bytes |= 0)) + return pool.subarray(poolOffset - bytes, poolOffset) +} +let customRandom = (alphabet, defaultSize, getRandom) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => { + fillPool((size |= 0)) + let id = '' + for (let i = poolOffset - size; i < poolOffset; i++) { + id += urlAlphabet[pool[i] & 63] + } + return id +} +export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/nanoid.js b/node_modules/nanoid/nanoid.js new file mode 100644 index 0000000..ec242ea --- /dev/null +++ b/node_modules/nanoid/nanoid.js @@ -0,0 +1 @@ +export let nanoid=(t=21)=>crypto.getRandomValues(new Uint8Array(t)).reduce(((t,e)=>t+=(e&=63)<36?e.toString(36):e<62?(e-26).toString(36).toUpperCase():e<63?"_":"-"),""); \ No newline at end of file diff --git a/node_modules/nanoid/non-secure/index.cjs b/node_modules/nanoid/non-secure/index.cjs new file mode 100644 index 0000000..d51fcb6 --- /dev/null +++ b/node_modules/nanoid/non-secure/index.cjs @@ -0,0 +1,34 @@ +// This alphabet uses `A-Za-z0-9_-` symbols. +// The order of characters is optimized for better gzip and brotli compression. +// References to the same file (works both for gzip and brotli): +// `'use`, `andom`, and `rict'` +// References to the brotli default dictionary: +// `-26T`, `1983`, `40px`, `75px`, `bush`, `jack`, `mind`, `very`, and `wolf` +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' + +let customAlphabet = (alphabet, defaultSize = 21) => { + return (size = defaultSize) => { + let id = '' + // A compact alternative for `for (var i = 0; i < step; i++)`. + let i = size | 0 + while (i--) { + // `| 0` is more compact and faster than `Math.floor()`. + id += alphabet[(Math.random() * alphabet.length) | 0] + } + return id + } +} + +let nanoid = (size = 21) => { + let id = '' + // A compact alternative for `for (var i = 0; i < step; i++)`. + let i = size | 0 + while (i--) { + // `| 0` is more compact and faster than `Math.floor()`. + id += urlAlphabet[(Math.random() * 64) | 0] + } + return id +} + +module.exports = { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/index.d.ts b/node_modules/nanoid/non-secure/index.d.ts new file mode 100644 index 0000000..4965322 --- /dev/null +++ b/node_modules/nanoid/non-secure/index.d.ts @@ -0,0 +1,33 @@ +/** + * Generate URL-friendly unique ID. 
This method uses the non-secure + * predictable random generator with bigger collision probability. + * + * ```js + * import { nanoid } from 'nanoid/non-secure' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate a unique ID based on a custom alphabet. + * This method uses the non-secure predictable random generator + * with bigger collision probability. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. + * + * ```js + * import { customAlphabet } from 'nanoid/non-secure' + * const nanoid = customAlphabet('0123456789ะฐะฑะฒะณะดะตั‘', 5) + * model.id = //=> "8ั‘56ะฐ" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string diff --git a/node_modules/nanoid/non-secure/index.js b/node_modules/nanoid/non-secure/index.js new file mode 100644 index 0000000..2ea5827 --- /dev/null +++ b/node_modules/nanoid/non-secure/index.js @@ -0,0 +1,21 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +let customAlphabet = (alphabet, defaultSize = 21) => { + return (size = defaultSize) => { + let id = '' + let i = size | 0 + while (i--) { + id += alphabet[(Math.random() * alphabet.length) | 0] + } + return id + } +} +let nanoid = (size = 21) => { + let id = '' + let i = size | 0 + while (i--) { + id += urlAlphabet[(Math.random() * 64) | 0] + } + return id +} +export { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/package.json b/node_modules/nanoid/non-secure/package.json new file mode 100644 index 0000000..9930d6a --- /dev/null +++ b/node_modules/nanoid/non-secure/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": "index.js" +} \ No newline at end of file diff --git a/node_modules/nanoid/package.json b/node_modules/nanoid/package.json new file mode 100644 index 0000000..a3d3f44 --- /dev/null +++ b/node_modules/nanoid/package.json @@ -0,0 +1,89 @@ +{ + "name": "nanoid", + "version": "3.3.11", + "description": "A tiny (116 bytes), secure URL-friendly unique string ID generator", + "keywords": [ + "uuid", + "random", + "id", + "url" + ], + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "author": "Andrey Sitnik ", + "license": "MIT", + "repository": "ai/nanoid", + "browser": { + "./index.js": "./index.browser.js", + "./async/index.js": "./async/index.browser.js", + "./async/index.cjs": "./async/index.browser.cjs", + "./index.cjs": "./index.browser.cjs" + }, + "react-native": "index.js", + "bin": "./bin/nanoid.cjs", + "sideEffects": false, + "types": "./index.d.ts", + "type": "module", + "main": "index.cjs", + "module": "index.js", + "exports": { + ".": { + "react-native": "./index.browser.js", + "browser": "./index.browser.js", + "require": { + "types": "./index.d.cts", + "default": "./index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "default": "./index.js" + }, + "./package.json": "./package.json", + "./async/package.json": "./async/package.json", + "./async": { + "browser": "./async/index.browser.js", + "require": { + "types": "./index.d.cts", + "default": "./async/index.cjs" + }, + "import": { + 
"types": "./index.d.ts", + "default": "./async/index.js" + }, + "default": "./async/index.js" + }, + "./non-secure/package.json": "./non-secure/package.json", + "./non-secure": { + "require": { + "types": "./index.d.cts", + "default": "./non-secure/index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./non-secure/index.js" + }, + "default": "./non-secure/index.js" + }, + "./url-alphabet/package.json": "./url-alphabet/package.json", + "./url-alphabet": { + "require": { + "types": "./index.d.cts", + "default": "./url-alphabet/index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./url-alphabet/index.js" + }, + "default": "./url-alphabet/index.js" + } + } +} diff --git a/node_modules/nanoid/url-alphabet/index.cjs b/node_modules/nanoid/url-alphabet/index.cjs new file mode 100644 index 0000000..a332f0b --- /dev/null +++ b/node_modules/nanoid/url-alphabet/index.cjs @@ -0,0 +1,7 @@ +// This alphabet uses `A-Za-z0-9_-` symbols. +// The order of characters is optimized for better gzip and brotli compression. +// Same as in non-secure/index.js +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' + +module.exports = { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/index.js b/node_modules/nanoid/url-alphabet/index.js new file mode 100644 index 0000000..c2782e5 --- /dev/null +++ b/node_modules/nanoid/url-alphabet/index.js @@ -0,0 +1,3 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +export { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/package.json b/node_modules/nanoid/url-alphabet/package.json new file mode 100644 index 0000000..9930d6a --- /dev/null +++ b/node_modules/nanoid/url-alphabet/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": "index.js" +} \ No newline at end of file diff --git a/node_modules/picocolors/LICENSE b/node_modules/picocolors/LICENSE new file mode 100644 index 0000000..46c9b95 --- /dev/null +++ b/node_modules/picocolors/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2021-2024 Oleksii Raspopov, Kostiantyn Denysov, Anton Verinov + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/picocolors/README.md b/node_modules/picocolors/README.md new file mode 100644 index 0000000..8e47aa8 --- /dev/null +++ b/node_modules/picocolors/README.md @@ -0,0 +1,21 @@ +# picocolors + +The tiniest and the fastest library for terminal output formatting with ANSI colors. + +```javascript +import pc from "picocolors" + +console.log( + pc.green(`How are ${pc.italic(`you`)} doing?`) +) +``` + +- **No dependencies.** +- **14 times** smaller and **2 times** faster than chalk. +- Used by popular tools like PostCSS, SVGO, Stylelint, and Browserslist. +- Node.js v6+ & browsers support. Support for both CJS and ESM projects. 
+- TypeScript type declarations included. +- [`NO_COLOR`](https://no-color.org/) friendly. + +## Docs +Read **[full docs](https://github.com/alexeyraspopov/picocolors#readme)** on GitHub. diff --git a/node_modules/picocolors/package.json b/node_modules/picocolors/package.json new file mode 100644 index 0000000..372d4b6 --- /dev/null +++ b/node_modules/picocolors/package.json @@ -0,0 +1,25 @@ +{ + "name": "picocolors", + "version": "1.1.1", + "main": "./picocolors.js", + "types": "./picocolors.d.ts", + "browser": { + "./picocolors.js": "./picocolors.browser.js" + }, + "sideEffects": false, + "description": "The tiniest and the fastest library for terminal output formatting with ANSI colors", + "files": [ + "picocolors.*", + "types.d.ts" + ], + "keywords": [ + "terminal", + "colors", + "formatting", + "cli", + "console" + ], + "author": "Alexey Raspopov", + "repository": "alexeyraspopov/picocolors", + "license": "ISC" +} diff --git a/node_modules/picocolors/picocolors.browser.js b/node_modules/picocolors/picocolors.browser.js new file mode 100644 index 0000000..9dcf637 --- /dev/null +++ b/node_modules/picocolors/picocolors.browser.js @@ -0,0 +1,4 @@ +var x=String; +var create=function() {return {isColorSupported:false,reset:x,bold:x,dim:x,italic:x,underline:x,inverse:x,hidden:x,strikethrough:x,black:x,red:x,green:x,yellow:x,blue:x,magenta:x,cyan:x,white:x,gray:x,bgBlack:x,bgRed:x,bgGreen:x,bgYellow:x,bgBlue:x,bgMagenta:x,bgCyan:x,bgWhite:x,blackBright:x,redBright:x,greenBright:x,yellowBright:x,blueBright:x,magentaBright:x,cyanBright:x,whiteBright:x,bgBlackBright:x,bgRedBright:x,bgGreenBright:x,bgYellowBright:x,bgBlueBright:x,bgMagentaBright:x,bgCyanBright:x,bgWhiteBright:x}}; +module.exports=create(); +module.exports.createColors = create; diff --git a/node_modules/picocolors/picocolors.d.ts b/node_modules/picocolors/picocolors.d.ts new file mode 100644 index 0000000..94e146a --- /dev/null +++ b/node_modules/picocolors/picocolors.d.ts @@ -0,0 +1,5 @@ +import { Colors } from "./types" + +declare const picocolors: Colors & { createColors: (enabled?: boolean) => Colors } + +export = picocolors diff --git a/node_modules/picocolors/picocolors.js b/node_modules/picocolors/picocolors.js new file mode 100644 index 0000000..e32df85 --- /dev/null +++ b/node_modules/picocolors/picocolors.js @@ -0,0 +1,75 @@ +let p = process || {}, argv = p.argv || [], env = p.env || {} +let isColorSupported = + !(!!env.NO_COLOR || argv.includes("--no-color")) && + (!!env.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || ((p.stdout || {}).isTTY && env.TERM !== "dumb") || !!env.CI) + +let formatter = (open, close, replace = open) => + input => { + let string = "" + input, index = string.indexOf(close, open.length) + return ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close + } + +let replaceClose = (string, close, replace, index) => { + let result = "", cursor = 0 + do { + result += string.substring(cursor, index) + replace + cursor = index + close.length + index = string.indexOf(close, cursor) + } while (~index) + return result + string.substring(cursor) +} + +let createColors = (enabled = isColorSupported) => { + let f = enabled ? 
formatter : () => String + return { + isColorSupported: enabled, + reset: f("\x1b[0m", "\x1b[0m"), + bold: f("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m"), + dim: f("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m"), + italic: f("\x1b[3m", "\x1b[23m"), + underline: f("\x1b[4m", "\x1b[24m"), + inverse: f("\x1b[7m", "\x1b[27m"), + hidden: f("\x1b[8m", "\x1b[28m"), + strikethrough: f("\x1b[9m", "\x1b[29m"), + + black: f("\x1b[30m", "\x1b[39m"), + red: f("\x1b[31m", "\x1b[39m"), + green: f("\x1b[32m", "\x1b[39m"), + yellow: f("\x1b[33m", "\x1b[39m"), + blue: f("\x1b[34m", "\x1b[39m"), + magenta: f("\x1b[35m", "\x1b[39m"), + cyan: f("\x1b[36m", "\x1b[39m"), + white: f("\x1b[37m", "\x1b[39m"), + gray: f("\x1b[90m", "\x1b[39m"), + + bgBlack: f("\x1b[40m", "\x1b[49m"), + bgRed: f("\x1b[41m", "\x1b[49m"), + bgGreen: f("\x1b[42m", "\x1b[49m"), + bgYellow: f("\x1b[43m", "\x1b[49m"), + bgBlue: f("\x1b[44m", "\x1b[49m"), + bgMagenta: f("\x1b[45m", "\x1b[49m"), + bgCyan: f("\x1b[46m", "\x1b[49m"), + bgWhite: f("\x1b[47m", "\x1b[49m"), + + blackBright: f("\x1b[90m", "\x1b[39m"), + redBright: f("\x1b[91m", "\x1b[39m"), + greenBright: f("\x1b[92m", "\x1b[39m"), + yellowBright: f("\x1b[93m", "\x1b[39m"), + blueBright: f("\x1b[94m", "\x1b[39m"), + magentaBright: f("\x1b[95m", "\x1b[39m"), + cyanBright: f("\x1b[96m", "\x1b[39m"), + whiteBright: f("\x1b[97m", "\x1b[39m"), + + bgBlackBright: f("\x1b[100m", "\x1b[49m"), + bgRedBright: f("\x1b[101m", "\x1b[49m"), + bgGreenBright: f("\x1b[102m", "\x1b[49m"), + bgYellowBright: f("\x1b[103m", "\x1b[49m"), + bgBlueBright: f("\x1b[104m", "\x1b[49m"), + bgMagentaBright: f("\x1b[105m", "\x1b[49m"), + bgCyanBright: f("\x1b[106m", "\x1b[49m"), + bgWhiteBright: f("\x1b[107m", "\x1b[49m"), + } +} + +module.exports = createColors() +module.exports.createColors = createColors diff --git a/node_modules/picocolors/types.d.ts b/node_modules/picocolors/types.d.ts new file mode 100644 index 0000000..cd1aec4 --- /dev/null +++ b/node_modules/picocolors/types.d.ts @@ -0,0 +1,51 @@ +export type Formatter = (input: string | number | null | undefined) => string + +export interface Colors { + isColorSupported: boolean + + reset: Formatter + bold: Formatter + dim: Formatter + italic: Formatter + underline: Formatter + inverse: Formatter + hidden: Formatter + strikethrough: Formatter + + black: Formatter + red: Formatter + green: Formatter + yellow: Formatter + blue: Formatter + magenta: Formatter + cyan: Formatter + white: Formatter + gray: Formatter + + bgBlack: Formatter + bgRed: Formatter + bgGreen: Formatter + bgYellow: Formatter + bgBlue: Formatter + bgMagenta: Formatter + bgCyan: Formatter + bgWhite: Formatter + + blackBright: Formatter + redBright: Formatter + greenBright: Formatter + yellowBright: Formatter + blueBright: Formatter + magentaBright: Formatter + cyanBright: Formatter + whiteBright: Formatter + + bgBlackBright: Formatter + bgRedBright: Formatter + bgGreenBright: Formatter + bgYellowBright: Formatter + bgBlueBright: Formatter + bgMagentaBright: Formatter + bgCyanBright: Formatter + bgWhiteBright: Formatter +} diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE new file mode 100644 index 0000000..3608dca --- /dev/null +++ b/node_modules/picomatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md new file mode 100644 index 0000000..5062654 --- /dev/null +++ b/node_modules/picomatch/README.md @@ -0,0 +1,738 @@ +

+# Picomatch
+
+_(badges: npm version, test status, coverage status, downloads)_
+
+Blazing fast and accurate glob matcher written in JavaScript.
+No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.
+
+ +## Why picomatch? + +* **Lightweight** - No dependencies +* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. +* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) +* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) +* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. +* **Well tested** - Thousands of unit tests + +See the [library comparison](#library-comparisons) to other libraries. + +
+
+ +## Table of Contents + +
Click to expand + +- [Install](#install) +- [Usage](#usage) +- [API](#api) + * [picomatch](#picomatch) + * [.test](#test) + * [.matchBase](#matchbase) + * [.isMatch](#ismatch) + * [.parse](#parse) + * [.scan](#scan) + * [.compileRe](#compilere) + * [.makeRe](#makere) + * [.toRegex](#toregex) +- [Options](#options) + * [Picomatch options](#picomatch-options) + * [Scan Options](#scan-options) + * [Options Examples](#options-examples) +- [Globbing features](#globbing-features) + * [Basic globbing](#basic-globbing) + * [Advanced globbing](#advanced-globbing) + * [Braces](#braces) + * [Matching special characters as literals](#matching-special-characters-as-literals) +- [Library Comparisons](#library-comparisons) +- [Benchmarks](#benchmarks) +- [Philosophies](#philosophies) +- [About](#about) + * [Author](#author) + * [License](#license) + +_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ + +
+ +
+
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +npm install --save picomatch +``` + +
+ +## Usage + +The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. + +```js +const pm = require('picomatch'); +const isMatch = pm('*.js'); + +console.log(isMatch('abcd')); //=> false +console.log(isMatch('a.js')); //=> true +console.log(isMatch('a.md')); //=> false +console.log(isMatch('a/b.js')); //=> false +``` + +
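+The returned matcher is a plain predicate, so it can be created once and reused for repeat matching, for example when filtering a list of paths. A minimal sketch, assuming only the behavior documented above:
+
+```js
+const pm = require('picomatch');
+
+// Create the matcher once and reuse it for repeat matching.
+const isMatch = pm('*.js');
+
+console.log(['a.js', 'b.md', 'c.js'].filter(file => isMatch(file))); //=> [ 'a.js', 'c.js' ]
+```
+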
+ +## API + +### [picomatch](lib/picomatch.js#L31) + +Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. + +**Params** + +* `globs` **{String|Array}**: One or more glob patterns. +* `options` **{Object=}** +* `returns` **{Function=}**: Returns a matcher function. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch(glob[, options]); + +const isMatch = picomatch('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +**Example without node.js** + +For environments without `node.js`, `picomatch/posix` provides you a dependency-free matcher, without automatic OS detection. + +```js +const picomatch = require('picomatch/posix'); +// the same API, defaulting to posix paths +const isMatch = picomatch('a/*'); +console.log(isMatch('a\\b')); //=> false +console.log(isMatch('a/b')); //=> true + +// you can still configure the matcher function to accept windows paths +const isMatch = picomatch('a/*', { options: windows }); +console.log(isMatch('a\\b')); //=> true +console.log(isMatch('a/b')); //=> true +``` + +### [.test](lib/picomatch.js#L116) + +Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. + +**Params** + +* `input` **{String}**: String to test. +* `regex` **{RegExp}** +* `returns` **{Object}**: Returns an object with matching info. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.test(input, regex[, options]); + +console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); +// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } +``` + +### [.matchBase](lib/picomatch.js#L160) + +Match the basename of a filepath. + +**Params** + +* `input` **{String}**: String to test. +* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). +* `returns` **{Boolean}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.matchBase(input, glob[, options]); +console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true +``` + +### [.isMatch](lib/picomatch.js#L182) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* **{String|Array}**: str The string to test. +* **{String|Array}**: patterns One or more glob patterns to use for matching. +* **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.isMatch(string, patterns[, options]); + +console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(picomatch.isMatch('a.a', 'b.*')); //=> false +``` + +### [.parse](lib/picomatch.js#L198) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.parse(pattern[, options]); +``` + +### [.scan](lib/picomatch.js#L230) + +Scan a glob pattern to separate the pattern into segments. + +**Params** + +* `input` **{String}**: Glob pattern to scan. 
+* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.scan(input[, options]); + +const result = picomatch.scan('!./foo/*.js'); +console.log(result); +{ prefix: '!./', + input: '!./foo/*.js', + start: 3, + base: 'foo', + glob: '*.js', + isBrace: false, + isBracket: false, + isGlob: true, + isExtglob: false, + isGlobstar: false, + negated: true } +``` + +### [.compileRe](lib/picomatch.js#L244) + +Compile a regular expression from the `state` object returned by the +[parse()](#parse) method. + +**Params** + +* `state` **{Object}** +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. +* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. +* `returns` **{RegExp}** + +### [.makeRe](lib/picomatch.js#L285) + +Create a regular expression from a parsed glob pattern. + +**Params** + +* `state` **{String}**: The object returned from the `.parse` method. +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. +* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const picomatch = require('picomatch'); +const state = picomatch.parse('*.js'); +// picomatch.compileRe(state[, options]); + +console.log(picomatch.compileRe(state)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +### [.toRegex](lib/picomatch.js#L320) + +Create a regular expression from the given regex source string. + +**Params** + +* `source` **{String}**: Regular expression source string. +* `options` **{Object}** +* `returns` **{RegExp}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.toRegex(source[, options]); + +const { output } = picomatch.parse('*.js'); +console.log(picomatch.toRegex(output)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +
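+The methods above compose: `.parse` produces a state object, `.compileRe` compiles it into a regular expression, and `.test` runs that regex against an input. A minimal sketch, with expected values taken from the documented examples:
+
+```js
+const picomatch = require('picomatch');
+
+// Parse the glob once, compile the parsed state to a regex,
+// then reuse that regex for repeated tests.
+const state = picomatch.parse('*.js');
+const regex = picomatch.compileRe(state);
+
+console.log(picomatch.test('a.js', regex).isMatch);   //=> true
+console.log(picomatch.test('a/b.js', regex).isMatch); //=> false
+```
+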
+ +## Options + +### Picomatch options + +The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | +| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | +| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | +| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | +| `matchBase` | `boolean` | `false` | Alias for `basename` | +| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | +| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | +| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | +| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | +| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| +| `noext` | `boolean` | `false` | Alias for `noextglob` | +| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | +| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | +| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | +| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | +| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | +| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | +| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | +| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | +| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | +| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | +| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | +| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | +| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | +| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | +| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | +| `windows` | `boolean` | `false` | Also accept backslashes as the path separator. | + +### Scan Options + +In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | +| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.scan('!./foo/*.js', { tokens: true }); +console.log(result); +// { +// prefix: '!./', +// input: '!./foo/*.js', +// start: 3, +// base: 'foo', +// glob: '*.js', +// isBrace: false, +// isBracket: false, +// isGlob: true, +// isExtglob: false, +// isGlobstar: false, +// negated: true, +// maxDepth: 2, +// tokens: [ +// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, +// { value: 'foo', depth: 1, isGlob: false }, +// { value: '*.js', depth: 1, isGlob: true } +// ], +// slashes: [ 2, 6 ], +// parts: [ 'foo', '*.js' ] +// } +``` + +
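+A minimal sketch illustrating two of the options from the table above (`dot` and `nocase`); the expected results follow from the documented defaults:
+
+```js
+const picomatch = require('picomatch');
+
+// `dot` enables matching of dotfiles, which are ignored by default.
+console.log(picomatch.isMatch('.gitignore', '*'));                //=> false
+console.log(picomatch.isMatch('.gitignore', '*', { dot: true })); //=> true
+
+// `nocase` makes matching case-insensitive, like the regex `i` flag.
+console.log(picomatch.isMatch('README.MD', '*.md'));                   //=> false
+console.log(picomatch.isMatch('README.MD', '*.md', { nocase: true })); //=> true
+```
+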
+ +### Options Examples + +#### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a folder + +```js +const fill = require('fill-range'); +const regex = pm.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex); +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +#### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')); //=> true +``` + +#### options.onMatch + +```js +const onMatch = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onMatch }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onIgnore + +```js +const onIgnore = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onResult + +```js +const onResult = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onResult, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +
+
+
+## Globbing features
+
+* [Basic globbing](#basic-globbing) (Wildcard matching)
+* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching)
+
+### Basic globbing
+
+| **Character** | **Description** |
+| --- | --- |
+| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. |
+| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` with the `windows` option) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. |
+| `?` | Matches any single character, excluding path separators. Does _not match_ path separators or leading dots. |
+| `[abc]` | Matches any of the characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. |
+
+#### Matching behavior vs. Bash
+
+Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions (both are illustrated in the short sketch after this list):
+
+* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`.
+* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`.
+
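+A minimal sketch of the rules above; the expected results restate the table and the Bash exceptions listed in this section:
+
+```js
+const pm = require('picomatch');
+
+// `*` never crosses path separators; `**` matches nested directories.
+console.log(pm.isMatch('foo/bar/baz', '*'));  //=> false (Bash would match here)
+console.log(pm.isMatch('foo/bar/baz', '**')); //=> true
+
+// `?` matches exactly one character; `[abc]` matches one of the listed characters.
+console.log(pm.isMatch('a.js', '?.js'));     //=> true
+console.log(pm.isMatch('ab.js', '?.js'));    //=> false
+console.log(pm.isMatch('b.md', '[abc].md')); //=> true
+
+// Unlike Bash, negated extglobs are not matched greedily.
+console.log(pm.isMatch('foo', '!(foo)*'));    //=> false
+console.log(pm.isMatch('foobar', '!(foo)*')); //=> false
+```
+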
+ +### Advanced globbing + +* [extglobs](#extglobs) +* [POSIX brackets](#posix-brackets) +* [Braces](#brace-expansion) + +#### Extglobs + +| **Pattern** | **Description** | +| --- | --- | +| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | +| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | +| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | +| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | +| `!(pattern)` | Match _anything but_ `pattern` | + +**Examples** + +```js +const pm = require('picomatch'); + +// *(pattern) matches ZERO or more of "pattern" +console.log(pm.isMatch('a', 'a*(z)')); // true +console.log(pm.isMatch('az', 'a*(z)')); // true +console.log(pm.isMatch('azzz', 'a*(z)')); // true + +// +(pattern) matches ONE or more of "pattern" +console.log(pm.isMatch('a', 'a+(z)')); // false +console.log(pm.isMatch('az', 'a+(z)')); // true +console.log(pm.isMatch('azzz', 'a+(z)')); // true + +// supports multiple extglobs +console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false + +// supports nested extglobs +console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true +``` + +#### POSIX brackets + +POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. + +**Enable POSIX bracket support** + +```js +console.log(pm.makeRe('[[:word:]]+', { posix: true })); +//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ +``` + +**Supported POSIX classes** + +The following named POSIX bracket expressions are supported: + +* `[:alnum:]` - Alphanumeric characters, equ `[a-zA-Z0-9]` +* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. +* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. +* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. +* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. +* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. +* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. +* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. +* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. +* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. +* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. +* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. +* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. +* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. + +See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. + +### Braces + +Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces. + +### Matching special characters as literals + +If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes: + +**Special Characters** + +Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. 
+
+To match any of the following characters as literals, escape them with backslashes or quotes: `$^*+?()[]`
+
+Examples:
+
+```js
+console.log(pm.makeRe('foo/bar \\(1\\)'));
+console.log(pm.makeRe('foo/bar \\[1\\]'));
+```
+
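+A minimal sketch showing the effect of the escaped pattern above; assuming the documented escaping behavior, the parentheses are matched literally:
+
+```js
+const pm = require('picomatch');
+
+// With the parentheses escaped, they are matched as literal characters
+// rather than being parsed as an extglob group.
+const isMatch = pm('foo/bar \\(1\\)');
+
+console.log(isMatch('foo/bar (1)')); //=> true
+console.log(isMatch('foo/bar 1'));   //=> false
+```
+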
+
+
+## Library Comparisons
+
+The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets).
+
+| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - |
+| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - |
+| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - |
+| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - |
+| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - |
+| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ |
+| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ |
+| File system operations | - | - | - | - | - | - | - |
+
+
+ +## Benchmarks + +Performance comparison of picomatch and minimatch. + +_(Pay special attention to the last three benchmarks. Minimatch freezes on long ranges.)_ + +``` +# .makeRe star (*) + picomatch x 4,449,159 ops/sec ยฑ0.24% (97 runs sampled) + minimatch x 632,772 ops/sec ยฑ0.14% (98 runs sampled) + +# .makeRe star; dot=true (*) + picomatch x 3,500,079 ops/sec ยฑ0.26% (99 runs sampled) + minimatch x 564,916 ops/sec ยฑ0.23% (96 runs sampled) + +# .makeRe globstar (**) + picomatch x 3,261,000 ops/sec ยฑ0.27% (98 runs sampled) + minimatch x 1,664,766 ops/sec ยฑ0.20% (100 runs sampled) + +# .makeRe globstars (**/**/**) + picomatch x 3,284,469 ops/sec ยฑ0.18% (97 runs sampled) + minimatch x 1,435,880 ops/sec ยฑ0.34% (95 runs sampled) + +# .makeRe with leading star (*.txt) + picomatch x 3,100,197 ops/sec ยฑ0.35% (99 runs sampled) + minimatch x 428,347 ops/sec ยฑ0.42% (94 runs sampled) + +# .makeRe - basic braces ({a,b,c}*.txt) + picomatch x 443,578 ops/sec ยฑ1.33% (89 runs sampled) + minimatch x 107,143 ops/sec ยฑ0.35% (94 runs sampled) + +# .makeRe - short ranges ({a..z}*.txt) + picomatch x 415,484 ops/sec ยฑ0.76% (96 runs sampled) + minimatch x 14,299 ops/sec ยฑ0.26% (96 runs sampled) + +# .makeRe - medium ranges ({1..100000}*.txt) + picomatch x 395,020 ops/sec ยฑ0.87% (89 runs sampled) + minimatch x 2 ops/sec ยฑ4.59% (10 runs sampled) + +# .makeRe - long ranges ({1..10000000}*.txt) + picomatch x 400,036 ops/sec ยฑ0.83% (90 runs sampled) + minimatch (FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory) +``` + +
+
+
+## Philosophies
+
+The goal of this library is to be blazing fast, without compromising on accuracy.
+
+**Accuracy**
+
+The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly complicated when different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`.
+
+Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to implement our best judgement and rely on feedback from users to make improvements.
+
+**Performance**
+
+Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer.
+
+
+ +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright ยฉ 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js new file mode 100644 index 0000000..a753b1d --- /dev/null +++ b/node_modules/picomatch/index.js @@ -0,0 +1,17 @@ +'use strict'; + +const pico = require('./lib/picomatch'); +const utils = require('./lib/utils'); + +function picomatch(glob, options, returnState = false) { + // default to os.platform() + if (options && (options.windows === null || options.windows === undefined)) { + // don't mutate the original options object + options = { ...options, windows: utils.isWindows() }; + } + + return pico(glob, options, returnState); +} + +Object.assign(picomatch, pico); +module.exports = picomatch; diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js new file mode 100644 index 0000000..27b3e20 --- /dev/null +++ b/node_modules/picomatch/lib/constants.js @@ -0,0 +1,179 @@ +'use strict'; + +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; +const SEP = '/'; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR, + SEP +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)`, + SEP: '\\' +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: 
/([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js new file mode 100644 index 0000000..8fd8ff4 --- /dev/null +++ b/node_modules/picomatch/lib/parse.js @@ -0,0 +1,1085 @@ +'use strict'; + +const constants = require('./constants'); +const utils = require('./utils'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. 
+ * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(opts.windows); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.output = (prev.output || prev.value) + tok.value; + prev.value += tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current 
index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(opts.windows); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js new file mode 100644 index 0000000..d0ebd9f --- /dev/null +++ b/node_modules/picomatch/lib/picomatch.js @@ -0,0 +1,341 @@ +'use strict'; + +const scan = require('./scan'); +const parse = require('./parse'); +const utils = require('./utils'); +const constants = require('./constants'); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = opts.windows; + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(utils.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js new file mode 100644 index 0000000..e59cd7a --- /dev/null +++ b/node_modules/picomatch/lib/scan.js @@ -0,0 +1,391 @@ +'use strict'; + +const utils = require('./utils'); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = require('./constants'); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
+ * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js new file mode 100644 index 0000000..9c97cae --- /dev/null +++ b/node_modules/picomatch/lib/utils.js @@ -0,0 +1,72 @@ +/*global navigator*/ +'use strict'; + +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = require('./constants'); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.isWindows = () => { + if (typeof navigator !== 'undefined' && navigator.platform) { + const platform = navigator.platform.toLowerCase(); + return platform === 'win32' || platform === 'windows'; + } + + if (typeof process !== 'undefined' && process.platform) { + return process.platform === 'win32'; + } + + return false; +}; + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? '' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; + +exports.basename = (path, { windows } = {}) => { + const segs = path.split(windows ? 
/[\\/]/ : '/'); + const last = segs[segs.length - 1]; + + if (last === '') { + return segs[segs.length - 2]; + } + + return last; +}; diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json new file mode 100644 index 0000000..703a83d --- /dev/null +++ b/node_modules/picomatch/package.json @@ -0,0 +1,83 @@ +{ + "name": "picomatch", + "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", + "version": "4.0.2", + "homepage": "https://github.com/micromatch/picomatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "funding": "https://github.com/sponsors/jonschlinkert", + "repository": "micromatch/picomatch", + "bugs": { + "url": "https://github.com/micromatch/picomatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "posix.js", + "lib" + ], + "sideEffects": false, + "main": "index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "devDependencies": { + "eslint": "^8.57.0", + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^10.4.0", + "nyc": "^15.1.0", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "glob", + "match", + "picomatch" + ], + "nyc": { + "reporter": [ + "html", + "lcov", + "text-summary" + ] + }, + "verb": { + "toc": { + "render": true, + "method": "preWrite", + "maxdepth": 3 + }, + "layout": "empty", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "micromatch" + ] + }, + "reflinks": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "micromatch", + "minimatch", + "nanomatch", + "picomatch" + ] + } +} diff --git a/node_modules/picomatch/posix.js b/node_modules/picomatch/posix.js new file mode 100644 index 0000000..d2f2bc5 --- /dev/null +++ b/node_modules/picomatch/posix.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/picomatch'); diff --git a/node_modules/postcss/LICENSE b/node_modules/postcss/LICENSE new file mode 100644 index 0000000..da057b4 --- /dev/null +++ b/node_modules/postcss/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2013 Andrey Sitnik + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/postcss/README.md b/node_modules/postcss/README.md new file mode 100644 index 0000000..05fed07 --- /dev/null +++ b/node_modules/postcss/README.md @@ -0,0 +1,29 @@ +# PostCSS + +Philosopherโ€™s stone, logo of PostCSS + +PostCSS is a tool for transforming styles with JS plugins. +These plugins can lint your CSS, supportย variablesย andย mixins, +transpileย futureย CSSย syntax, inlineย images, andย more. + +PostCSS is used by industry leaders including Wikipedia, Twitter, Alibaba, +and JetBrains. Theย [Autoprefixer] and [Stylelint]ย PostCSS pluginsย are someย ofย theย most popular CSS tools. + +--- + +ย ย Built by + Evil Martians, go-to agency for developer tools. + +--- + +[Abstractย Syntaxย Tree]: https://en.wikipedia.org/wiki/Abstract_syntax_tree +[Evilย Martians]: https://evilmartians.com/?utm_source=postcss +[Autoprefixer]: https://github.com/postcss/autoprefixer +[Stylelint]: https://stylelint.io/ +[plugins]: https://github.com/postcss/postcss#plugins + + +## Docs +Read full docs **[here](https://postcss.org/)**. diff --git a/node_modules/postcss/lib/at-rule.d.ts b/node_modules/postcss/lib/at-rule.d.ts new file mode 100644 index 0000000..89fb505 --- /dev/null +++ b/node_modules/postcss/lib/at-rule.d.ts @@ -0,0 +1,140 @@ +import Container, { + ContainerProps, + ContainerWithChildren +} from './container.js' + +declare namespace AtRule { + export interface AtRuleRaws extends Record { + /** + * The space symbols after the last child of the node to the end of the node. + */ + after?: string + + /** + * The space between the at-rule name and its parameters. + */ + afterName?: string + + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the last parameter and `{` for rules. + */ + between?: string + + /** + * The ruleโ€™s selector with comments. + */ + params?: { + raw: string + value: string + } + + /** + * Contains `true` if the last child has an (optional) semicolon. + */ + semicolon?: boolean + } + + export interface AtRuleProps extends ContainerProps { + /** Name of the at-rule. */ + name: string + /** Parameters following the name of the at-rule. */ + params?: number | string + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: AtRuleRaws + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { AtRule_ as default } +} + +/** + * Represents an at-rule. + * + * ```js + * Once (root, { AtRule }) { + * let media = new AtRule({ name: 'media', params: 'print' }) + * media.append(โ€ฆ) + * root.append(media) + * } + * ``` + * + * If itโ€™s followed in the CSS by a `{}` block, this node will have + * a nodes property representing its children. + * + * ```js + * const root = postcss.parse('@charset "UTF-8"; @media print {}') + * + * const charset = root.first + * charset.type //=> 'atrule' + * charset.nodes //=> undefined + * + * const media = root.last + * media.nodes //=> [] + * ``` + */ +declare class AtRule_ extends Container { + /** + * An array containing the layerโ€™s children. 
+ * + * ```js + * const root = postcss.parse('@layer example { a { color: black } }') + * const layer = root.first + * layer.nodes.length //=> 1 + * layer.nodes[0].selector //=> 'a' + * ``` + * + * Can be `undefinded` if the at-rule has no body. + * + * ```js + * const root = postcss.parse('@layer a, b, c;') + * const layer = root.first + * layer.nodes //=> undefined + * ``` + */ + nodes: Container['nodes'] | undefined + parent: ContainerWithChildren | undefined + + raws: AtRule.AtRuleRaws + type: 'atrule' + /** + * The at-ruleโ€™s name immediately follows the `@`. + * + * ```js + * const root = postcss.parse('@media print {}') + * const media = root.first + * media.name //=> 'media' + * ``` + */ + get name(): string + set name(value: string) + + /** + * The at-ruleโ€™s parameters, the values that follow the at-ruleโ€™s name + * but precede any `{}` block. + * + * ```js + * const root = postcss.parse('@media print, screen {}') + * const media = root.first + * media.params //=> 'print, screen' + * ``` + */ + get params(): string + + set params(value: string) + + constructor(defaults?: AtRule.AtRuleProps) + assign(overrides: AtRule.AtRuleProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this +} + +declare class AtRule extends AtRule_ {} + +export = AtRule diff --git a/node_modules/postcss/lib/at-rule.js b/node_modules/postcss/lib/at-rule.js new file mode 100644 index 0000000..9486447 --- /dev/null +++ b/node_modules/postcss/lib/at-rule.js @@ -0,0 +1,25 @@ +'use strict' + +let Container = require('./container') + +class AtRule extends Container { + constructor(defaults) { + super(defaults) + this.type = 'atrule' + } + + append(...children) { + if (!this.proxyOf.nodes) this.nodes = [] + return super.append(...children) + } + + prepend(...children) { + if (!this.proxyOf.nodes) this.nodes = [] + return super.prepend(...children) + } +} + +module.exports = AtRule +AtRule.default = AtRule + +Container.registerAtRule(AtRule) diff --git a/node_modules/postcss/lib/comment.d.ts b/node_modules/postcss/lib/comment.d.ts new file mode 100644 index 0000000..6f1f66f --- /dev/null +++ b/node_modules/postcss/lib/comment.d.ts @@ -0,0 +1,68 @@ +import Container from './container.js' +import Node, { NodeProps } from './node.js' + +declare namespace Comment { + export interface CommentRaws extends Record { + /** + * The space symbols before the node. + */ + before?: string + + /** + * The space symbols between `/*` and the commentโ€™s text. + */ + left?: string + + /** + * The space symbols between the commentโ€™s text. + */ + right?: string + } + + export interface CommentProps extends NodeProps { + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: CommentRaws + /** Content of the comment. */ + text: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Comment_ as default } +} + +/** + * It represents a class that handles + * [CSS comments](https://developer.mozilla.org/en-US/docs/Web/CSS/Comments) + * + * ```js + * Once (root, { Comment }) { + * const note = new Comment({ text: 'Note: โ€ฆ' }) + * root.append(note) + * } + * ``` + * + * Remember that CSS comments inside selectors, at-rule parameters, + * or declaration values will be stored in the `raws` properties + * explained above. + */ +declare class Comment_ extends Node { + parent: Container | undefined + raws: Comment.CommentRaws + type: 'comment' + /** + * The comment's text. 
+ */ + get text(): string + + set text(value: string) + + constructor(defaults?: Comment.CommentProps) + assign(overrides: Comment.CommentProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this +} + +declare class Comment extends Comment_ {} + +export = Comment diff --git a/node_modules/postcss/lib/comment.js b/node_modules/postcss/lib/comment.js new file mode 100644 index 0000000..c566506 --- /dev/null +++ b/node_modules/postcss/lib/comment.js @@ -0,0 +1,13 @@ +'use strict' + +let Node = require('./node') + +class Comment extends Node { + constructor(defaults) { + super(defaults) + this.type = 'comment' + } +} + +module.exports = Comment +Comment.default = Comment diff --git a/node_modules/postcss/lib/container.d.ts b/node_modules/postcss/lib/container.d.ts new file mode 100644 index 0000000..c2b310b --- /dev/null +++ b/node_modules/postcss/lib/container.d.ts @@ -0,0 +1,483 @@ +import AtRule from './at-rule.js' +import Comment from './comment.js' +import Declaration from './declaration.js' +import Node, { ChildNode, ChildProps, NodeProps } from './node.js' +import { Root } from './postcss.js' +import Rule from './rule.js' + +declare namespace Container { + export type ContainerWithChildren = { + nodes: Child[] + } & ( + | AtRule + | Root + | Rule + ) + + export interface ValueOptions { + /** + * String thatโ€™s used to narrow down values and speed up the regexp search. + */ + fast?: string + + /** + * An array of property names. + */ + props?: readonly string[] + } + + export interface ContainerProps extends NodeProps { + nodes?: readonly (ChildProps | Node)[] + } + + /** + * All types that can be passed into container methods to create or add a new + * child node. + */ + export type NewChild = + | ChildProps + | Node + | readonly ChildProps[] + | readonly Node[] + | readonly string[] + | string + | undefined + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Container_ as default } +} + +/** + * The `Root`, `AtRule`, and `Rule` container nodes + * inherit some common methods to help work with their children. + * + * Note that all containers can store any content. If you write a rule inside + * a rule, PostCSS will parse it. + */ +declare abstract class Container_ extends Node { + /** + * An array containing the containerโ€™s children. + * + * ```js + * const root = postcss.parse('a { color: black }') + * root.nodes.length //=> 1 + * root.nodes[0].selector //=> 'a' + * root.nodes[0].nodes[0].prop //=> 'color' + * ``` + */ + nodes: Child[] | undefined + + /** + * The containerโ€™s first child. + * + * ```js + * rule.first === rules.nodes[0] + * ``` + */ + get first(): Child | undefined + + /** + * The containerโ€™s last child. + * + * ```js + * rule.last === rule.nodes[rule.nodes.length - 1] + * ``` + */ + get last(): Child | undefined + /** + * Inserts new nodes to the end of the container. + * + * ```js + * const decl1 = new Declaration({ prop: 'color', value: 'black' }) + * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) + * rule.append(decl1, decl2) + * + * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule + * root.append({ selector: 'a' }) // rule + * rule.append({ prop: 'color', value: 'black' }) // declaration + * rule.append({ text: 'Comment' }) // comment + * + * root.append('a {}') + * root.first.append('color: black; z-index: 1') + * ``` + * + * @param nodes New nodes. + * @return This node for methods chain. 
+ */ + append(...nodes: Container.NewChild[]): this + assign(overrides: Container.ContainerProps | object): this + clone(overrides?: Partial): this + + cloneAfter(overrides?: Partial): this + + cloneBefore(overrides?: Partial): this + /** + * Iterates through the containerโ€™s immediate children, + * calling `callback` for each child. + * + * Returning `false` in the callback will break iteration. + * + * This method only iterates through the containerโ€™s immediate children. + * If you need to recursively iterate through all the containerโ€™s descendant + * nodes, use `Container#walk`. + * + * Unlike the for `{}`-cycle or `Array#forEach` this iterator is safe + * if you are mutating the array of child nodes during iteration. + * PostCSS will adjust the current index to match the mutations. + * + * ```js + * const root = postcss.parse('a { color: black; z-index: 1 }') + * const rule = root.first + * + * for (const decl of rule.nodes) { + * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) + * // Cycle will be infinite, because cloneBefore moves the current node + * // to the next index + * } + * + * rule.each(decl => { + * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) + * // Will be executed only for color and z-index + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + each( + callback: (node: Child, index: number) => false | void + ): false | undefined + + /** + * Returns `true` if callback returns `true` + * for all of the containerโ€™s children. + * + * ```js + * const noPrefixes = rule.every(i => i.prop[0] !== '-') + * ``` + * + * @param condition Iterator returns true or false. + * @return Is every child pass condition. + */ + every( + condition: (node: Child, index: number, nodes: Child[]) => boolean + ): boolean + /** + * Returns a `child`โ€™s index within the `Container#nodes` array. + * + * ```js + * rule.index( rule.nodes[2] ) //=> 2 + * ``` + * + * @param child Child of the current container. + * @return Child index. + */ + index(child: Child | number): number + + /** + * Insert new node after old node within the container. + * + * @param oldNode Child or childโ€™s index. + * @param newNode New node. + * @return This node for methods chain. + */ + insertAfter(oldNode: Child | number, newNode: Container.NewChild): this + + /** + * Traverses the containerโ€™s descendant nodes, calling callback + * for each comment node. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * ```js + * root.walkComments(comment => { + * comment.remove() + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + + /** + * Insert new node before old node within the container. + * + * ```js + * rule.insertBefore(decl, decl.clone({ prop: '-webkit-' + decl.prop })) + * ``` + * + * @param oldNode Child or childโ€™s index. + * @param newNode New node. + * @return This node for methods chain. + */ + insertBefore(oldNode: Child | number, newNode: Container.NewChild): this + /** + * Inserts new nodes to the start of the container. 
+ * + * ```js + * const decl1 = new Declaration({ prop: 'color', value: 'black' }) + * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) + * rule.prepend(decl1, decl2) + * + * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule + * root.append({ selector: 'a' }) // rule + * rule.append({ prop: 'color', value: 'black' }) // declaration + * rule.append({ text: 'Comment' }) // comment + * + * root.append('a {}') + * root.first.append('color: black; z-index: 1') + * ``` + * + * @param nodes New nodes. + * @return This node for methods chain. + */ + prepend(...nodes: Container.NewChild[]): this + + /** + * Add child to the end of the node. + * + * ```js + * rule.push(new Declaration({ prop: 'color', value: 'black' })) + * ``` + * + * @param child New node. + * @return This node for methods chain. + */ + push(child: Child): this + + /** + * Removes all children from the container + * and cleans their parent properties. + * + * ```js + * rule.removeAll() + * rule.nodes.length //=> 0 + * ``` + * + * @return This node for methods chain. + */ + removeAll(): this + + /** + * Removes node from the container and cleans the parent properties + * from the node and its children. + * + * ```js + * rule.nodes.length //=> 5 + * rule.removeChild(decl) + * rule.nodes.length //=> 4 + * decl.parent //=> undefined + * ``` + * + * @param child Child or childโ€™s index. + * @return This node for methods chain. + */ + removeChild(child: Child | number): this + + replaceValues( + pattern: RegExp | string, + replaced: { (substring: string, ...args: any[]): string } | string + ): this + /** + * Passes all declaration values within the container that match pattern + * through callback, replacing those values with the returned result + * of callback. + * + * This method is useful if you are using a custom unit or function + * and need to iterate through all values. + * + * ```js + * root.replaceValues(/\d+rem/, { fast: 'rem' }, string => { + * return 15 * parseInt(string) + 'px' + * }) + * ``` + * + * @param pattern Replace pattern. + * @param {object} options Options to speed up the search. + * @param replaced String to replace pattern or callback + * that returns a new value. The callback + * will receive the same arguments + * as those passed to a function parameter + * of `String#replace`. + * @return This node for methods chain. + */ + replaceValues( + pattern: RegExp | string, + options: Container.ValueOptions, + replaced: { (substring: string, ...args: any[]): string } | string + ): this + + /** + * Returns `true` if callback returns `true` for (at least) one + * of the containerโ€™s children. + * + * ```js + * const hasPrefix = rule.some(i => i.prop[0] === '-') + * ``` + * + * @param condition Iterator returns true or false. + * @return Is some child pass condition. + */ + some( + condition: (node: Child, index: number, nodes: Child[]) => boolean + ): boolean + + /** + * Traverses the containerโ€™s descendant nodes, calling callback + * for each node. + * + * Like container.each(), this method is safe to use + * if you are mutating arrays during iteration. + * + * If you only need to iterate through the containerโ€™s immediate children, + * use `Container#each`. + * + * ```js + * root.walk(node => { + * // Traverses all descendant nodes. + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. 
+ */ + walk( + callback: (node: ChildNode, index: number) => false | void + ): false | undefined + + /** + * Traverses the containerโ€™s descendant nodes, calling callback + * for each at-rule node. + * + * If you pass a filter, iteration will only happen over at-rules + * that have matching names. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * ```js + * root.walkAtRules(rule => { + * if (isOld(rule.name)) rule.remove() + * }) + * + * let first = false + * root.walkAtRules('charset', rule => { + * if (!first) { + * first = true + * } else { + * rule.remove() + * } + * }) + * ``` + * + * @param name String or regular expression to filter at-rules by name. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkAtRules( + nameFilter: RegExp | string, + callback: (atRule: AtRule, index: number) => false | void + ): false | undefined + walkAtRules( + callback: (atRule: AtRule, index: number) => false | void + ): false | undefined + + walkComments( + callback: (comment: Comment, indexed: number) => false | void + ): false | undefined + walkComments( + callback: (comment: Comment, indexed: number) => false | void + ): false | undefined + + /** + * Traverses the containerโ€™s descendant nodes, calling callback + * for each declaration node. + * + * If you pass a filter, iteration will only happen over declarations + * with matching properties. + * + * ```js + * root.walkDecls(decl => { + * checkPropertySupport(decl.prop) + * }) + * + * root.walkDecls('border-radius', decl => { + * decl.remove() + * }) + * + * root.walkDecls(/^background/, decl => { + * decl.value = takeFirstColorFromGradient(decl.value) + * }) + * ``` + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * @param prop String or regular expression to filter declarations + * by property name. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkDecls( + propFilter: RegExp | string, + callback: (decl: Declaration, index: number) => false | void + ): false | undefined + walkDecls( + callback: (decl: Declaration, index: number) => false | void + ): false | undefined + /** + * Traverses the containerโ€™s descendant nodes, calling callback + * for each rule node. + * + * If you pass a filter, iteration will only happen over rules + * with matching selectors. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * ```js + * const selectors = [] + * root.walkRules(rule => { + * selectors.push(rule.selector) + * }) + * console.log(`Your CSS uses ${ selectors.length } selectors`) + * ``` + * + * @param selector String or regular expression to filter rules by selector. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkRules( + selectorFilter: RegExp | string, + callback: (rule: Rule, index: number) => false | void + ): false | undefined + walkRules( + callback: (rule: Rule, index: number) => false | void + ): false | undefined + /** + * An internal method that converts a {@link NewChild} into a list of actual + * child nodes that can then be added to this container. + * + * This ensures that the nodes' parent is set to this container, that they use + * the correct prototype chain, and that they're marked as dirty. 
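The filtered walkers declared above accept either a string or a regular expression; a short illustrative sketch with invented CSS:

```js
const postcss = require('postcss')

const root = postcss.parse(
  '@media print { a { color: black } } b { background: url(b.png) }'
)
const found = []

// At-rule names, declaration properties and rule selectors can all be filtered.
root.walkAtRules('media', atRule => found.push('@' + atRule.name))
root.walkDecls(/^background/, decl => found.push(decl.prop))
root.walkRules(rule => found.push(rule.selector))

console.log(found) //=> [ '@media', 'background', 'a', 'b' ]
```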
+ * + * @param mnodes The new node or nodes to add. + * @param sample A node from whose raws the new node's `before` raw should be + * taken. + * @param type This should be set to `'prepend'` if the new nodes will be + * inserted at the beginning of the container. + * @hidden + */ + protected normalize( + nodes: Container.NewChild, + sample: Node | undefined, + type?: 'prepend' | false + ): Child[] +} + +declare class Container< + Child extends Node = ChildNode +> extends Container_ {} + +export = Container diff --git a/node_modules/postcss/lib/container.js b/node_modules/postcss/lib/container.js new file mode 100644 index 0000000..edb07cc --- /dev/null +++ b/node_modules/postcss/lib/container.js @@ -0,0 +1,447 @@ +'use strict' + +let Comment = require('./comment') +let Declaration = require('./declaration') +let Node = require('./node') +let { isClean, my } = require('./symbols') + +let AtRule, parse, Root, Rule + +function cleanSource(nodes) { + return nodes.map(i => { + if (i.nodes) i.nodes = cleanSource(i.nodes) + delete i.source + return i + }) +} + +function markTreeDirty(node) { + node[isClean] = false + if (node.proxyOf.nodes) { + for (let i of node.proxyOf.nodes) { + markTreeDirty(i) + } + } +} + +class Container extends Node { + get first() { + if (!this.proxyOf.nodes) return undefined + return this.proxyOf.nodes[0] + } + + get last() { + if (!this.proxyOf.nodes) return undefined + return this.proxyOf.nodes[this.proxyOf.nodes.length - 1] + } + + append(...children) { + for (let child of children) { + let nodes = this.normalize(child, this.last) + for (let node of nodes) this.proxyOf.nodes.push(node) + } + + this.markDirty() + + return this + } + + cleanRaws(keepBetween) { + super.cleanRaws(keepBetween) + if (this.nodes) { + for (let node of this.nodes) node.cleanRaws(keepBetween) + } + } + + each(callback) { + if (!this.proxyOf.nodes) return undefined + let iterator = this.getIterator() + + let index, result + while (this.indexes[iterator] < this.proxyOf.nodes.length) { + index = this.indexes[iterator] + result = callback(this.proxyOf.nodes[index], index) + if (result === false) break + + this.indexes[iterator] += 1 + } + + delete this.indexes[iterator] + return result + } + + every(condition) { + return this.nodes.every(condition) + } + + getIterator() { + if (!this.lastEach) this.lastEach = 0 + if (!this.indexes) this.indexes = {} + + this.lastEach += 1 + let iterator = this.lastEach + this.indexes[iterator] = 0 + + return iterator + } + + getProxyProcessor() { + return { + get(node, prop) { + if (prop === 'proxyOf') { + return node + } else if (!node[prop]) { + return node[prop] + } else if ( + prop === 'each' || + (typeof prop === 'string' && prop.startsWith('walk')) + ) { + return (...args) => { + return node[prop]( + ...args.map(i => { + if (typeof i === 'function') { + return (child, index) => i(child.toProxy(), index) + } else { + return i + } + }) + ) + } + } else if (prop === 'every' || prop === 'some') { + return cb => { + return node[prop]((child, ...other) => + cb(child.toProxy(), ...other) + ) + } + } else if (prop === 'root') { + return () => node.root().toProxy() + } else if (prop === 'nodes') { + return node.nodes.map(i => i.toProxy()) + } else if (prop === 'first' || prop === 'last') { + return node[prop].toProxy() + } else { + return node[prop] + } + }, + + set(node, prop, value) { + if (node[prop] === value) return true + node[prop] = value + if (prop === 'name' || prop === 'params' || prop === 'selector') { + node.markDirty() + } + return true + } + } + } + + 
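To illustrate the `indexes` bookkeeping used by `each()` above: removing the current child during iteration still visits every remaining child exactly once, because `removeChild` decrements the stored iterator indexes. A small sketch with an invented rule:

```js
const postcss = require('postcss')

const rule = postcss.parse('a { color: black; top: 0; z-index: 1 }').first

const visited = []
rule.each(decl => {
  visited.push(decl.prop)
  // remove() calls removeChild(), which shifts the iterator index back by one
  if (decl.prop === 'top') decl.remove()
})

visited           //=> [ 'color', 'top', 'z-index' ]
rule.nodes.length //=> 2
```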
index(child) { + if (typeof child === 'number') return child + if (child.proxyOf) child = child.proxyOf + return this.proxyOf.nodes.indexOf(child) + } + + insertAfter(exist, add) { + let existIndex = this.index(exist) + let nodes = this.normalize(add, this.proxyOf.nodes[existIndex]).reverse() + existIndex = this.index(exist) + for (let node of nodes) this.proxyOf.nodes.splice(existIndex + 1, 0, node) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (existIndex < index) { + this.indexes[id] = index + nodes.length + } + } + + this.markDirty() + + return this + } + + insertBefore(exist, add) { + let existIndex = this.index(exist) + let type = existIndex === 0 ? 'prepend' : false + let nodes = this.normalize( + add, + this.proxyOf.nodes[existIndex], + type + ).reverse() + existIndex = this.index(exist) + for (let node of nodes) this.proxyOf.nodes.splice(existIndex, 0, node) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (existIndex <= index) { + this.indexes[id] = index + nodes.length + } + } + + this.markDirty() + + return this + } + + normalize(nodes, sample) { + if (typeof nodes === 'string') { + nodes = cleanSource(parse(nodes).nodes) + } else if (typeof nodes === 'undefined') { + nodes = [] + } else if (Array.isArray(nodes)) { + nodes = nodes.slice(0) + for (let i of nodes) { + if (i.parent) i.parent.removeChild(i, 'ignore') + } + } else if (nodes.type === 'root' && this.type !== 'document') { + nodes = nodes.nodes.slice(0) + for (let i of nodes) { + if (i.parent) i.parent.removeChild(i, 'ignore') + } + } else if (nodes.type) { + nodes = [nodes] + } else if (nodes.prop) { + if (typeof nodes.value === 'undefined') { + throw new Error('Value field is missed in node creation') + } else if (typeof nodes.value !== 'string') { + nodes.value = String(nodes.value) + } + nodes = [new Declaration(nodes)] + } else if (nodes.selector || nodes.selectors) { + nodes = [new Rule(nodes)] + } else if (nodes.name) { + nodes = [new AtRule(nodes)] + } else if (nodes.text) { + nodes = [new Comment(nodes)] + } else { + throw new Error('Unknown node type in node creation') + } + + let processed = nodes.map(i => { + /* c8 ignore next */ + if (!i[my]) Container.rebuild(i) + i = i.proxyOf + if (i.parent) i.parent.removeChild(i) + if (i[isClean]) markTreeDirty(i) + + if (!i.raws) i.raws = {} + if (typeof i.raws.before === 'undefined') { + if (sample && typeof sample.raws.before !== 'undefined') { + i.raws.before = sample.raws.before.replace(/\S/g, '') + } + } + i.parent = this.proxyOf + return i + }) + + return processed + } + + prepend(...children) { + children = children.reverse() + for (let child of children) { + let nodes = this.normalize(child, this.first, 'prepend').reverse() + for (let node of nodes) this.proxyOf.nodes.unshift(node) + for (let id in this.indexes) { + this.indexes[id] = this.indexes[id] + nodes.length + } + } + + this.markDirty() + + return this + } + + push(child) { + child.parent = this + this.proxyOf.nodes.push(child) + return this + } + + removeAll() { + for (let node of this.proxyOf.nodes) node.parent = undefined + this.proxyOf.nodes = [] + + this.markDirty() + + return this + } + + removeChild(child) { + child = this.index(child) + this.proxyOf.nodes[child].parent = undefined + this.proxyOf.nodes.splice(child, 1) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (index >= child) { + this.indexes[id] = index - 1 + } + } + + this.markDirty() + + return this + } + + replaceValues(pattern, opts, 
callback) { + if (!callback) { + callback = opts + opts = {} + } + + this.walkDecls(decl => { + if (opts.props && !opts.props.includes(decl.prop)) return + if (opts.fast && !decl.value.includes(opts.fast)) return + + decl.value = decl.value.replace(pattern, callback) + }) + + this.markDirty() + + return this + } + + some(condition) { + return this.nodes.some(condition) + } + + walk(callback) { + return this.each((child, i) => { + let result + try { + result = callback(child, i) + } catch (e) { + throw child.addToError(e) + } + if (result !== false && child.walk) { + result = child.walk(callback) + } + + return result + }) + } + + walkAtRules(name, callback) { + if (!callback) { + callback = name + return this.walk((child, i) => { + if (child.type === 'atrule') { + return callback(child, i) + } + }) + } + if (name instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'atrule' && name.test(child.name)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'atrule' && child.name === name) { + return callback(child, i) + } + }) + } + + walkComments(callback) { + return this.walk((child, i) => { + if (child.type === 'comment') { + return callback(child, i) + } + }) + } + + walkDecls(prop, callback) { + if (!callback) { + callback = prop + return this.walk((child, i) => { + if (child.type === 'decl') { + return callback(child, i) + } + }) + } + if (prop instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'decl' && prop.test(child.prop)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'decl' && child.prop === prop) { + return callback(child, i) + } + }) + } + + walkRules(selector, callback) { + if (!callback) { + callback = selector + + return this.walk((child, i) => { + if (child.type === 'rule') { + return callback(child, i) + } + }) + } + if (selector instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'rule' && selector.test(child.selector)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'rule' && child.selector === selector) { + return callback(child, i) + } + }) + } +} + +Container.registerParse = dependant => { + parse = dependant +} + +Container.registerRule = dependant => { + Rule = dependant +} + +Container.registerAtRule = dependant => { + AtRule = dependant +} + +Container.registerRoot = dependant => { + Root = dependant +} + +module.exports = Container +Container.default = Container + +/* c8 ignore start */ +Container.rebuild = node => { + if (node.type === 'atrule') { + Object.setPrototypeOf(node, AtRule.prototype) + } else if (node.type === 'rule') { + Object.setPrototypeOf(node, Rule.prototype) + } else if (node.type === 'decl') { + Object.setPrototypeOf(node, Declaration.prototype) + } else if (node.type === 'comment') { + Object.setPrototypeOf(node, Comment.prototype) + } else if (node.type === 'root') { + Object.setPrototypeOf(node, Root.prototype) + } + + node[my] = true + + if (node.nodes) { + node.nodes.forEach(child => { + Container.rebuild(child) + }) + } +} +/* c8 ignore stop */ diff --git a/node_modules/postcss/lib/css-syntax-error.d.ts b/node_modules/postcss/lib/css-syntax-error.d.ts new file mode 100644 index 0000000..e540d84 --- /dev/null +++ b/node_modules/postcss/lib/css-syntax-error.d.ts @@ -0,0 +1,248 @@ +import { FilePosition } from './input.js' + +declare namespace CssSyntaxError { + /** + * A position that is part of a range. 
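A sketch of `replaceValues` with the options handled by the implementation above: `fast` is a cheap substring pre-check and `props` limits the walk to the listed properties (the unit conversion is invented for illustration):

```js
const postcss = require('postcss')

const root = postcss.parse('a { width: 4rem; margin: 0 2rem }')

root.replaceValues(/\d+rem/g, { fast: 'rem', props: ['width', 'margin'] }, str => {
  // The callback receives the matched substring, as with String#replace.
  return 16 * parseInt(str) + 'px'
})

root.toString() //=> 'a { width: 64px; margin: 0 32px }'
```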
+ */ + export interface RangePosition { + /** + * The column number in the input. + */ + column: number + + /** + * The line number in the input. + */ + line: number + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { CssSyntaxError_ as default } +} + +/** + * The CSS parser throws this error for broken CSS. + * + * Custom parsers can throw this error for broken custom syntax using + * the `Node#error` method. + * + * PostCSS will use the input source map to detect the original error location. + * If you wrote a Sass file, compiled it to CSS and then parsed it with PostCSS, + * PostCSS will show the original position in the Sass file. + * + * If you need the position in the PostCSS input + * (e.g., to debug the previous compiler), use `error.input.file`. + * + * ```js + * // Raising error from plugin + * throw node.error('Unknown variable', { plugin: 'postcss-vars' }) + * ``` + * + * ```js + * // Catching and checking syntax error + * try { + * postcss.parse('a{') + * } catch (error) { + * if (error.name === 'CssSyntaxError') { + * error //=> CssSyntaxError + * } + * } + * ``` + */ +declare class CssSyntaxError_ extends Error { + /** + * Source column of the error. + * + * ```js + * error.column //=> 1 + * error.input.column //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.column`. + */ + column?: number + + /** + * Source column of the error's end, exclusive. Provided if the error pertains + * to a range. + * + * ```js + * error.endColumn //=> 1 + * error.input.endColumn //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.endColumn`. + */ + endColumn?: number + + /** + * Source line of the error's end, exclusive. Provided if the error pertains + * to a range. + * + * ```js + * error.endLine //=> 3 + * error.input.endLine //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.endLine`. + */ + endLine?: number + + /** + * Absolute path to the broken file. + * + * ```js + * error.file //=> 'a.sass' + * error.input.file //=> 'a.css' + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.file`. + */ + file?: string + + /** + * Input object with PostCSS internal information + * about input file. If input has source map + * from previous tool, PostCSS will use origin + * (for example, Sass) source. You can use this + * object to get PostCSS input source. + * + * ```js + * error.input.file //=> 'a.css' + * error.file //=> 'a.sass' + * ``` + */ + input?: FilePosition + + /** + * Source line of the error. + * + * ```js + * error.line //=> 2 + * error.input.line //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.line`. + */ + line?: number + + /** + * Full error text in the GNU error format + * with plugin, file, line and column. + * + * ```js + * error.message //=> 'a.css:1:1: Unclosed block' + * ``` + */ + message: string + + /** + * Always equal to `'CssSyntaxError'`. 
You should always check error type + * by `error.name === 'CssSyntaxError'` + * instead of `error instanceof CssSyntaxError`, + * because npm could have several PostCSS versions. + * + * ```js + * if (error.name === 'CssSyntaxError') { + * error //=> CssSyntaxError + * } + * ``` + */ + name: 'CssSyntaxError' + + /** + * Plugin name, if error came from plugin. + * + * ```js + * error.plugin //=> 'postcss-vars' + * ``` + */ + plugin?: string + + /** + * Error message. + * + * ```js + * error.message //=> 'Unclosed block' + * ``` + */ + reason: string + + /** + * Source code of the broken file. + * + * ```js + * error.source //=> 'a { b {} }' + * error.input.source //=> 'a b { }' + * ``` + */ + source?: string + + stack: string + + /** + * Instantiates a CSS syntax error. Can be instantiated for a single position + * or for a range. + * @param message Error message. + * @param lineOrStartPos If for a single position, the line number, or if for + * a range, the inclusive start position of the error. + * @param columnOrEndPos If for a single position, the column number, or if for + * a range, the exclusive end position of the error. + * @param source Source code of the broken file. + * @param file Absolute path to the broken file. + * @param plugin PostCSS plugin name, if error came from plugin. + */ + constructor( + message: string, + lineOrStartPos?: CssSyntaxError.RangePosition | number, + columnOrEndPos?: CssSyntaxError.RangePosition | number, + source?: string, + file?: string, + plugin?: string + ) + + /** + * Returns a few lines of CSS source that caused the error. + * + * If the CSS has an input source map without `sourceContent`, + * this method will return an empty string. + * + * ```js + * error.showSourceCode() //=> " 4 | } + * // 5 | a { + * // > 6 | bad + * // | ^ + * // 7 | } + * // 8 | b {" + * ``` + * + * @param color Whether arrow will be colored red by terminal + * color codes. By default, PostCSS will detect + * color support by `process.stdout.isTTY` + * and `process.env.NODE_DISABLE_COLORS`. + * @return Few lines of CSS source that caused the error. + */ + showSourceCode(color?: boolean): string + + /** + * Returns error position, message and source code of the broken part. + * + * ```js + * error.toString() //=> "CssSyntaxError: app.css:1:1: Unclosed block + * // > 1 | a { + * // | ^" + * ``` + * + * @return Error position, message and source code. 
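A short sketch of catching this error, following the name check recommended above (the file name is invented):

```js
const postcss = require('postcss')

try {
  postcss.parse('a {', { from: 'app.css' })
} catch (error) {
  // Check by name, not instanceof: several PostCSS copies may coexist in node_modules.
  if (error.name === 'CssSyntaxError') {
    error.message          //=> 'app.css:1:1: Unclosed block'
    error.showSourceCode() // a few annotated lines of the broken source
  } else {
    throw error
  }
}
```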
+ */ + toString(): string +} + +declare class CssSyntaxError extends CssSyntaxError_ {} + +export = CssSyntaxError diff --git a/node_modules/postcss/lib/css-syntax-error.js b/node_modules/postcss/lib/css-syntax-error.js new file mode 100644 index 0000000..275a4f6 --- /dev/null +++ b/node_modules/postcss/lib/css-syntax-error.js @@ -0,0 +1,133 @@ +'use strict' + +let pico = require('picocolors') + +let terminalHighlight = require('./terminal-highlight') + +class CssSyntaxError extends Error { + constructor(message, line, column, source, file, plugin) { + super(message) + this.name = 'CssSyntaxError' + this.reason = message + + if (file) { + this.file = file + } + if (source) { + this.source = source + } + if (plugin) { + this.plugin = plugin + } + if (typeof line !== 'undefined' && typeof column !== 'undefined') { + if (typeof line === 'number') { + this.line = line + this.column = column + } else { + this.line = line.line + this.column = line.column + this.endLine = column.line + this.endColumn = column.column + } + } + + this.setMessage() + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, CssSyntaxError) + } + } + + setMessage() { + this.message = this.plugin ? this.plugin + ': ' : '' + this.message += this.file ? this.file : '' + if (typeof this.line !== 'undefined') { + this.message += ':' + this.line + ':' + this.column + } + this.message += ': ' + this.reason + } + + showSourceCode(color) { + if (!this.source) return '' + + let css = this.source + if (color == null) color = pico.isColorSupported + + let aside = text => text + let mark = text => text + let highlight = text => text + if (color) { + let { bold, gray, red } = pico.createColors(true) + mark = text => bold(red(text)) + aside = text => gray(text) + if (terminalHighlight) { + highlight = text => terminalHighlight(text) + } + } + + let lines = css.split(/\r?\n/) + let start = Math.max(this.line - 3, 0) + let end = Math.min(this.line + 2, lines.length) + let maxWidth = String(end).length + + return lines + .slice(start, end) + .map((line, index) => { + let number = start + 1 + index + let gutter = ' ' + (' ' + number).slice(-maxWidth) + ' | ' + if (number === this.line) { + if (line.length > 160) { + let padding = 20 + let subLineStart = Math.max(0, this.column - padding) + let subLineEnd = Math.max( + this.column + padding, + this.endColumn + padding + ) + let subLine = line.slice(subLineStart, subLineEnd) + + let spacing = + aside(gutter.replace(/\d/g, ' ')) + + line + .slice(0, Math.min(this.column - 1, padding - 1)) + .replace(/[^\t]/g, ' ') + + return ( + mark('>') + + aside(gutter) + + highlight(subLine) + + '\n ' + + spacing + + mark('^') + ) + } + + let spacing = + aside(gutter.replace(/\d/g, ' ')) + + line.slice(0, this.column - 1).replace(/[^\t]/g, ' ') + + return ( + mark('>') + + aside(gutter) + + highlight(line) + + '\n ' + + spacing + + mark('^') + ) + } + + return ' ' + aside(gutter) + highlight(line) + }) + .join('\n') + } + + toString() { + let code = this.showSourceCode() + if (code) { + code = '\n\n' + code + '\n' + } + return this.name + ': ' + this.message + code + } +} + +module.exports = CssSyntaxError +CssSyntaxError.default = CssSyntaxError diff --git a/node_modules/postcss/lib/declaration.d.ts b/node_modules/postcss/lib/declaration.d.ts new file mode 100644 index 0000000..d489b42 --- /dev/null +++ b/node_modules/postcss/lib/declaration.d.ts @@ -0,0 +1,151 @@ +import { ContainerWithChildren } from './container.js' +import Node from './node.js' + +declare namespace Declaration { + export 
interface DeclarationRaws extends Record { + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the property and value for declarations. + */ + between?: string + + /** + * The content of the important statement, if it is not just `!important`. + */ + important?: string + + /** + * Declaration value with comments. + */ + value?: { + raw: string + value: string + } + } + + export interface DeclarationProps { + /** Whether the declaration has an `!important` annotation. */ + important?: boolean + /** Name of the declaration. */ + prop: string + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: DeclarationRaws + /** Value of the declaration. */ + value: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Declaration_ as default } +} + +/** + * It represents a class that handles + * [CSS declarations](https://developer.mozilla.org/en-US/docs/Web/CSS/Syntax#css_declarations) + * + * ```js + * Once (root, { Declaration }) { + * const color = new Declaration({ prop: 'color', value: 'black' }) + * root.append(color) + * } + * ``` + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first?.first + * + * decl.type //=> 'decl' + * decl.toString() //=> ' color: black' + * ``` + */ +declare class Declaration_ extends Node { + parent: ContainerWithChildren | undefined + raws: Declaration.DeclarationRaws + + type: 'decl' + + /** + * It represents a specificity of the declaration. + * + * If true, the CSS declaration will have an + * [important](https://developer.mozilla.org/en-US/docs/Web/CSS/important) + * specifier. + * + * ```js + * const root = postcss.parse('a { color: black !important; color: red }') + * + * root.first.first.important //=> true + * root.first.last.important //=> undefined + * ``` + */ + get important(): boolean + set important(value: boolean) + + /** + * The property name for a CSS declaration. + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first.first + * + * decl.prop //=> 'color' + * ``` + */ + get prop(): string + + set prop(value: string) + + /** + * The property value for a CSS declaration. + * + * Any CSS comments inside the value string will be filtered out. + * CSS comments present in the source value will be available in + * the `raws` property. + * + * Assigning new `value` would ignore the comments in `raws` + * property while compiling node to string. + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first.first + * + * decl.value //=> 'black' + * ``` + */ + get value(): string + set value(value: string) + + /** + * It represents a getter that returns `true` if a declaration starts with + * `--` or `$`, which are used to declare variables in CSS and SASS/SCSS. 
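A brief sketch of the `important` and `variable` accessors described above (inputs are invented; `postcss.Declaration` is assumed to be the constructor exported by the main module):

```js
const postcss = require('postcss')

const decl = new postcss.Declaration({
  important: true,
  prop: 'color',
  value: 'black'
})
decl.toString() //=> 'color: black !important'

const root = postcss.parse(':root { --brand: #fff }')
root.first.first.variable //=> true, because the property starts with '--'
```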
+ * + * ```js + * const root = postcss.parse(':root { --one: 1 }') + * const one = root.first.first + * + * one.variable //=> true + * ``` + * + * ```js + * const root = postcss.parse('$one: 1') + * const one = root.first + * + * one.variable //=> true + * ``` + */ + get variable(): boolean + constructor(defaults?: Declaration.DeclarationProps) + + assign(overrides: Declaration.DeclarationProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this +} + +declare class Declaration extends Declaration_ {} + +export = Declaration diff --git a/node_modules/postcss/lib/declaration.js b/node_modules/postcss/lib/declaration.js new file mode 100644 index 0000000..65a03aa --- /dev/null +++ b/node_modules/postcss/lib/declaration.js @@ -0,0 +1,24 @@ +'use strict' + +let Node = require('./node') + +class Declaration extends Node { + get variable() { + return this.prop.startsWith('--') || this.prop[0] === '$' + } + + constructor(defaults) { + if ( + defaults && + typeof defaults.value !== 'undefined' && + typeof defaults.value !== 'string' + ) { + defaults = { ...defaults, value: String(defaults.value) } + } + super(defaults) + this.type = 'decl' + } +} + +module.exports = Declaration +Declaration.default = Declaration diff --git a/node_modules/postcss/lib/document.d.ts b/node_modules/postcss/lib/document.d.ts new file mode 100644 index 0000000..f9e8063 --- /dev/null +++ b/node_modules/postcss/lib/document.d.ts @@ -0,0 +1,69 @@ +import Container, { ContainerProps } from './container.js' +import { ProcessOptions } from './postcss.js' +import Result from './result.js' +import Root from './root.js' + +declare namespace Document { + export interface DocumentProps extends ContainerProps { + nodes?: readonly Root[] + + /** + * Information to generate byte-to-byte equal node string as it was + * in the origin input. + * + * Every parser saves its own properties. + */ + raws?: Record + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Document_ as default } +} + +/** + * Represents a file and contains all its parsed nodes. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + * + * ```js + * const document = htmlParser( + * '' + * ) + * document.type //=> 'document' + * document.nodes.length //=> 2 + * ``` + */ +declare class Document_ extends Container { + nodes: Root[] + parent: undefined + type: 'document' + + constructor(defaults?: Document.DocumentProps) + + assign(overrides: Document.DocumentProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this + + /** + * Returns a `Result` instance representing the documentโ€™s CSS roots. + * + * ```js + * const root1 = postcss.parse(css1, { from: 'a.css' }) + * const root2 = postcss.parse(css2, { from: 'b.css' }) + * const document = postcss.document() + * document.append(root1) + * document.append(root2) + * const result = document.toResult({ to: 'all.css', map: true }) + * ``` + * + * @param opts Options. + * @return Result with current documentโ€™s CSS. 
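One detail worth illustrating from the `Declaration` constructor shown above: non-string values are coerced with `String()` before the node is built, so numeric values are safe to pass. A minimal sketch:

```js
const postcss = require('postcss')

const decl = new postcss.Declaration({ prop: 'z-index', value: 1 })
typeof decl.value //=> 'string'
decl.value        //=> '1'
```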
+ */ + toResult(options?: ProcessOptions): Result +} + +declare class Document extends Document_ {} + +export = Document diff --git a/node_modules/postcss/lib/document.js b/node_modules/postcss/lib/document.js new file mode 100644 index 0000000..4468991 --- /dev/null +++ b/node_modules/postcss/lib/document.js @@ -0,0 +1,33 @@ +'use strict' + +let Container = require('./container') + +let LazyResult, Processor + +class Document extends Container { + constructor(defaults) { + // type needs to be passed to super, otherwise child roots won't be normalized correctly + super({ type: 'document', ...defaults }) + + if (!this.nodes) { + this.nodes = [] + } + } + + toResult(opts = {}) { + let lazy = new LazyResult(new Processor(), this, opts) + + return lazy.stringify() + } +} + +Document.registerLazyResult = dependant => { + LazyResult = dependant +} + +Document.registerProcessor = dependant => { + Processor = dependant +} + +module.exports = Document +Document.default = Document diff --git a/node_modules/postcss/lib/fromJSON.d.ts b/node_modules/postcss/lib/fromJSON.d.ts new file mode 100644 index 0000000..e1deedb --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.d.ts @@ -0,0 +1,9 @@ +import { JSONHydrator } from './postcss.js' + +interface FromJSON extends JSONHydrator { + default: FromJSON +} + +declare const fromJSON: FromJSON + +export = fromJSON diff --git a/node_modules/postcss/lib/fromJSON.js b/node_modules/postcss/lib/fromJSON.js new file mode 100644 index 0000000..c9ac1a8 --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.js @@ -0,0 +1,54 @@ +'use strict' + +let AtRule = require('./at-rule') +let Comment = require('./comment') +let Declaration = require('./declaration') +let Input = require('./input') +let PreviousMap = require('./previous-map') +let Root = require('./root') +let Rule = require('./rule') + +function fromJSON(json, inputs) { + if (Array.isArray(json)) return json.map(n => fromJSON(n)) + + let { inputs: ownInputs, ...defaults } = json + if (ownInputs) { + inputs = [] + for (let input of ownInputs) { + let inputHydrated = { ...input, __proto__: Input.prototype } + if (inputHydrated.map) { + inputHydrated.map = { + ...inputHydrated.map, + __proto__: PreviousMap.prototype + } + } + inputs.push(inputHydrated) + } + } + if (defaults.nodes) { + defaults.nodes = json.nodes.map(n => fromJSON(n, inputs)) + } + if (defaults.source) { + let { inputId, ...source } = defaults.source + defaults.source = source + if (inputId != null) { + defaults.source.input = inputs[inputId] + } + } + if (defaults.type === 'root') { + return new Root(defaults) + } else if (defaults.type === 'decl') { + return new Declaration(defaults) + } else if (defaults.type === 'rule') { + return new Rule(defaults) + } else if (defaults.type === 'comment') { + return new Comment(defaults) + } else if (defaults.type === 'atrule') { + return new AtRule(defaults) + } else { + throw new Error('Unknown node type: ' + json.type) + } +} + +module.exports = fromJSON +fromJSON.default = fromJSON diff --git a/node_modules/postcss/lib/input.d.ts b/node_modules/postcss/lib/input.d.ts new file mode 100644 index 0000000..3207da3 --- /dev/null +++ b/node_modules/postcss/lib/input.d.ts @@ -0,0 +1,227 @@ +import { CssSyntaxError, ProcessOptions } from './postcss.js' +import PreviousMap from './previous-map.js' + +declare namespace Input { + export interface FilePosition { + /** + * Column of inclusive start position in source file. + */ + column: number + + /** + * Column of exclusive end position in source file. 
+ */ + endColumn?: number + + /** + * Line of exclusive end position in source file. + */ + endLine?: number + + /** + * Offset of exclusive end position in source file. + */ + endOffset?: number + + /** + * Absolute path to the source file. + */ + file?: string + + /** + * Line of inclusive start position in source file. + */ + line: number + + /** + * Offset of inclusive start position in source file. + */ + offset: number + + /** + * Source code. + */ + source?: string + + /** + * URL for the source file. + */ + url: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Input_ as default } +} + +/** + * Represents the source CSS. + * + * ```js + * const root = postcss.parse(css, { from: file }) + * const input = root.source.input + * ``` + */ +declare class Input_ { + /** + * Input CSS source. + * + * ```js + * const input = postcss.parse('a{}', { from: file }).input + * input.css //=> "a{}" + * ``` + */ + css: string + + /** + * Input source with support for non-CSS documents. + * + * ```js + * const input = postcss.parse('a{}', { from: file, document: '' }).input + * input.document //=> "" + * input.css //=> "a{}" + * ``` + */ + document: string + + /** + * The absolute path to the CSS source file defined + * with the `from` option. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.file //=> '/home/ai/a.css' + * ``` + */ + file?: string + + /** + * The flag to indicate whether or not the source code has Unicode BOM. + */ + hasBOM: boolean + + /** + * The unique ID of the CSS source. It will be created if `from` option + * is not provided (because PostCSS does not know the file path). + * + * ```js + * const root = postcss.parse(css) + * root.source.input.file //=> undefined + * root.source.input.id //=> "" + * ``` + */ + id?: string + + /** + * The input source map passed from a compilation step before PostCSS + * (for example, from Sass compiler). + * + * ```js + * root.source.input.map.consumer().sources //=> ['a.sass'] + * ``` + */ + map: PreviousMap + + /** + * The CSS source identifier. Contains `Input#file` if the user + * set the `from` option, or `Input#id` if they did not. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.from //=> "/home/ai/a.css" + * + * const root = postcss.parse(css) + * root.source.input.from //=> "" + * ``` + */ + get from(): string + + /** + * @param css Input CSS source. + * @param opts Process options. + */ + constructor(css: string, opts?: ProcessOptions) + + /** + * Returns `CssSyntaxError` with information about the error and its position. + */ + error( + message: string, + start: + | { + column: number + line: number + } + | { + offset: number + }, + end: + | { + column: number + line: number + } + | { + offset: number + }, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + error( + message: string, + line: number, + column: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + error( + message: string, + offset: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + /** + * Converts source line and column to offset. + * + * @param line Source line. + * @param column Source column. + * @return Source offset. + */ + fromLineAndColumn(line: number, column: number): number + + /** + * Converts source offset to line and column. + * + * @param offset Source offset. 
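An illustrative sketch of reading the `Input` object and converting offsets with the API declared above (the CSS and file name are invented):

```js
const postcss = require('postcss')

const root = postcss.parse('a {\n  color: black\n}\n', { from: 'a.css' })
const input = root.source.input

input.css           //=> 'a {\n  color: black\n}\n'
input.from          // absolute path ending in 'a.css'
input.fromOffset(6) //=> { col: 3, line: 2 } (offset 6 is the 'c' of 'color')
```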
+ */ + fromOffset(offset: number): { col: number; line: number } | null + + /** + * Reads the input source map and returns a symbol position + * in the input source (e.g., in a Sass file that was compiled + * to CSS before being passed to PostCSS). Optionally takes an + * end position, exclusive. + * + * ```js + * root.source.input.origin(1, 1) //=> { file: 'a.css', line: 3, column: 1 } + * root.source.input.origin(1, 1, 1, 4) + * //=> { file: 'a.css', line: 3, column: 1, endLine: 3, endColumn: 4 } + * ``` + * + * @param line Line for inclusive start position in input CSS. + * @param column Column for inclusive start position in input CSS. + * @param endLine Line for exclusive end position in input CSS. + * @param endColumn Column for exclusive end position in input CSS. + * + * @return Position in input source. + */ + origin( + line: number, + column: number, + endLine?: number, + endColumn?: number + ): false | Input.FilePosition + + /** Converts this to a JSON-friendly object representation. */ + toJSON(): object +} + +declare class Input extends Input_ {} + +export = Input diff --git a/node_modules/postcss/lib/input.js b/node_modules/postcss/lib/input.js new file mode 100644 index 0000000..bb0ccf5 --- /dev/null +++ b/node_modules/postcss/lib/input.js @@ -0,0 +1,265 @@ +'use strict' + +let { nanoid } = require('nanoid/non-secure') +let { isAbsolute, resolve } = require('path') +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { fileURLToPath, pathToFileURL } = require('url') + +let CssSyntaxError = require('./css-syntax-error') +let PreviousMap = require('./previous-map') +let terminalHighlight = require('./terminal-highlight') + +let lineToIndexCache = Symbol('lineToIndexCache') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(resolve && isAbsolute) + +function getLineToIndex(input) { + if (input[lineToIndexCache]) return input[lineToIndexCache] + let lines = input.css.split('\n') + let lineToIndex = new Array(lines.length) + let prevIndex = 0 + + for (let i = 0, l = lines.length; i < l; i++) { + lineToIndex[i] = prevIndex + prevIndex += lines[i].length + 1 + } + + input[lineToIndexCache] = lineToIndex + return lineToIndex +} + +class Input { + get from() { + return this.file || this.id + } + + constructor(css, opts = {}) { + if ( + css === null || + typeof css === 'undefined' || + (typeof css === 'object' && !css.toString) + ) { + throw new Error(`PostCSS received ${css} instead of CSS string`) + } + + this.css = css.toString() + + if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') { + this.hasBOM = true + this.css = this.css.slice(1) + } else { + this.hasBOM = false + } + + this.document = this.css + if (opts.document) this.document = opts.document.toString() + + if (opts.from) { + if ( + !pathAvailable || + /^\w+:\/\//.test(opts.from) || + isAbsolute(opts.from) + ) { + this.file = opts.from + } else { + this.file = resolve(opts.from) + } + } + + if (pathAvailable && sourceMapAvailable) { + let map = new PreviousMap(this.css, opts) + if (map.text) { + this.map = map + let file = map.consumer().file + if (!this.file && file) this.file = this.mapResolve(file) + } + } + + if (!this.file) { + this.id = '' + } + if (this.map) this.map.file = this.from + } + + error(message, line, column, opts = {}) { + let endColumn, endLine, endOffset, offset, result + + if (line && typeof line === 'object') { + let start = line + let end = column + if (typeof start.offset === 'number') { + offset = start.offset 
+ let pos = this.fromOffset(offset) + line = pos.line + column = pos.col + } else { + line = start.line + column = start.column + offset = this.fromLineAndColumn(line, column) + } + if (typeof end.offset === 'number') { + endOffset = end.offset + let pos = this.fromOffset(endOffset) + endLine = pos.line + endColumn = pos.col + } else { + endLine = end.line + endColumn = end.column + endOffset = this.fromLineAndColumn(end.line, end.column) + } + } else if (!column) { + offset = line + let pos = this.fromOffset(offset) + line = pos.line + column = pos.col + } else { + offset = this.fromLineAndColumn(line, column) + } + + let origin = this.origin(line, column, endLine, endColumn) + if (origin) { + result = new CssSyntaxError( + message, + origin.endLine === undefined + ? origin.line + : { column: origin.column, line: origin.line }, + origin.endLine === undefined + ? origin.column + : { column: origin.endColumn, line: origin.endLine }, + origin.source, + origin.file, + opts.plugin + ) + } else { + result = new CssSyntaxError( + message, + endLine === undefined ? line : { column, line }, + endLine === undefined ? column : { column: endColumn, line: endLine }, + this.css, + this.file, + opts.plugin + ) + } + + result.input = { column, endColumn, endLine, endOffset, line, offset, source: this.css } + if (this.file) { + if (pathToFileURL) { + result.input.url = pathToFileURL(this.file).toString() + } + result.input.file = this.file + } + + return result + } + + fromLineAndColumn(line, column) { + let lineToIndex = getLineToIndex(this) + let index = lineToIndex[line - 1] + return index + column - 1 + } + + fromOffset(offset) { + let lineToIndex = getLineToIndex(this) + let lastLine = lineToIndex[lineToIndex.length - 1] + + let min = 0 + if (offset >= lastLine) { + min = lineToIndex.length - 1 + } else { + let max = lineToIndex.length - 2 + let mid + while (min < max) { + mid = min + ((max - min) >> 1) + if (offset < lineToIndex[mid]) { + max = mid - 1 + } else if (offset >= lineToIndex[mid + 1]) { + min = mid + 1 + } else { + min = mid + break + } + } + } + return { + col: offset - lineToIndex[min] + 1, + line: min + 1 + } + } + + mapResolve(file) { + if (/^\w+:\/\//.test(file)) { + return file + } + return resolve(this.map.consumer().sourceRoot || this.map.root || '.', file) + } + + origin(line, column, endLine, endColumn) { + if (!this.map) return false + let consumer = this.map.consumer() + + let from = consumer.originalPositionFor({ column, line }) + if (!from.source) return false + + let to + if (typeof endLine === 'number') { + to = consumer.originalPositionFor({ column: endColumn, line: endLine }) + } + + let fromUrl + + if (isAbsolute(from.source)) { + fromUrl = pathToFileURL(from.source) + } else { + fromUrl = new URL( + from.source, + this.map.consumer().sourceRoot || pathToFileURL(this.map.mapFile) + ) + } + + let result = { + column: from.column, + endColumn: to && to.column, + endLine: to && to.line, + line: from.line, + url: fromUrl.toString() + } + + if (fromUrl.protocol === 'file:') { + if (fileURLToPath) { + result.file = fileURLToPath(fromUrl) + } else { + /* c8 ignore next 2 */ + throw new Error(`file: protocol is not available in this PostCSS build`) + } + } + + let source = consumer.sourceContentFor(from.source) + if (source) result.source = source + + return result + } + + toJSON() { + let json = {} + for (let name of ['hasBOM', 'css', 'file', 'id']) { + if (this[name] != null) { + json[name] = this[name] + } + } + if (this.map) { + json.map = { ...this.map } + if 
(json.map.consumerCache) { + json.map.consumerCache = undefined + } + } + return json + } +} + +module.exports = Input +Input.default = Input + +if (terminalHighlight && terminalHighlight.registerInput) { + terminalHighlight.registerInput(Input) +} diff --git a/node_modules/postcss/lib/lazy-result.d.ts b/node_modules/postcss/lib/lazy-result.d.ts new file mode 100644 index 0000000..2eb7279 --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.d.ts @@ -0,0 +1,190 @@ +import Document from './document.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace LazyResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { LazyResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS transformations. + * + * A `LazyResult` instance is returned by `Processor#process`. + * + * ```js + * const lazy = postcss([autoprefixer]).process(css) + * ``` + */ +declare class LazyResult_ + implements PromiseLike> +{ + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onRejected for each error thrown in any plugin. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).then(result => { + * console.log(result.css) + * }).catch(error => { + * console.error(error) + * }) + * ``` + */ + catch: Promise>['catch'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onFinally on any error or when all plugins will finish work. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).finally(() => { + * console.log('processing ended') + * }) + * ``` + */ + finally: Promise>['finally'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls `onFulfilled` with a Result instance. If a plugin throws + * an error, the `onRejected` callback will be executed. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css, { from: cssPath }).then(result => { + * console.log(result.css) + * }) + * ``` + */ + then: Promise>['then'] + + /** + * An alias for the `css` property. Use it with syntaxes + * that generate non-CSS output. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get content(): string + + /** + * Processes input CSS through synchronous plugins, converts `Root` + * to a CSS string and returns `Result#css`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get css(): string + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#map`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get map(): SourceMap + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#messages`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. 
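A minimal sketch of consuming a `LazyResult` through its Promise interface with a small visitor plugin (the plugin name and the `-x-` prefix are invented):

```js
const postcss = require('postcss')

const doubler = {
  Declaration(decl) {
    // Guard against revisiting the clones this visitor inserts.
    if (!decl.prop.startsWith('-x-')) {
      decl.cloneBefore({ prop: '-x-' + decl.prop })
    }
  },
  postcssPlugin: 'doubler'
}

postcss([doubler])
  .process('a { color: black }', { from: undefined })
  .then(result => {
    result.css //=> 'a { -x-color: black; color: black }'
  })
```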
+ * + * PostCSS runners should always use `LazyResult#then`. + */ + get messages(): Message[] + + /** + * Options from the `Processor#process` call. + */ + get opts(): ResultOptions + + /** + * Returns a `Processor` instance, which will be used + * for CSS transformations. + */ + get processor(): Processor + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#root`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get root(): RootNode + + /** + * Returns the default string description of an object. + * Required to implement the Promise interface. + */ + get [Symbol.toStringTag](): string + + /** + * @param processor Processor used for this transformation. + * @param css CSS to parse and transform. + * @param opts Options from the `Processor#process` or `Root#toResult`. + */ + constructor(processor: Processor, css: string, opts: ResultOptions) + + /** + * Run plugin in async way and return `Result`. + * + * @return Result with output content. + */ + async(): Promise> + + /** + * Run plugin in sync way and return `Result`. + * + * @return Result with output content. + */ + sync(): Result + + /** + * Alias for the `LazyResult#css` property. + * + * ```js + * lazy + '' === lazy.css + * ``` + * + * @return Output CSS. + */ + toString(): string + + /** + * Processes input CSS through synchronous plugins + * and calls `Result#warnings`. + * + * @return Warnings from plugins. + */ + warnings(): Warning[] +} + +declare class LazyResult< + RootNode = Document | Root +> extends LazyResult_ {} + +export = LazyResult diff --git a/node_modules/postcss/lib/lazy-result.js b/node_modules/postcss/lib/lazy-result.js new file mode 100644 index 0000000..1ea52b8 --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.js @@ -0,0 +1,550 @@ +'use strict' + +let Container = require('./container') +let Document = require('./document') +let MapGenerator = require('./map-generator') +let parse = require('./parse') +let Result = require('./result') +let Root = require('./root') +let stringify = require('./stringify') +let { isClean, my } = require('./symbols') +let warnOnce = require('./warn-once') + +const TYPE_TO_CLASS_NAME = { + atrule: 'AtRule', + comment: 'Comment', + decl: 'Declaration', + document: 'Document', + root: 'Root', + rule: 'Rule' +} + +const PLUGIN_PROPS = { + AtRule: true, + AtRuleExit: true, + Comment: true, + CommentExit: true, + Declaration: true, + DeclarationExit: true, + Document: true, + DocumentExit: true, + Once: true, + OnceExit: true, + postcssPlugin: true, + prepare: true, + Root: true, + RootExit: true, + Rule: true, + RuleExit: true +} + +const NOT_VISITORS = { + Once: true, + postcssPlugin: true, + prepare: true +} + +const CHILDREN = 0 + +function isPromise(obj) { + return typeof obj === 'object' && typeof obj.then === 'function' +} + +function getEvents(node) { + let key = false + let type = TYPE_TO_CLASS_NAME[node.type] + if (node.type === 'decl') { + key = node.prop.toLowerCase() + } else if (node.type === 'atrule') { + key = node.name.toLowerCase() + } + + if (key && node.append) { + return [ + type, + type + '-' + key, + CHILDREN, + type + 'Exit', + type + 'Exit-' + key + ] + } else if (key) { + return [type, type + '-' + key, type + 'Exit', type + 'Exit-' + key] + } else if (node.append) { + return [type, CHILDREN, type + 'Exit'] + } else { + return [type, type + 'Exit'] + } +} + +function 
toStack(node) { + let events + if (node.type === 'document') { + events = ['Document', CHILDREN, 'DocumentExit'] + } else if (node.type === 'root') { + events = ['Root', CHILDREN, 'RootExit'] + } else { + events = getEvents(node) + } + + return { + eventIndex: 0, + events, + iterator: 0, + node, + visitorIndex: 0, + visitors: [] + } +} + +function cleanMarks(node) { + node[isClean] = false + if (node.nodes) node.nodes.forEach(i => cleanMarks(i)) + return node +} + +let postcss = {} + +class LazyResult { + get content() { + return this.stringify().content + } + + get css() { + return this.stringify().css + } + + get map() { + return this.stringify().map + } + + get messages() { + return this.sync().messages + } + + get opts() { + return this.result.opts + } + + get processor() { + return this.result.processor + } + + get root() { + return this.sync().root + } + + get [Symbol.toStringTag]() { + return 'LazyResult' + } + + constructor(processor, css, opts) { + this.stringified = false + this.processed = false + + let root + if ( + typeof css === 'object' && + css !== null && + (css.type === 'root' || css.type === 'document') + ) { + root = cleanMarks(css) + } else if (css instanceof LazyResult || css instanceof Result) { + root = cleanMarks(css.root) + if (css.map) { + if (typeof opts.map === 'undefined') opts.map = {} + if (!opts.map.inline) opts.map.inline = false + opts.map.prev = css.map + } + } else { + let parser = parse + if (opts.syntax) parser = opts.syntax.parse + if (opts.parser) parser = opts.parser + if (parser.parse) parser = parser.parse + + try { + root = parser(css, opts) + } catch (error) { + this.processed = true + this.error = error + } + + if (root && !root[my]) { + /* c8 ignore next 2 */ + Container.rebuild(root) + } + } + + this.result = new Result(processor, root, opts) + this.helpers = { ...postcss, postcss, result: this.result } + this.plugins = this.processor.plugins.map(plugin => { + if (typeof plugin === 'object' && plugin.prepare) { + return { ...plugin, ...plugin.prepare(this.result) } + } else { + return plugin + } + }) + } + + async() { + if (this.error) return Promise.reject(this.error) + if (this.processed) return Promise.resolve(this.result) + if (!this.processing) { + this.processing = this.runAsync() + } + return this.processing + } + + catch(onRejected) { + return this.async().catch(onRejected) + } + + finally(onFinally) { + return this.async().then(onFinally, onFinally) + } + + getAsyncError() { + throw new Error('Use process(css).then(cb) to work with async plugins') + } + + handleError(error, node) { + let plugin = this.result.lastPlugin + try { + if (node) node.addToError(error) + this.error = error + if (error.name === 'CssSyntaxError' && !error.plugin) { + error.plugin = plugin.postcssPlugin + error.setMessage() + } else if (plugin.postcssVersion) { + if (process.env.NODE_ENV !== 'production') { + let pluginName = plugin.postcssPlugin + let pluginVer = plugin.postcssVersion + let runtimeVer = this.result.processor.version + let a = pluginVer.split('.') + let b = runtimeVer.split('.') + + if (a[0] !== b[0] || parseInt(a[1]) > parseInt(b[1])) { + // eslint-disable-next-line no-console + console.error( + 'Unknown error from PostCSS plugin. Your current PostCSS ' + + 'version is ' + + runtimeVer + + ', but ' + + pluginName + + ' uses ' + + pluginVer + + '. Perhaps this is the source of the error below.' 
+ ) + } + } + } + } catch (err) { + /* c8 ignore next 3 */ + // eslint-disable-next-line no-console + if (console && console.error) console.error(err) + } + return error + } + + prepareVisitors() { + this.listeners = {} + let add = (plugin, type, cb) => { + if (!this.listeners[type]) this.listeners[type] = [] + this.listeners[type].push([plugin, cb]) + } + for (let plugin of this.plugins) { + if (typeof plugin === 'object') { + for (let event in plugin) { + if (!PLUGIN_PROPS[event] && /^[A-Z]/.test(event)) { + throw new Error( + `Unknown event ${event} in ${plugin.postcssPlugin}. ` + + `Try to update PostCSS (${this.processor.version} now).` + ) + } + if (!NOT_VISITORS[event]) { + if (typeof plugin[event] === 'object') { + for (let filter in plugin[event]) { + if (filter === '*') { + add(plugin, event, plugin[event][filter]) + } else { + add( + plugin, + event + '-' + filter.toLowerCase(), + plugin[event][filter] + ) + } + } + } else if (typeof plugin[event] === 'function') { + add(plugin, event, plugin[event]) + } + } + } + } + } + this.hasListener = Object.keys(this.listeners).length > 0 + } + + async runAsync() { + this.plugin = 0 + for (let i = 0; i < this.plugins.length; i++) { + let plugin = this.plugins[i] + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { + try { + await promise + } catch (error) { + throw this.handleError(error) + } + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + let stack = [toStack(root)] + while (stack.length > 0) { + let promise = this.visitTick(stack) + if (isPromise(promise)) { + try { + await promise + } catch (e) { + let node = stack[stack.length - 1].node + throw this.handleError(e, node) + } + } + } + } + + if (this.listeners.OnceExit) { + for (let [plugin, visitor] of this.listeners.OnceExit) { + this.result.lastPlugin = plugin + try { + if (root.type === 'document') { + let roots = root.nodes.map(subRoot => + visitor(subRoot, this.helpers) + ) + + await Promise.all(roots) + } else { + await visitor(root, this.helpers) + } + } catch (e) { + throw this.handleError(e) + } + } + } + } + + this.processed = true + return this.stringify() + } + + runOnRoot(plugin) { + this.result.lastPlugin = plugin + try { + if (typeof plugin === 'object' && plugin.Once) { + if (this.result.root.type === 'document') { + let roots = this.result.root.nodes.map(root => + plugin.Once(root, this.helpers) + ) + + if (isPromise(roots[0])) { + return Promise.all(roots) + } + + return roots + } + + return plugin.Once(this.result.root, this.helpers) + } else if (typeof plugin === 'function') { + return plugin(this.result.root, this.result) + } + } catch (error) { + throw this.handleError(error) + } + } + + stringify() { + if (this.error) throw this.error + if (this.stringified) return this.result + this.stringified = true + + this.sync() + + let opts = this.result.opts + let str = stringify + if (opts.syntax) str = opts.syntax.stringify + if (opts.stringifier) str = opts.stringifier + if (str.stringify) str = str.stringify + + let map = new MapGenerator(str, this.result.root, this.result.opts) + let data = map.generate() + this.result.css = data[0] + this.result.map = data[1] + + return this.result + } + + sync() { + if (this.error) throw this.error + if (this.processed) return this.result + this.processed = true + + if (this.processing) { + throw this.getAsyncError() + } + + for (let plugin of this.plugins) { + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { 
+ throw this.getAsyncError() + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + this.walkSync(root) + } + if (this.listeners.OnceExit) { + if (root.type === 'document') { + for (let subRoot of root.nodes) { + this.visitSync(this.listeners.OnceExit, subRoot) + } + } else { + this.visitSync(this.listeners.OnceExit, root) + } + } + } + + return this.result + } + + then(onFulfilled, onRejected) { + if (process.env.NODE_ENV !== 'production') { + if (!('from' in this.opts)) { + warnOnce( + 'Without `from` option PostCSS could generate wrong source map ' + + 'and will not find Browserslist config. Set it to CSS file path ' + + 'or to `undefined` to prevent this warning.' + ) + } + } + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this.css + } + + visitSync(visitors, node) { + for (let [plugin, visitor] of visitors) { + this.result.lastPlugin = plugin + let promise + try { + promise = visitor(node, this.helpers) + } catch (e) { + throw this.handleError(e, node.proxyOf) + } + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + return true + } + if (isPromise(promise)) { + throw this.getAsyncError() + } + } + } + + visitTick(stack) { + let visit = stack[stack.length - 1] + let { node, visitors } = visit + + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + stack.pop() + return + } + + if (visitors.length > 0 && visit.visitorIndex < visitors.length) { + let [plugin, visitor] = visitors[visit.visitorIndex] + visit.visitorIndex += 1 + if (visit.visitorIndex === visitors.length) { + visit.visitors = [] + visit.visitorIndex = 0 + } + this.result.lastPlugin = plugin + try { + return visitor(node.toProxy(), this.helpers) + } catch (e) { + throw this.handleError(e, node) + } + } + + if (visit.iterator !== 0) { + let iterator = visit.iterator + let child + while ((child = node.nodes[node.indexes[iterator]])) { + node.indexes[iterator] += 1 + if (!child[isClean]) { + child[isClean] = true + stack.push(toStack(child)) + return + } + } + visit.iterator = 0 + delete node.indexes[iterator] + } + + let events = visit.events + while (visit.eventIndex < events.length) { + let event = events[visit.eventIndex] + visit.eventIndex += 1 + if (event === CHILDREN) { + if (node.nodes && node.nodes.length) { + node[isClean] = true + visit.iterator = node.getIterator() + } + return + } else if (this.listeners[event]) { + visit.visitors = this.listeners[event] + return + } + } + stack.pop() + } + + walkSync(node) { + node[isClean] = true + let events = getEvents(node) + for (let event of events) { + if (event === CHILDREN) { + if (node.nodes) { + node.each(child => { + if (!child[isClean]) this.walkSync(child) + }) + } + } else { + let visitors = this.listeners[event] + if (visitors) { + if (this.visitSync(visitors, node.toProxy())) return + } + } + } + } + + warnings() { + return this.sync().warnings() + } +} + +LazyResult.registerPostcss = dependant => { + postcss = dependant +} + +module.exports = LazyResult +LazyResult.default = LazyResult + +Root.registerLazyResult(LazyResult) +Document.registerLazyResult(LazyResult) diff --git a/node_modules/postcss/lib/list.d.ts b/node_modules/postcss/lib/list.d.ts new file mode 100644 index 0000000..e262ad3 --- /dev/null +++ b/node_modules/postcss/lib/list.d.ts @@ -0,0 +1,60 @@ +declare namespace list { + type List = { + /** + * Safely splits comma-separated values (such as those for `transition-*` + * and 
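// ---------------------------------------------------------------------------
// Sketch of the sync()/async() split above (assumes the public `postcss` API;
// plugin names are hypothetical). Reading `.css` forces the synchronous path,
// so a plugin that returns a promise makes LazyResult throw the error from
// getAsyncError() telling you to use process(css).then(...) instead.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const syncPlugin = { postcssPlugin: 'sync-example', Once () {} }
const asyncPlugin = {
  postcssPlugin: 'async-example',
  async Once () { /* returns a promise, so only runAsync() can handle it */ }
}

postcss([syncPlugin]).process('a{}', { from: undefined }).css // works

try {
  postcss([asyncPlugin]).process('a{}', { from: undefined }).css
} catch (e) {
  // synchronous use of an async plugin is rejected here
}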
`background` properties). + * + * ```js + * Once (root, { list }) { + * list.comma('black, linear-gradient(white, black)') + * //=> ['black', 'linear-gradient(white, black)'] + * } + * ``` + * + * @param str Comma-separated values. + * @return Split values. + */ + comma(str: string): string[] + + default: List + + /** + * Safely splits space-separated values (such as those for `background`, + * `border-radius`, and other shorthand properties). + * + * ```js + * Once (root, { list }) { + * list.space('1px calc(10% + 1px)') //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param str Space-separated values. + * @return Split values. + */ + space(str: string): string[] + + /** + * Safely splits values. + * + * ```js + * Once (root, { list }) { + * list.split('1px calc(10% + 1px)', [' ', '\n', '\t']) //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param string separated values. + * @param separators array of separators. + * @param last boolean indicator. + * @return Split values. + */ + split( + string: string, + separators: readonly string[], + last: boolean + ): string[] + } +} + +declare const list: list.List + +export = list diff --git a/node_modules/postcss/lib/list.js b/node_modules/postcss/lib/list.js new file mode 100644 index 0000000..1b31f98 --- /dev/null +++ b/node_modules/postcss/lib/list.js @@ -0,0 +1,58 @@ +'use strict' + +let list = { + comma(string) { + return list.split(string, [','], true) + }, + + space(string) { + let spaces = [' ', '\n', '\t'] + return list.split(string, spaces) + }, + + split(string, separators, last) { + let array = [] + let current = '' + let split = false + + let func = 0 + let inQuote = false + let prevQuote = '' + let escape = false + + for (let letter of string) { + if (escape) { + escape = false + } else if (letter === '\\') { + escape = true + } else if (inQuote) { + if (letter === prevQuote) { + inQuote = false + } + } else if (letter === '"' || letter === "'") { + inQuote = true + prevQuote = letter + } else if (letter === '(') { + func += 1 + } else if (letter === ')') { + if (func > 0) func -= 1 + } else if (func === 0) { + if (separators.includes(letter)) split = true + } + + if (split) { + if (current !== '') array.push(current.trim()) + current = '' + split = false + } else { + current += letter + } + } + + if (last || current !== '') array.push(current.trim()) + return array + } +} + +module.exports = list +list.default = list diff --git a/node_modules/postcss/lib/map-generator.js b/node_modules/postcss/lib/map-generator.js new file mode 100644 index 0000000..89069d3 --- /dev/null +++ b/node_modules/postcss/lib/map-generator.js @@ -0,0 +1,368 @@ +'use strict' + +let { dirname, relative, resolve, sep } = require('path') +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { pathToFileURL } = require('url') + +let Input = require('./input') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(dirname && resolve && relative && sep) + +class MapGenerator { + constructor(stringify, root, opts, cssString) { + this.stringify = stringify + this.mapOpts = opts.map || {} + this.root = root + this.opts = opts + this.css = cssString + this.originalCSS = cssString + this.usesFileUrls = !this.mapOpts.from && this.mapOpts.absolute + + this.memoizedFileURLs = new Map() + this.memoizedPaths = new Map() + this.memoizedURLs = new Map() + } + + addAnnotation() { + let content + + if (this.isInline()) { + content = + 'data:application/json;base64,' + 
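// ---------------------------------------------------------------------------
// Usage sketch for the `list` helpers above (assumes the vendored module
// path; the values are illustrative). split() tracks quotes and parentheses,
// so separators inside `(...)` or strings never split the value.
// ---------------------------------------------------------------------------
const list = require('postcss/lib/list')

list.space('1px calc(10% + 1px)')
//=> ['1px', 'calc(10% + 1px)']

list.comma('black, linear-gradient(white, black)')
//=> ['black', 'linear-gradient(white, black)']

list.split('1px;2px 3px', [';', ' '], false)
//=> ['1px', '2px', '3px']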
this.toBase64(this.map.toString()) + } else if (typeof this.mapOpts.annotation === 'string') { + content = this.mapOpts.annotation + } else if (typeof this.mapOpts.annotation === 'function') { + content = this.mapOpts.annotation(this.opts.to, this.root) + } else { + content = this.outputFile() + '.map' + } + let eol = '\n' + if (this.css.includes('\r\n')) eol = '\r\n' + + this.css += eol + '/*# sourceMappingURL=' + content + ' */' + } + + applyPrevMaps() { + for (let prev of this.previous()) { + let from = this.toUrl(this.path(prev.file)) + let root = prev.root || dirname(prev.file) + let map + + if (this.mapOpts.sourcesContent === false) { + map = new SourceMapConsumer(prev.text) + if (map.sourcesContent) { + map.sourcesContent = null + } + } else { + map = prev.consumer() + } + + this.map.applySourceMap(map, from, this.toUrl(this.path(root))) + } + } + + clearAnnotation() { + if (this.mapOpts.annotation === false) return + + if (this.root) { + let node + for (let i = this.root.nodes.length - 1; i >= 0; i--) { + node = this.root.nodes[i] + if (node.type !== 'comment') continue + if (node.text.startsWith('# sourceMappingURL=')) { + this.root.removeChild(i) + } + } + } else if (this.css) { + this.css = this.css.replace(/\n*\/\*#[\S\s]*?\*\/$/gm, '') + } + } + + generate() { + this.clearAnnotation() + if (pathAvailable && sourceMapAvailable && this.isMap()) { + return this.generateMap() + } else { + let result = '' + this.stringify(this.root, i => { + result += i + }) + return [result] + } + } + + generateMap() { + if (this.root) { + this.generateString() + } else if (this.previous().length === 1) { + let prev = this.previous()[0].consumer() + prev.file = this.outputFile() + this.map = SourceMapGenerator.fromSourceMap(prev, { + ignoreInvalidMapping: true + }) + } else { + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + this.map.addMapping({ + generated: { column: 0, line: 1 }, + original: { column: 0, line: 1 }, + source: this.opts.from + ? 
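// ---------------------------------------------------------------------------
// Sketch of how the map options above surface through the public API
// (assumes the `postcss` package; the file names are hypothetical). With
// `map: { inline: true }` the generated map is appended as the base64
// sourceMappingURL comment built in addAnnotation(), and no separate map
// object is returned.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

postcss([])
  .process('a{color:black}', {
    from: 'app.css',
    to: 'app.out.css',
    map: { inline: true }
  })
  .then(result => {
    result.css.includes('/*# sourceMappingURL=data:application/json;base64,')
    //=> true
    result.map //=> not set; the map lives in the annotation comment
  })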
this.toUrl(this.path(this.opts.from)) + : '' + }) + } + + if (this.isSourcesContent()) this.setSourcesContent() + if (this.root && this.previous().length > 0) this.applyPrevMaps() + if (this.isAnnotation()) this.addAnnotation() + + if (this.isInline()) { + return [this.css] + } else { + return [this.css, this.map] + } + } + + generateString() { + this.css = '' + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + + let line = 1 + let column = 1 + + let noSource = '' + let mapping = { + generated: { column: 0, line: 0 }, + original: { column: 0, line: 0 }, + source: '' + } + + let last, lines + this.stringify(this.root, (str, node, type) => { + this.css += str + + if (node && type !== 'end') { + mapping.generated.line = line + mapping.generated.column = column - 1 + if (node.source && node.source.start) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.start.line + mapping.original.column = node.source.start.column - 1 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + this.map.addMapping(mapping) + } + } + + lines = str.match(/\n/g) + if (lines) { + line += lines.length + last = str.lastIndexOf('\n') + column = str.length - last + } else { + column += str.length + } + + if (node && type !== 'start') { + let p = node.parent || { raws: {} } + let childless = + node.type === 'decl' || (node.type === 'atrule' && !node.nodes) + if (!childless || node !== p.last || p.raws.semicolon) { + if (node.source && node.source.end) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.end.line + mapping.original.column = node.source.end.column - 1 + mapping.generated.line = line + mapping.generated.column = column - 2 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + mapping.generated.line = line + mapping.generated.column = column - 1 + this.map.addMapping(mapping) + } + } + } + }) + } + + isAnnotation() { + if (this.isInline()) { + return true + } + if (typeof this.mapOpts.annotation !== 'undefined') { + return this.mapOpts.annotation + } + if (this.previous().length) { + return this.previous().some(i => i.annotation) + } + return true + } + + isInline() { + if (typeof this.mapOpts.inline !== 'undefined') { + return this.mapOpts.inline + } + + let annotation = this.mapOpts.annotation + if (typeof annotation !== 'undefined' && annotation !== true) { + return false + } + + if (this.previous().length) { + return this.previous().some(i => i.inline) + } + return true + } + + isMap() { + if (typeof this.opts.map !== 'undefined') { + return !!this.opts.map + } + return this.previous().length > 0 + } + + isSourcesContent() { + if (typeof this.mapOpts.sourcesContent !== 'undefined') { + return this.mapOpts.sourcesContent + } + if (this.previous().length) { + return this.previous().some(i => i.withContent()) + } + return true + } + + outputFile() { + if (this.opts.to) { + return this.path(this.opts.to) + } else if (this.opts.from) { + return this.path(this.opts.from) + } else { + return 'to.css' + } + } + + path(file) { + if (this.mapOpts.absolute) return file + if (file.charCodeAt(0) === 60 /* `<` */) return file + if (/^\w+:\/\//.test(file)) return file + let cached = this.memoizedPaths.get(file) + if (cached) return cached + + let from = this.opts.to ? dirname(this.opts.to) : '.' 
+ + if (typeof this.mapOpts.annotation === 'string') { + from = dirname(resolve(from, this.mapOpts.annotation)) + } + + let path = relative(from, file) + this.memoizedPaths.set(file, path) + + return path + } + + previous() { + if (!this.previousMaps) { + this.previousMaps = [] + if (this.root) { + this.root.walk(node => { + if (node.source && node.source.input.map) { + let map = node.source.input.map + if (!this.previousMaps.includes(map)) { + this.previousMaps.push(map) + } + } + }) + } else { + let input = new Input(this.originalCSS, this.opts) + if (input.map) this.previousMaps.push(input.map) + } + } + + return this.previousMaps + } + + setSourcesContent() { + let already = {} + if (this.root) { + this.root.walk(node => { + if (node.source) { + let from = node.source.input.from + if (from && !already[from]) { + already[from] = true + let fromUrl = this.usesFileUrls + ? this.toFileUrl(from) + : this.toUrl(this.path(from)) + this.map.setSourceContent(fromUrl, node.source.input.css) + } + } + }) + } else if (this.css) { + let from = this.opts.from + ? this.toUrl(this.path(this.opts.from)) + : '' + this.map.setSourceContent(from, this.css) + } + } + + sourcePath(node) { + if (this.mapOpts.from) { + return this.toUrl(this.mapOpts.from) + } else if (this.usesFileUrls) { + return this.toFileUrl(node.source.input.from) + } else { + return this.toUrl(this.path(node.source.input.from)) + } + } + + toBase64(str) { + if (Buffer) { + return Buffer.from(str).toString('base64') + } else { + return window.btoa(unescape(encodeURIComponent(str))) + } + } + + toFileUrl(path) { + let cached = this.memoizedFileURLs.get(path) + if (cached) return cached + + if (pathToFileURL) { + let fileURL = pathToFileURL(path).toString() + this.memoizedFileURLs.set(path, fileURL) + + return fileURL + } else { + throw new Error( + '`map.absolute` option is not available in this PostCSS build' + ) + } + } + + toUrl(path) { + let cached = this.memoizedURLs.get(path) + if (cached) return cached + + if (sep === '\\') { + path = path.replace(/\\/g, '/') + } + + let url = encodeURI(path).replace(/[#?]/g, encodeURIComponent) + this.memoizedURLs.set(path, url) + + return url + } +} + +module.exports = MapGenerator diff --git a/node_modules/postcss/lib/no-work-result.d.ts b/node_modules/postcss/lib/no-work-result.d.ts new file mode 100644 index 0000000..094f30a --- /dev/null +++ b/node_modules/postcss/lib/no-work-result.d.ts @@ -0,0 +1,46 @@ +import LazyResult from './lazy-result.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace NoWorkResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { NoWorkResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS transformations. + * This lazy result instance doesn't parse css unless `NoWorkResult#root` or `Result#root` + * are accessed. See the example below for details. + * A `NoWork` instance is returned by `Processor#process` ONLY when no plugins defined. + * + * ```js + * const noWorkResult = postcss().process(css) // No plugins are defined. 
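// ---------------------------------------------------------------------------
// Sketch of the NoWorkResult behaviour described above (assumes the public
// `postcss` API). With no plugins the CSS string is passed through untouched
// and is only parsed when `.root` is read.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const result = postcss().process('a{color:black}', { from: undefined })
result.css               //=> 'a{color:black}' (string passthrough, no parsing)
const root = result.root // parsing happens here, lazily
root.first.selector      //=> 'a'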
+ * // CSS is not parsed
+ * let root = noWorkResult.root // now css is parsed because we accessed the root
+ * ```
+ */
+declare class NoWorkResult_ implements LazyResult<Root> {
+  catch: Promise<Result<Root>>['catch']
+  finally: Promise<Result<Root>>['finally']
+  then: Promise<Result<Root>>['then']
+  get content(): string
+  get css(): string
+  get map(): SourceMap
+  get messages(): Message[]
+  get opts(): ResultOptions
+  get processor(): Processor
+  get root(): Root
+  get [Symbol.toStringTag](): string
+  constructor(processor: Processor, css: string, opts: ResultOptions)
+  async(): Promise<Result<Root>>
+  sync(): Result<Root>
+  toString(): string
+  warnings(): Warning[]
+}
+
+declare class NoWorkResult extends NoWorkResult_ {}
+
+export = NoWorkResult
diff --git a/node_modules/postcss/lib/no-work-result.js b/node_modules/postcss/lib/no-work-result.js
new file mode 100644
index 0000000..dd46182
--- /dev/null
+++ b/node_modules/postcss/lib/no-work-result.js
@@ -0,0 +1,138 @@
+'use strict'
+
+let MapGenerator = require('./map-generator')
+let parse = require('./parse')
+const Result = require('./result')
+let stringify = require('./stringify')
+let warnOnce = require('./warn-once')
+
+class NoWorkResult {
+  get content() {
+    return this.result.css
+  }
+
+  get css() {
+    return this.result.css
+  }
+
+  get map() {
+    return this.result.map
+  }
+
+  get messages() {
+    return []
+  }
+
+  get opts() {
+    return this.result.opts
+  }
+
+  get processor() {
+    return this.result.processor
+  }
+
+  get root() {
+    if (this._root) {
+      return this._root
+    }
+
+    let root
+    let parser = parse
+
+    try {
+      root = parser(this._css, this._opts)
+    } catch (error) {
+      this.error = error
+    }
+
+    if (this.error) {
+      throw this.error
+    } else {
+      this._root = root
+      return root
+    }
+  }
+
+  get [Symbol.toStringTag]() {
+    return 'NoWorkResult'
+  }
+
+  constructor(processor, css, opts) {
+    css = css.toString()
+    this.stringified = false
+
+    this._processor = processor
+    this._css = css
+    this._opts = opts
+    this._map = undefined
+    let root
+
+    let str = stringify
+    this.result = new Result(this._processor, root, this._opts)
+    this.result.css = css
+
+    let self = this
+    Object.defineProperty(this.result, 'root', {
+      get() {
+        return self.root
+      }
+    })
+
+    let map = new MapGenerator(str, root, this._opts, css)
+    if (map.isMap()) {
+      let [generatedCSS, generatedMap] = map.generate()
+      if (generatedCSS) {
+        this.result.css = generatedCSS
+      }
+      if (generatedMap) {
+        this.result.map = generatedMap
+      }
+    } else {
+      map.clearAnnotation()
+      this.result.css = map.css
+    }
+  }
+
+  async() {
+    if (this.error) return Promise.reject(this.error)
+    return Promise.resolve(this.result)
+  }
+
+  catch(onRejected) {
+    return this.async().catch(onRejected)
+  }
+
+  finally(onFinally) {
+    return this.async().then(onFinally, onFinally)
+  }
+
+  sync() {
+    if (this.error) throw this.error
+    return this.result
+  }
+
+  then(onFulfilled, onRejected) {
+    if (process.env.NODE_ENV !== 'production') {
+      if (!('from' in this._opts)) {
+        warnOnce(
+          'Without `from` option PostCSS could generate wrong source map ' +
+            'and will not find Browserslist config. Set it to CSS file path ' +
+            'or to `undefined` to prevent this warning.'
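// ---------------------------------------------------------------------------
// Sketch of the warning path in then() above (assumes the public `postcss`
// API; the file name is hypothetical). Passing `from`, or an explicit
// `from: undefined`, silences the development-mode warning from warnOnce().
// ---------------------------------------------------------------------------
const postcss = require('postcss')

postcss().process('a{}').then(r => r.css)                        // warns once
postcss().process('a{}', { from: 'src/a.css' }).then(r => r.css) // no warning
postcss().process('a{}', { from: undefined }).then(r => r.css)   // no warning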
+ ) + } + } + + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this._css + } + + warnings() { + return [] + } +} + +module.exports = NoWorkResult +NoWorkResult.default = NoWorkResult diff --git a/node_modules/postcss/lib/node.d.ts b/node_modules/postcss/lib/node.d.ts new file mode 100644 index 0000000..a09fe4d --- /dev/null +++ b/node_modules/postcss/lib/node.d.ts @@ -0,0 +1,556 @@ +import AtRule = require('./at-rule.js') +import { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container, { NewChild } from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document from './document.js' +import Input from './input.js' +import { Stringifier, Syntax } from './postcss.js' +import Result from './result.js' +import Root from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +declare namespace Node { + export type ChildNode = AtRule.default | Comment | Declaration | Rule + + export type AnyNode = + | AtRule.default + | Comment + | Declaration + | Document + | Root + | Rule + + export type ChildProps = + | AtRuleProps + | CommentProps + | DeclarationProps + | RuleProps + + export interface Position { + /** + * Source line in file. In contrast to `offset` it starts from 1. + */ + column: number + + /** + * Source column in file. + */ + line: number + + /** + * Source offset in file. It starts from 0. + */ + offset: number + } + + export interface Range { + /** + * End position, exclusive. + */ + end: Position + + /** + * Start position, inclusive. + */ + start: Position + } + + /** + * Source represents an interface for the {@link Node.source} property. + */ + export interface Source { + /** + * The inclusive ending position for the source + * code of a node. + * + * However, `end.offset` of a non `Root` node is the exclusive position. + * See https://github.com/postcss/postcss/pull/1879 for details. + * + * ```js + * const root = postcss.parse('a { color: black }') + * const a = root.first + * const color = a.first + * + * // The offset of `Root` node is the inclusive position + * css.source.end // { line: 1, column: 19, offset: 18 } + * + * // The offset of non `Root` node is the exclusive position + * a.source.end // { line: 1, column: 18, offset: 18 } + * color.source.end // { line: 1, column: 16, offset: 16 } + * ``` + */ + end?: Position + + /** + * The source file from where a node has originated. + */ + input: Input + + /** + * The inclusive starting position for the source + * code of a node. + */ + start?: Position + } + + /** + * Interface represents an interface for an object received + * as parameter by Node class constructor. + */ + export interface NodeProps { + source?: Source + } + + export interface NodeErrorOptions { + /** + * An ending index inside a node's string that should be highlighted as + * source of error. + */ + endIndex?: number + /** + * An index inside a node's string that should be highlighted as source + * of error. + */ + index?: number + /** + * Plugin name that created this error. PostCSS will set it automatically. + */ + plugin?: string + /** + * A word inside a node's string, that should be highlighted as source + * of error. 
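// ---------------------------------------------------------------------------
// Sketch of the Source/Position shapes documented above (assumes the public
// `postcss` API; the file name is hypothetical). line/column are 1-based,
// offset is 0-based, and `end` on non-Root nodes is exclusive.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const root = postcss.parse('a { color: black }', { from: 'a.css' })
const decl = root.first.first
decl.source.input.from //   resolved path ending with 'a.css'
decl.source.start      //=> { line: 1, column: 5, offset: 4 }
decl.source.end.column > decl.source.start.column //=> true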
+ */ + word?: string + } + + // eslint-disable-next-line @typescript-eslint/no-shadow + class Node extends Node_ {} + export { Node as default } +} + +/** + * It represents an abstract class that handles common + * methods for other CSS abstract syntax tree nodes. + * + * Any node that represents CSS selector or value should + * not extend the `Node` class. + */ +declare abstract class Node_ { + /** + * It represents parent of the current node. + * + * ```js + * root.nodes[0].parent === root //=> true + * ``` + */ + parent: Container | Document | undefined + + /** + * It represents unnecessary whitespace and characters present + * in the css source code. + * + * Information to generate byte-to-byte equal node string as it was + * in the origin input. + * + * The properties of the raws object are decided by parser, + * the default parser uses the following properties: + * + * * `before`: the space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + * * `after`: the space symbols after the last child of the node + * to the end of the node. + * * `between`: the symbols between the property and value + * for declarations, selector and `{` for rules, or last parameter + * and `{` for at-rules. + * * `semicolon`: contains true if the last child has + * an (optional) semicolon. + * * `afterName`: the space between the at-rule name and its parameters. + * * `left`: the space symbols between `/*` and the commentโ€™s text. + * * `right`: the space symbols between the commentโ€™s text + * and */. + * - `important`: the content of the important statement, + * if it is not just `!important`. + * + * PostCSS filters out the comments inside selectors, declaration values + * and at-rule parameters but it stores the origin content in raws. + * + * ```js + * const root = postcss.parse('a {\n color:black\n}') + * root.first.first.raws //=> { before: '\n ', between: ':' } + * ``` + */ + raws: any + + /** + * It represents information related to origin of a node and is required + * for generating source maps. + * + * The nodes that are created manually using the public APIs + * provided by PostCSS will have `source` undefined and + * will be absent in the source map. + * + * For this reason, the plugin developer should consider + * duplicating nodes as the duplicate node will have the + * same source as the original node by default or assign + * source to a node created manually. + * + * ```js + * decl.source.input.from //=> '/home/ai/source.css' + * decl.source.start //=> { line: 10, column: 2 } + * decl.source.end //=> { line: 10, column: 12 } + * ``` + * + * ```js + * // Incorrect method, source not specified! + * const prefixed = postcss.decl({ + * prop: '-moz-' + decl.prop, + * value: decl.value + * }) + * + * // Correct method, source is inherited when duplicating. + * const prefixed = decl.clone({ + * prop: '-moz-' + decl.prop + * }) + * ``` + * + * ```js + * if (atrule.name === 'add-link') { + * const rule = postcss.rule({ + * selector: 'a', + * source: atrule.source + * }) + * + * atrule.parent.insertBefore(atrule, rule) + * } + * ``` + */ + source?: Node.Source + + /** + * It represents type of a node in + * an abstract syntax tree. + * + * A type of node helps in identification of a node + * and perform operation based on it's type. 
+ * + * ```js + * const declaration = new Declaration({ + * prop: 'color', + * value: 'black' + * }) + * + * declaration.type //=> 'decl' + * ``` + */ + type: string + + constructor(defaults?: object) + + /** + * Insert new node after current node to current nodeโ€™s parent. + * + * Just alias for `node.parent.insertAfter(node, add)`. + * + * ```js + * decl.after('color: black') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + after( + newNode: Node | Node.ChildProps | readonly Node[] | string | undefined + ): this + + /** + * It assigns properties to an existing node instance. + * + * ```js + * decl.assign({ prop: 'word-wrap', value: 'break-word' }) + * ``` + * + * @param overrides New properties to override the node. + * + * @return `this` for method chaining. + */ + assign(overrides: object): this + + /** + * Insert new node before current node to current nodeโ€™s parent. + * + * Just alias for `node.parent.insertBefore(node, add)`. + * + * ```js + * decl.before('content: ""') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + before( + newNode: Node | Node.ChildProps | readonly Node[] | string | undefined + ): this + + /** + * Clear the code style properties for the node and its children. + * + * ```js + * node.raws.before //=> ' ' + * node.cleanRaws() + * node.raws.before //=> undefined + * ``` + * + * @param keepBetween Keep the `raws.between` symbols. + */ + cleanRaws(keepBetween?: boolean): void + + /** + * It creates clone of an existing node, which includes all the properties + * and their values, that includes `raws` but not `type`. + * + * ```js + * decl.raws.before //=> "\n " + * const cloned = decl.clone({ prop: '-moz-' + decl.prop }) + * cloned.raws.before //=> "\n " + * cloned.toString() //=> -moz-transform: scale(0) + * ``` + * + * @param overrides New properties to override in the clone. + * + * @return Duplicate of the node instance. + */ + clone(overrides?: object): this + + /** + * Shortcut to clone the node and insert the resulting cloned node + * after the current node. + * + * @param overrides New properties to override in the clone. + * @return New node. + */ + cloneAfter(overrides?: object): this + + /** + * Shortcut to clone the node and insert the resulting cloned node + * before the current node. + * + * ```js + * decl.cloneBefore({ prop: '-moz-' + decl.prop }) + * ``` + * + * @param overrides Mew properties to override in the clone. + * + * @return New node + */ + cloneBefore(overrides?: object): this + + /** + * It creates an instance of the class `CssSyntaxError` and parameters passed + * to this method are assigned to the error instance. + * + * The error instance will have description for the + * error, original position of the node in the + * source, showing line and column number. + * + * If any previous map is present, it would be used + * to get original position of the source. + * + * The Previous Map here is referred to the source map + * generated by previous compilation, example: Less, + * Stylus and Sass. + * + * This method returns the error instance instead of + * throwing it. + * + * ```js + * if (!variables[name]) { + * throw decl.error(`Unknown variable ${name}`, { word: name }) + * // CssSyntaxError: postcss-vars:a.sass:4:3: Unknown variable $black + * // color: $black + * // a + * // ^ + * // background: white + * } + * ``` + * + * @param message Description for the error instance. + * @param options Options for the error instance. 
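// ---------------------------------------------------------------------------
// Sketch of the cloning helpers documented above (assumes the public
// `postcss` API; the `-moz-` prefix is just an illustrative rewrite).
// cloneBefore() keeps the original node's `raws` and `source`, so output
// formatting and source maps are preserved.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const root = postcss.parse('a { transform: scale(0) }')
root.walkDecls('transform', decl => {
  decl.cloneBefore({ prop: '-moz-transform' })
})
root.toString()
//=> 'a { -moz-transform: scale(0); transform: scale(0) }'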
+   *
+   * @return Error instance is returned.
+   */
+  error(message: string, options?: Node.NodeErrorOptions): CssSyntaxError
+
+  /**
+   * Returns the next child of the node's parent.
+   * Returns `undefined` if the current node is the last child.
+   *
+   * ```js
+   * if (comment.text === 'delete next') {
+   *   const next = comment.next()
+   *   if (next) {
+   *     next.remove()
+   *   }
+   * }
+   * ```
+   *
+   * @return Next node.
+   */
+  next(): Node.ChildNode | undefined
+
+  /**
+   * Get the position for a word or an index inside the node.
+   *
+   * @param opts Options.
+   * @return Position.
+   */
+  positionBy(opts?: Pick<WarningOptions, 'index' | 'word'>): Node.Position
+
+  /**
+   * Convert string index to line/column.
+   *
+   * @param index The symbol number in the node's string.
+   * @return Symbol position in file.
+   */
+  positionInside(index: number): Node.Position
+
+  /**
+   * Returns the previous child of the node's parent.
+   * Returns `undefined` if the current node is the first child.
+   *
+   * ```js
+   * const annotation = decl.prev()
+   * if (annotation.type === 'comment') {
+   *   readAnnotation(annotation.text)
+   * }
+   * ```
+   *
+   * @return Previous node.
+   */
+  prev(): Node.ChildNode | undefined
+
+  /**
+   * Get the range for a word or start and end index inside the node.
+   * The start index is inclusive; the end index is exclusive.
+   *
+   * @param opts Options.
+   * @return Range.
+   */
+  rangeBy(
+    opts?: Pick<WarningOptions, 'end' | 'endIndex' | 'index' | 'start' | 'word'>
+  ): Node.Range
+
+  /**
+   * Returns a `raws` value. If the node is missing
+   * the code style property (because the node was manually built or cloned),
+   * PostCSS will try to autodetect the code style property by looking
+   * at other nodes in the tree.
+   *
+   * ```js
+   * const root = postcss.parse('a { background: white }')
+   * root.nodes[0].append({ prop: 'color', value: 'black' })
+   * root.nodes[0].nodes[1].raws.before //=> undefined
+   * root.nodes[0].nodes[1].raw('before') //=> ' '
+   * ```
+   *
+   * @param prop Name of code style property.
+   * @param defaultType Name of default value, it can be missed
+   *                    if the value is the same as prop.
+   * @return {string} Code style value.
+   */
+  raw(prop: string, defaultType?: string): string
+
+  /**
+   * It removes the node from its parent and deletes its parent property.
+   *
+   * ```js
+   * if (decl.prop.match(/^-webkit-/)) {
+   *   decl.remove()
+   * }
+   * ```
+   *
+   * @return `this` for method chaining.
+   */
+  remove(): this
+
+  /**
+   * Inserts node(s) before the current node and removes the current node.
+   *
+   * ```js
+   * AtRule: {
+   *   mixin: atrule => {
+   *     atrule.replaceWith(mixinRules[atrule.params])
+   *   }
+   * }
+   * ```
+   *
+   * @param nodes Node(s) to replace current one.
+   * @return Current node to methods chain.
+   */
+  replaceWith(...nodes: NewChild[]): this
+
+  /**
+   * Finds the Root instance of the node's tree.
+   *
+   * ```js
+   * root.nodes[0].nodes[0].root() === root
+   * ```
+   *
+   * @return Root parent.
+   */
+  root(): Root
+
+  /**
+   * Fix circular links on `JSON.stringify()`.
+   *
+   * @return Cleaned object.
+   */
+  toJSON(): object
+
+  /**
+   * It compiles the node to browser readable cascading style sheets string
+   * depending on its type.
+   *
+   * ```js
+   * new Rule({ selector: 'a' }).toString() //=> "a {}"
+   * ```
+   *
+   * @param stringifier A syntax to use in string generation.
+   * @return CSS string of this node.
+   */
+  toString(stringifier?: Stringifier | Syntax): string
+
+  /**
+   * It is a wrapper for {@link Result#warn}, providing convenient
+   * way of generating warnings.
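// ---------------------------------------------------------------------------
// Sketch of warn() with a word target, as documented above (assumes the
// public `postcss` API; the plugin name and the "no float" rule are
// hypothetical). The warning is attached to the matching word in the node.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const warnOnFloat = {
  postcssPlugin: 'example-warn-on-float',
  Declaration (decl, { result }) {
    if (decl.prop === 'float') {
      decl.warn(result, 'Avoid float layouts', { word: 'float' })
    }
  }
}

postcss([warnOnFloat])
  .process('a { float: left }', { from: undefined })
  .then(result => {
    const warning = result.warnings()[0]
    warning.text   //=> 'Avoid float layouts'
    warning.plugin //=> 'example-warn-on-float'
  })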
+ * + * ```js + * Declaration: { + * bad: (decl, { result }) => { + * decl.warn(result, 'Deprecated property: bad') + * } + * } + * ``` + * + * @param result The `Result` instance that will receive the warning. + * @param message Description for the warning. + * @param options Options for the warning. + * + * @return `Warning` instance is returned + */ + warn(result: Result, message: string, options?: WarningOptions): Warning + + /** + * If this node isn't already dirty, marks it and its ancestors as such. This + * indicates to the LazyResult processor that the {@link Root} has been + * modified by the current plugin and may need to be processed again by other + * plugins. + */ + protected markDirty(): void +} + +declare class Node extends Node_ {} + +export = Node diff --git a/node_modules/postcss/lib/node.js b/node_modules/postcss/lib/node.js new file mode 100644 index 0000000..b403b71 --- /dev/null +++ b/node_modules/postcss/lib/node.js @@ -0,0 +1,449 @@ +'use strict' + +let CssSyntaxError = require('./css-syntax-error') +let Stringifier = require('./stringifier') +let stringify = require('./stringify') +let { isClean, my } = require('./symbols') + +function cloneNode(obj, parent) { + let cloned = new obj.constructor() + + for (let i in obj) { + if (!Object.prototype.hasOwnProperty.call(obj, i)) { + /* c8 ignore next 2 */ + continue + } + if (i === 'proxyCache') continue + let value = obj[i] + let type = typeof value + + if (i === 'parent' && type === 'object') { + if (parent) cloned[i] = parent + } else if (i === 'source') { + cloned[i] = value + } else if (Array.isArray(value)) { + cloned[i] = value.map(j => cloneNode(j, cloned)) + } else { + if (type === 'object' && value !== null) value = cloneNode(value) + cloned[i] = value + } + } + + return cloned +} + +function sourceOffset(inputCSS, position) { + // Not all custom syntaxes support `offset` in `source.start` and `source.end` + if (position && typeof position.offset !== 'undefined') { + return position.offset + } + + let column = 1 + let line = 1 + let offset = 0 + + for (let i = 0; i < inputCSS.length; i++) { + if (line === position.line && column === position.column) { + offset = i + break + } + + if (inputCSS[i] === '\n') { + column = 1 + line += 1 + } else { + column += 1 + } + } + + return offset +} + +class Node { + get proxyOf() { + return this + } + + constructor(defaults = {}) { + this.raws = {} + this[isClean] = false + this[my] = true + + for (let name in defaults) { + if (name === 'nodes') { + this.nodes = [] + for (let node of defaults[name]) { + if (typeof node.clone === 'function') { + this.append(node.clone()) + } else { + this.append(node) + } + } + } else { + this[name] = defaults[name] + } + } + } + + addToError(error) { + error.postcssNode = this + if (error.stack && this.source && /\n\s{4}at /.test(error.stack)) { + let s = this.source + error.stack = error.stack.replace( + /\n\s{4}at /, + `$&${s.input.from}:${s.start.line}:${s.start.column}$&` + ) + } + return error + } + + after(add) { + this.parent.insertAfter(this, add) + return this + } + + assign(overrides = {}) { + for (let name in overrides) { + this[name] = overrides[name] + } + return this + } + + before(add) { + this.parent.insertBefore(this, add) + return this + } + + cleanRaws(keepBetween) { + delete this.raws.before + delete this.raws.after + if (!keepBetween) delete this.raws.between + } + + clone(overrides = {}) { + let cloned = cloneNode(this) + for (let name in overrides) { + cloned[name] = overrides[name] + } + return cloned + } + + 
cloneAfter(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertAfter(this, cloned) + return cloned + } + + cloneBefore(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertBefore(this, cloned) + return cloned + } + + error(message, opts = {}) { + if (this.source) { + let { end, start } = this.rangeBy(opts) + return this.source.input.error( + message, + { column: start.column, line: start.line }, + { column: end.column, line: end.line }, + opts + ) + } + return new CssSyntaxError(message) + } + + getProxyProcessor() { + return { + get(node, prop) { + if (prop === 'proxyOf') { + return node + } else if (prop === 'root') { + return () => node.root().toProxy() + } else { + return node[prop] + } + }, + + set(node, prop, value) { + if (node[prop] === value) return true + node[prop] = value + if ( + prop === 'prop' || + prop === 'value' || + prop === 'name' || + prop === 'params' || + prop === 'important' || + /* c8 ignore next */ + prop === 'text' + ) { + node.markDirty() + } + return true + } + } + } + + /* c8 ignore next 3 */ + markClean() { + this[isClean] = true + } + + markDirty() { + if (this[isClean]) { + this[isClean] = false + let next = this + while ((next = next.parent)) { + next[isClean] = false + } + } + } + + next() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index + 1] + } + + positionBy(opts = {}) { + let pos = this.source.start + if (opts.index) { + pos = this.positionInside(opts.index) + } else if (opts.word) { + let inputString = + 'document' in this.source.input + ? this.source.input.document + : this.source.input.css + let stringRepresentation = inputString.slice( + sourceOffset(inputString, this.source.start), + sourceOffset(inputString, this.source.end) + ) + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) pos = this.positionInside(index) + } + return pos + } + + positionInside(index) { + let column = this.source.start.column + let line = this.source.start.line + let inputString = + 'document' in this.source.input + ? this.source.input.document + : this.source.input.css + let offset = sourceOffset(inputString, this.source.start) + let end = offset + index + + for (let i = offset; i < end; i++) { + if (inputString[i] === '\n') { + column = 1 + line += 1 + } else { + column += 1 + } + } + + return { column, line, offset: end } + } + + prev() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index - 1] + } + + rangeBy(opts = {}) { + let inputString = + 'document' in this.source.input + ? this.source.input.document + : this.source.input.css + let start = { + column: this.source.start.column, + line: this.source.start.line, + offset: sourceOffset(inputString, this.source.start) + } + let end = this.source.end + ? { + column: this.source.end.column + 1, + line: this.source.end.line, + offset: + typeof this.source.end.offset === 'number' + ? // `source.end.offset` is exclusive, so we don't need to add 1 + this.source.end.offset + : // Since line/column in this.source.end is inclusive, + // the `sourceOffset(... , this.source.end)` returns an inclusive offset. + // So, we add 1 to convert it to exclusive. 
+ sourceOffset(inputString, this.source.end) + 1 + } + : { + column: start.column + 1, + line: start.line, + offset: start.offset + 1 + } + + if (opts.word) { + let stringRepresentation = inputString.slice( + sourceOffset(inputString, this.source.start), + sourceOffset(inputString, this.source.end) + ) + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) { + start = this.positionInside(index) + end = this.positionInside(index + opts.word.length) + } + } else { + if (opts.start) { + start = { + column: opts.start.column, + line: opts.start.line, + offset: sourceOffset(inputString, opts.start) + } + } else if (opts.index) { + start = this.positionInside(opts.index) + } + + if (opts.end) { + end = { + column: opts.end.column, + line: opts.end.line, + offset: sourceOffset(inputString, opts.end) + } + } else if (typeof opts.endIndex === 'number') { + end = this.positionInside(opts.endIndex) + } else if (opts.index) { + end = this.positionInside(opts.index + 1) + } + } + + if ( + end.line < start.line || + (end.line === start.line && end.column <= start.column) + ) { + end = { + column: start.column + 1, + line: start.line, + offset: start.offset + 1 + } + } + + return { end, start } + } + + raw(prop, defaultType) { + let str = new Stringifier() + return str.raw(this, prop, defaultType) + } + + remove() { + if (this.parent) { + this.parent.removeChild(this) + } + this.parent = undefined + return this + } + + replaceWith(...nodes) { + if (this.parent) { + let bookmark = this + let foundSelf = false + for (let node of nodes) { + if (node === this) { + foundSelf = true + } else if (foundSelf) { + this.parent.insertAfter(bookmark, node) + bookmark = node + } else { + this.parent.insertBefore(bookmark, node) + } + } + + if (!foundSelf) { + this.remove() + } + } + + return this + } + + root() { + let result = this + while (result.parent && result.parent.type !== 'document') { + result = result.parent + } + return result + } + + toJSON(_, inputs) { + let fixed = {} + let emitInputs = inputs == null + inputs = inputs || new Map() + let inputsNextIndex = 0 + + for (let name in this) { + if (!Object.prototype.hasOwnProperty.call(this, name)) { + /* c8 ignore next 2 */ + continue + } + if (name === 'parent' || name === 'proxyCache') continue + let value = this[name] + + if (Array.isArray(value)) { + fixed[name] = value.map(i => { + if (typeof i === 'object' && i.toJSON) { + return i.toJSON(null, inputs) + } else { + return i + } + }) + } else if (typeof value === 'object' && value.toJSON) { + fixed[name] = value.toJSON(null, inputs) + } else if (name === 'source') { + if (value == null) continue + let inputId = inputs.get(value.input) + if (inputId == null) { + inputId = inputsNextIndex + inputs.set(value.input, inputsNextIndex) + inputsNextIndex++ + } + fixed[name] = { + end: value.end, + inputId, + start: value.start + } + } else { + fixed[name] = value + } + } + + if (emitInputs) { + fixed.inputs = [...inputs.keys()].map(input => input.toJSON()) + } + + return fixed + } + + toProxy() { + if (!this.proxyCache) { + this.proxyCache = new Proxy(this, this.getProxyProcessor()) + } + return this.proxyCache + } + + toString(stringifier = stringify) { + if (stringifier.stringify) stringifier = stringifier.stringify + let result = '' + stringifier(this, i => { + result += i + }) + return result + } + + warn(result, text, opts = {}) { + let data = { node: this } + for (let i in opts) data[i] = opts[i] + return result.warn(text, data) + } +} + +module.exports = Node +Node.default = Node diff 
--git a/node_modules/postcss/lib/parse.d.ts b/node_modules/postcss/lib/parse.d.ts new file mode 100644 index 0000000..4c943a4 --- /dev/null +++ b/node_modules/postcss/lib/parse.d.ts @@ -0,0 +1,9 @@ +import { Parser } from './postcss.js' + +interface Parse extends Parser { + default: Parse +} + +declare const parse: Parse + +export = parse diff --git a/node_modules/postcss/lib/parse.js b/node_modules/postcss/lib/parse.js new file mode 100644 index 0000000..00a1037 --- /dev/null +++ b/node_modules/postcss/lib/parse.js @@ -0,0 +1,42 @@ +'use strict' + +let Container = require('./container') +let Input = require('./input') +let Parser = require('./parser') + +function parse(css, opts) { + let input = new Input(css, opts) + let parser = new Parser(input) + try { + parser.parse() + } catch (e) { + if (process.env.NODE_ENV !== 'production') { + if (e.name === 'CssSyntaxError' && opts && opts.from) { + if (/\.scss$/i.test(opts.from)) { + e.message += + '\nYou tried to parse SCSS with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-scss parser' + } else if (/\.sass/i.test(opts.from)) { + e.message += + '\nYou tried to parse Sass with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-sass parser' + } else if (/\.less$/i.test(opts.from)) { + e.message += + '\nYou tried to parse Less with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-less parser' + } + } + } + throw e + } + + return parser.root +} + +module.exports = parse +parse.default = parse + +Container.registerParse(parse) diff --git a/node_modules/postcss/lib/parser.js b/node_modules/postcss/lib/parser.js new file mode 100644 index 0000000..64fb5d8 --- /dev/null +++ b/node_modules/postcss/lib/parser.js @@ -0,0 +1,611 @@ +'use strict' + +let AtRule = require('./at-rule') +let Comment = require('./comment') +let Declaration = require('./declaration') +let Root = require('./root') +let Rule = require('./rule') +let tokenizer = require('./tokenize') + +const SAFE_COMMENT_NEIGHBOR = { + empty: true, + space: true +} + +function findLastWithPosition(tokens) { + for (let i = tokens.length - 1; i >= 0; i--) { + let token = tokens[i] + let pos = token[3] || token[2] + if (pos) return pos + } +} + +class Parser { + constructor(input) { + this.input = input + + this.root = new Root() + this.current = this.root + this.spaces = '' + this.semicolon = false + + this.createTokenizer() + this.root.source = { input, start: { column: 1, line: 1, offset: 0 } } + } + + atrule(token) { + let node = new AtRule() + node.name = token[1].slice(1) + if (node.name === '') { + this.unnamedAtrule(node, token) + } + this.init(node, token[2]) + + let type + let prev + let shift + let last = false + let open = false + let params = [] + let brackets = [] + + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + type = token[0] + + if (type === '(' || type === '[') { + brackets.push(type === '(' ? 
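// ---------------------------------------------------------------------------
// Usage sketch for the parse() entry point above (assumes the public
// `postcss` API; the file name is hypothetical). parse() returns a Root
// whose children can be inspected and re-stringified without a Processor.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

const root = postcss.parse('a { color: black }', { from: 'a.css' })
root.first.type       //=> 'rule'
root.first.selector   //=> 'a'
root.first.first.prop //=> 'color'
root.toString()       //=> 'a { color: black }'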
')' : ']') + } else if (type === '{' && brackets.length > 0) { + brackets.push('}') + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + } + + if (brackets.length === 0) { + if (type === ';') { + node.source.end = this.getPosition(token[2]) + node.source.end.offset++ + this.semicolon = true + break + } else if (type === '{') { + open = true + break + } else if (type === '}') { + if (params.length > 0) { + shift = params.length - 1 + prev = params[shift] + while (prev && prev[0] === 'space') { + prev = params[--shift] + } + if (prev) { + node.source.end = this.getPosition(prev[3] || prev[2]) + node.source.end.offset++ + } + } + this.end(token) + break + } else { + params.push(token) + } + } else { + params.push(token) + } + + if (this.tokenizer.endOfFile()) { + last = true + break + } + } + + node.raws.between = this.spacesAndCommentsFromEnd(params) + if (params.length) { + node.raws.afterName = this.spacesAndCommentsFromStart(params) + this.raw(node, 'params', params) + if (last) { + token = params[params.length - 1] + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + this.spaces = node.raws.between + node.raws.between = '' + } + } else { + node.raws.afterName = '' + node.params = '' + } + + if (open) { + node.nodes = [] + this.current = node + } + } + + checkMissedSemicolon(tokens) { + let colon = this.colon(tokens) + if (colon === false) return + + let founded = 0 + let token + for (let j = colon - 1; j >= 0; j--) { + token = tokens[j] + if (token[0] !== 'space') { + founded += 1 + if (founded === 2) break + } + } + // If the token is a word, e.g. `!important`, `red` or any other valid property's value. + // Then we need to return the colon after that word token. [3] is the "end" colon of that word. + // And because we need it after that one we do +1 to get the next one. + throw this.input.error( + 'Missed semicolon', + token[0] === 'word' ? 
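// ---------------------------------------------------------------------------
// Sketch of the checkMissedSemicolon() failure mode above (assumes the
// public `postcss` API; the file name is hypothetical). A second colon
// inside one declaration's value makes the parser raise a CssSyntaxError.
// ---------------------------------------------------------------------------
const postcss = require('postcss')

try {
  postcss.parse('a { color: black background: white }', { from: 'bad.css' })
} catch (error) {
  error.name   //=> 'CssSyntaxError'
  error.reason //=> 'Missed semicolon'
}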
token[3] + 1 : token[2] + ) + } + + colon(tokens) { + let brackets = 0 + let prev, token, type + for (let [i, element] of tokens.entries()) { + token = element + type = token[0] + + if (type === '(') { + brackets += 1 + } + if (type === ')') { + brackets -= 1 + } + if (brackets === 0 && type === ':') { + if (!prev) { + this.doubleColon(token) + } else if (prev[0] === 'word' && prev[1] === 'progid') { + continue + } else { + return i + } + } + + prev = token + } + return false + } + + comment(token) { + let node = new Comment() + this.init(node, token[2]) + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + + let text = token[1].slice(2, -2) + if (/^\s*$/.test(text)) { + node.text = '' + node.raws.left = text + node.raws.right = '' + } else { + let match = text.match(/^(\s*)([^]*\S)(\s*)$/) + node.text = match[2] + node.raws.left = match[1] + node.raws.right = match[3] + } + } + + createTokenizer() { + this.tokenizer = tokenizer(this.input) + } + + decl(tokens, customProperty) { + let node = new Declaration() + this.init(node, tokens[0][2]) + + let last = tokens[tokens.length - 1] + if (last[0] === ';') { + this.semicolon = true + tokens.pop() + } + + node.source.end = this.getPosition( + last[3] || last[2] || findLastWithPosition(tokens) + ) + node.source.end.offset++ + + while (tokens[0][0] !== 'word') { + if (tokens.length === 1) this.unknownWord(tokens) + node.raws.before += tokens.shift()[1] + } + node.source.start = this.getPosition(tokens[0][2]) + + node.prop = '' + while (tokens.length) { + let type = tokens[0][0] + if (type === ':' || type === 'space' || type === 'comment') { + break + } + node.prop += tokens.shift()[1] + } + + node.raws.between = '' + + let token + while (tokens.length) { + token = tokens.shift() + + if (token[0] === ':') { + node.raws.between += token[1] + break + } else { + if (token[0] === 'word' && /\w/.test(token[1])) { + this.unknownWord([token]) + } + node.raws.between += token[1] + } + } + + if (node.prop[0] === '_' || node.prop[0] === '*') { + node.raws.before += node.prop[0] + node.prop = node.prop.slice(1) + } + + let firstSpaces = [] + let next + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + firstSpaces.push(tokens.shift()) + } + + this.precheckMissedSemicolon(tokens) + + for (let i = tokens.length - 1; i >= 0; i--) { + token = tokens[i] + if (token[1].toLowerCase() === '!important') { + node.important = true + let string = this.stringFrom(tokens, i) + string = this.spacesFromEnd(tokens) + string + if (string !== ' !important') node.raws.important = string + break + } else if (token[1].toLowerCase() === 'important') { + let cache = tokens.slice(0) + let str = '' + for (let j = i; j > 0; j--) { + let type = cache[j][0] + if (str.trim().startsWith('!') && type !== 'space') { + break + } + str = cache.pop()[1] + str + } + if (str.trim().startsWith('!')) { + node.important = true + node.raws.important = str + tokens = cache + } + } + + if (token[0] !== 'space' && token[0] !== 'comment') { + break + } + } + + let hasWord = tokens.some(i => i[0] !== 'space' && i[0] !== 'comment') + + if (hasWord) { + node.raws.between += firstSpaces.map(i => i[1]).join('') + firstSpaces = [] + } + this.raw(node, 'value', firstSpaces.concat(tokens), customProperty) + + if (node.value.includes(':') && !customProperty) { + this.checkMissedSemicolon(tokens) + } + } + + doubleColon(token) { + throw this.input.error( + 'Double colon', + { offset: token[2] }, + { offset: token[2] + 
token[1].length } + ) + } + + emptyRule(token) { + let node = new Rule() + this.init(node, token[2]) + node.selector = '' + node.raws.between = '' + this.current = node + } + + end(token) { + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.semicolon = false + + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.spaces = '' + + if (this.current.parent) { + this.current.source.end = this.getPosition(token[2]) + this.current.source.end.offset++ + this.current = this.current.parent + } else { + this.unexpectedClose(token) + } + } + + endFile() { + if (this.current.parent) this.unclosedBlock() + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.root.source.end = this.getPosition(this.tokenizer.position()) + } + + freeSemicolon(token) { + this.spaces += token[1] + if (this.current.nodes) { + let prev = this.current.nodes[this.current.nodes.length - 1] + if (prev && prev.type === 'rule' && !prev.raws.ownSemicolon) { + prev.raws.ownSemicolon = this.spaces + this.spaces = '' + prev.source.end = this.getPosition(token[2]) + prev.source.end.offset += prev.raws.ownSemicolon.length + } + } + } + + // Helpers + + getPosition(offset) { + let pos = this.input.fromOffset(offset) + return { + column: pos.col, + line: pos.line, + offset + } + } + + init(node, offset) { + this.current.push(node) + node.source = { + input: this.input, + start: this.getPosition(offset) + } + node.raws.before = this.spaces + this.spaces = '' + if (node.type !== 'comment') this.semicolon = false + } + + other(start) { + let end = false + let type = null + let colon = false + let bracket = null + let brackets = [] + let customProperty = start[1].startsWith('--') + + let tokens = [] + let token = start + while (token) { + type = token[0] + tokens.push(token) + + if (type === '(' || type === '[') { + if (!bracket) bracket = token + brackets.push(type === '(' ? 
')' : ']') + } else if (customProperty && colon && type === '{') { + if (!bracket) bracket = token + brackets.push('}') + } else if (brackets.length === 0) { + if (type === ';') { + if (colon) { + this.decl(tokens, customProperty) + return + } else { + break + } + } else if (type === '{') { + this.rule(tokens) + return + } else if (type === '}') { + this.tokenizer.back(tokens.pop()) + end = true + break + } else if (type === ':') { + colon = true + } + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + if (brackets.length === 0) bracket = null + } + + token = this.tokenizer.nextToken() + } + + if (this.tokenizer.endOfFile()) end = true + if (brackets.length > 0) this.unclosedBracket(bracket) + + if (end && colon) { + if (!customProperty) { + while (tokens.length) { + token = tokens[tokens.length - 1][0] + if (token !== 'space' && token !== 'comment') break + this.tokenizer.back(tokens.pop()) + } + } + this.decl(tokens, customProperty) + } else { + this.unknownWord(tokens) + } + } + + parse() { + let token + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + + switch (token[0]) { + case 'space': + this.spaces += token[1] + break + + case ';': + this.freeSemicolon(token) + break + + case '}': + this.end(token) + break + + case 'comment': + this.comment(token) + break + + case 'at-word': + this.atrule(token) + break + + case '{': + this.emptyRule(token) + break + + default: + this.other(token) + break + } + } + this.endFile() + } + + precheckMissedSemicolon(/* tokens */) { + // Hook for Safe Parser + } + + raw(node, prop, tokens, customProperty) { + let token, type + let length = tokens.length + let value = '' + let clean = true + let next, prev + + for (let i = 0; i < length; i += 1) { + token = tokens[i] + type = token[0] + if (type === 'space' && i === length - 1 && !customProperty) { + clean = false + } else if (type === 'comment') { + prev = tokens[i - 1] ? tokens[i - 1][0] : 'empty' + next = tokens[i + 1] ? 
tokens[i + 1][0] : 'empty' + if (!SAFE_COMMENT_NEIGHBOR[prev] && !SAFE_COMMENT_NEIGHBOR[next]) { + if (value.slice(-1) === ',') { + clean = false + } else { + value += token[1] + } + } else { + clean = false + } + } else { + value += token[1] + } + } + if (!clean) { + let raw = tokens.reduce((all, i) => all + i[1], '') + node.raws[prop] = { raw, value } + } + node[prop] = value + } + + rule(tokens) { + tokens.pop() + + let node = new Rule() + this.init(node, tokens[0][2]) + + node.raws.between = this.spacesAndCommentsFromEnd(tokens) + this.raw(node, 'selector', tokens) + this.current = node + } + + spacesAndCommentsFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space' && lastTokenType !== 'comment') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + // Errors + + spacesAndCommentsFromStart(tokens) { + let next + let spaces = '' + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + spaces += tokens.shift()[1] + } + return spaces + } + + spacesFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + stringFrom(tokens, from) { + let result = '' + for (let i = from; i < tokens.length; i++) { + result += tokens[i][1] + } + tokens.splice(from, tokens.length - from) + return result + } + + unclosedBlock() { + let pos = this.current.source.start + throw this.input.error('Unclosed block', pos.line, pos.column) + } + + unclosedBracket(bracket) { + throw this.input.error( + 'Unclosed bracket', + { offset: bracket[2] }, + { offset: bracket[2] + 1 } + ) + } + + unexpectedClose(token) { + throw this.input.error( + 'Unexpected }', + { offset: token[2] }, + { offset: token[2] + 1 } + ) + } + + unknownWord(tokens) { + throw this.input.error( + 'Unknown word ' + tokens[0][1], + { offset: tokens[0][2] }, + { offset: tokens[0][2] + tokens[0][1].length } + ) + } + + unnamedAtrule(node, token) { + throw this.input.error( + 'At-rule without name', + { offset: token[2] }, + { offset: token[2] + token[1].length } + ) + } +} + +module.exports = Parser diff --git a/node_modules/postcss/lib/postcss.d.mts b/node_modules/postcss/lib/postcss.d.mts new file mode 100644 index 0000000..d343f3c --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.mts @@ -0,0 +1,69 @@ +export { + // Type-only exports + AcceptedPlugin, + + AnyNode, + atRule, + AtRule, + AtRuleProps, + Builder, + ChildNode, + ChildProps, + comment, + Comment, + CommentProps, + Container, + ContainerProps, + CssSyntaxError, + decl, + Declaration, + DeclarationProps, + // postcss function / namespace + default, + document, + Document, + DocumentProps, + FilePosition, + fromJSON, + Helpers, + Input, + + JSONHydrator, + // This is a class, but itโ€™s not re-exported. Thatโ€™s why itโ€™s exported as type-only here. + type LazyResult, + list, + Message, + Node, + NodeErrorOptions, + NodeProps, + OldPlugin, + parse, + Parser, + // @ts-expect-error This value exists, but itโ€™s untyped. 
+ plugin, + Plugin, + PluginCreator, + Position, + Postcss, + ProcessOptions, + Processor, + Result, + root, + Root, + RootProps, + rule, + Rule, + RuleProps, + Source, + SourceMap, + SourceMapOptions, + Stringifier, + // Value exports from postcss.mjs + stringify, + Syntax, + TransformCallback, + Transformer, + Warning, + + WarningOptions +} from './postcss.js' diff --git a/node_modules/postcss/lib/postcss.d.ts b/node_modules/postcss/lib/postcss.d.ts new file mode 100644 index 0000000..c5e3605 --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.ts @@ -0,0 +1,458 @@ +import { RawSourceMap, SourceMapGenerator } from 'source-map-js' + +import AtRule, { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container, { ContainerProps, NewChild } from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document, { DocumentProps } from './document.js' +import Input, { FilePosition } from './input.js' +import LazyResult from './lazy-result.js' +import list from './list.js' +import Node, { + AnyNode, + ChildNode, + ChildProps, + NodeErrorOptions, + NodeProps, + Position, + Source +} from './node.js' +import Processor from './processor.js' +import Result, { Message } from './result.js' +import Root, { RootProps } from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +type DocumentProcessor = ( + document: Document, + helper: postcss.Helpers +) => Promise | void +type RootProcessor = ( + root: Root, + helper: postcss.Helpers +) => Promise | void +type DeclarationProcessor = ( + decl: Declaration, + helper: postcss.Helpers +) => Promise | void +type RuleProcessor = ( + rule: Rule, + helper: postcss.Helpers +) => Promise | void +type AtRuleProcessor = ( + atRule: AtRule, + helper: postcss.Helpers +) => Promise | void +type CommentProcessor = ( + comment: Comment, + helper: postcss.Helpers +) => Promise | void + +interface Processors { + /** + * Will be called on all`AtRule` nodes. + * + * Will be called again on node or children changes. + */ + AtRule?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `AtRule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + AtRuleExit?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `Comment` nodes. + * + * Will be called again on node or children changes. + */ + Comment?: CommentProcessor + + /** + * Will be called on all `Comment` nodes after listeners + * for `Comment` event. + * + * Will be called again on node or children changes. + */ + CommentExit?: CommentProcessor + + /** + * Will be called on all `Declaration` nodes after listeners + * for `Declaration` event. + * + * Will be called again on node or children changes. + */ + Declaration?: { [prop: string]: DeclarationProcessor } | DeclarationProcessor + + /** + * Will be called on all `Declaration` nodes. + * + * Will be called again on node or children changes. + */ + DeclarationExit?: + | { [prop: string]: DeclarationProcessor } + | DeclarationProcessor + + /** + * Will be called on `Document` node. + * + * Will be called again on children changes. + */ + Document?: DocumentProcessor + + /** + * Will be called on `Document` node, when all children will be processed. + * + * Will be called again on children changes. 
+ */ + DocumentExit?: DocumentProcessor + + /** + * Will be called on `Root` node once. + */ + Once?: RootProcessor + + /** + * Will be called on `Root` node once, when all children will be processed. + */ + OnceExit?: RootProcessor + + /** + * Will be called on `Root` node. + * + * Will be called again on children changes. + */ + Root?: RootProcessor + + /** + * Will be called on `Root` node, when all children will be processed. + * + * Will be called again on children changes. + */ + RootExit?: RootProcessor + + /** + * Will be called on all `Rule` nodes. + * + * Will be called again on node or children changes. + */ + Rule?: RuleProcessor + + /** + * Will be called on all `Rule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + RuleExit?: RuleProcessor +} + +declare namespace postcss { + export { + AnyNode, + AtRule, + AtRuleProps, + ChildNode, + ChildProps, + Comment, + CommentProps, + Container, + ContainerProps, + CssSyntaxError, + Declaration, + DeclarationProps, + Document, + DocumentProps, + FilePosition, + Input, + LazyResult, + list, + Message, + NewChild, + Node, + NodeErrorOptions, + NodeProps, + Position, + Processor, + Result, + Root, + RootProps, + Rule, + RuleProps, + Source, + Warning, + WarningOptions + } + + export type SourceMap = { + toJSON(): RawSourceMap + } & SourceMapGenerator + + export type Helpers = { postcss: Postcss; result: Result } & Postcss + + export interface Plugin extends Processors { + postcssPlugin: string + prepare?: (result: Result) => Processors + } + + export interface PluginCreator { + (opts?: PluginOptions): Plugin | Processor + postcss: true + } + + export interface Transformer extends TransformCallback { + postcssPlugin: string + postcssVersion: string + } + + export interface TransformCallback { + (root: Root, result: Result): Promise | void + } + + export interface OldPlugin extends Transformer { + (opts?: T): Transformer + postcss: Transformer + } + + export type AcceptedPlugin = + | { + postcss: Processor | TransformCallback + } + | OldPlugin + | Plugin + | PluginCreator + | Processor + | TransformCallback + + export interface Parser { + ( + css: { toString(): string } | string, + opts?: Pick + ): RootNode + } + + export interface Builder { + (part: string, node?: AnyNode, type?: 'end' | 'start'): void + } + + export interface Stringifier { + (node: AnyNode, builder: Builder): void + } + + export interface JSONHydrator { + (data: object): Node + (data: object[]): Node[] + } + + export interface Syntax { + /** + * Function to generate AST by string. + */ + parse?: Parser + + /** + * Class to generate string by AST. + */ + stringify?: Stringifier + } + + export interface SourceMapOptions { + /** + * Use absolute path in generated source map. + */ + absolute?: boolean + + /** + * Indicates that PostCSS should add annotation comments to the CSS. + * By default, PostCSS will always add a comment with a path + * to the source map. PostCSS will not add annotations to CSS files + * that do not contain any comments. + * + * By default, PostCSS presumes that you want to save the source map as + * `opts.to + '.map'` and will use this path in the annotation comment. + * A different path can be set by providing a string value for annotation. + * + * If you have set `inline: true`, annotation cannot be disabled. + */ + annotation?: ((file: string, root: Root) => string) | boolean | string + + /** + * Override `from` in mapโ€™s sources. 
+ */ + from?: string + + /** + * Indicates that the source map should be embedded in the output CSS + * as a Base64-encoded comment. By default, it is `true`. + * But if all previous maps are external, not inline, PostCSS will not embed + * the map even if you do not set this option. + * + * If you have an inline source map, the result.map property will be empty, + * as the source map will be contained within the text of `result.css`. + */ + inline?: boolean + + /** + * Source map content from a previous processing step (e.g., Sass). + * + * PostCSS will try to read the previous source map + * automatically (based on comments within the source CSS), but you can use + * this option to identify it manually. + * + * If desired, you can omit the previous map with prev: `false`. + */ + prev?: ((file: string) => string) | boolean | object | string + + /** + * Indicates that PostCSS should set the origin content (e.g., Sass source) + * of the source map. By default, it is true. But if all previous maps do not + * contain sources content, PostCSS will also leave it out even if you + * do not set this option. + */ + sourcesContent?: boolean + } + + export interface ProcessOptions { + /** + * Input file if it is not simple CSS file, but HTML with + + `; +} + +const ERR_LOAD_URL = "ERR_LOAD_URL"; +const ERR_LOAD_PUBLIC_URL = "ERR_LOAD_PUBLIC_URL"; +const ERR_DENIED_ID = "ERR_DENIED_ID"; +const debugLoad = createDebugger("vite:load"); +const debugTransform = createDebugger("vite:transform"); +const debugCache$1 = createDebugger("vite:cache"); +function transformRequest(environment, url, options = {}) { + if (!options.ssr) { + options = { ...options, ssr: environment.config.consumer === "server" }; + } + if (environment._closing && environment.config.dev.recoverable) + throwClosedServerError(); + const cacheKey = `${options.html ? "html:" : ""}${url}`; + const timestamp = Date.now(); + const pending = environment._pendingRequests.get(cacheKey); + if (pending) { + return environment.moduleGraph.getModuleByUrl(removeTimestampQuery(url)).then((module) => { + if (!module || pending.timestamp > module.lastInvalidationTimestamp) { + return pending.request; + } else { + pending.abort(); + return transformRequest(environment, url, options); + } + }); + } + const request = doTransform(environment, url, options, timestamp); + let cleared = false; + const clearCache = () => { + if (!cleared) { + environment._pendingRequests.delete(cacheKey); + cleared = true; + } + }; + environment._pendingRequests.set(cacheKey, { + request, + timestamp, + abort: clearCache + }); + return request.finally(clearCache); +} +async function doTransform(environment, url, options, timestamp) { + url = removeTimestampQuery(url); + const { pluginContainer } = environment; + let module = await environment.moduleGraph.getModuleByUrl(url); + if (module) { + const cached = await getCachedTransformResult( + environment, + url, + module, + timestamp + ); + if (cached) return cached; + } + const resolved = module ? void 0 : await pluginContainer.resolveId(url, void 0) ?? void 0; + const id = module?.id ?? resolved?.id ?? 
url; + module ??= environment.moduleGraph.getModuleById(id); + if (module) { + await environment.moduleGraph._ensureEntryFromUrl(url, void 0, resolved); + const cached = await getCachedTransformResult( + environment, + url, + module, + timestamp + ); + if (cached) return cached; + } + const result = loadAndTransform( + environment, + id, + url, + options, + timestamp, + module, + resolved + ); + const { depsOptimizer } = environment; + if (!depsOptimizer?.isOptimizedDepFile(id)) { + environment._registerRequestProcessing(id, () => result); + } + return result; +} +async function getCachedTransformResult(environment, url, module, timestamp) { + const prettyUrl = debugCache$1 ? prettifyUrl(url, environment.config.root) : ""; + const softInvalidatedTransformResult = await handleModuleSoftInvalidation( + environment, + module, + timestamp + ); + if (softInvalidatedTransformResult) { + debugCache$1?.(`[memory-hmr] ${prettyUrl}`); + return softInvalidatedTransformResult; + } + const cached = module.transformResult; + if (cached) { + debugCache$1?.(`[memory] ${prettyUrl}`); + return cached; + } +} +async function loadAndTransform(environment, id, url, options, timestamp, mod, resolved) { + const { config, pluginContainer, logger } = environment; + const prettyUrl = debugLoad || debugTransform ? prettifyUrl(url, config.root) : ""; + const moduleGraph = environment.moduleGraph; + if (options.allowId && !options.allowId(id)) { + const err = new Error(`Denied ID ${id}`); + err.code = ERR_DENIED_ID; + throw err; + } + let code = null; + let map = null; + const loadStart = debugLoad ? performance$1.now() : 0; + const loadResult = await pluginContainer.load(id); + if (loadResult == null) { + const file = cleanUrl(id); + if (options.html && !id.endsWith(".html")) { + return null; + } + if (environment.config.consumer === "server" || isFileLoadingAllowed(environment.getTopLevelConfig(), file)) { + try { + code = await fsp.readFile(file, "utf-8"); + debugLoad?.(`${timeFrom(loadStart)} [fs] ${prettyUrl}`); + } catch (e) { + if (e.code !== "ENOENT") { + if (e.code === "EISDIR") { + e.message = `${e.message} ${file}`; + } + throw e; + } + } + if (code != null && environment.pluginContainer.watcher) { + ensureWatchedFile( + environment.pluginContainer.watcher, + file, + config.root + ); + } + } + if (code) { + try { + const extracted = await extractSourcemapFromFile(code, file); + if (extracted) { + code = extracted.code; + map = extracted.map; + } + } catch (e) { + logger.warn(`Failed to load source map for ${file}. +${e}`, { + timestamp: true + }); + } + } + } else { + debugLoad?.(`${timeFrom(loadStart)} [plugin] ${prettyUrl}`); + if (isObject$1(loadResult)) { + code = loadResult.code; + map = loadResult.map; + } else { + code = loadResult; + } + } + if (code == null) { + const isPublicFile = checkPublicFile(url, environment.getTopLevelConfig()); + let publicDirName = path$b.relative(config.root, config.publicDir); + if (publicDirName[0] !== ".") publicDirName = "/" + publicDirName; + const msg = isPublicFile ? `This file is in ${publicDirName} and will be copied as-is during build without going through the plugin transforms, and therefore should not be imported from source code. It can only be referenced via HTML tags.` : `Does the file exist?`; + const importerMod = moduleGraph.idToModuleMap.get(id)?.importers.values().next().value; + const importer = importerMod?.file || importerMod?.url; + const err = new Error( + `Failed to load url ${url} (resolved id: ${id})${importer ? ` in ${importer}` : ""}. 
${msg}` + ); + err.code = isPublicFile ? ERR_LOAD_PUBLIC_URL : ERR_LOAD_URL; + throw err; + } + if (environment._closing && environment.config.dev.recoverable) + throwClosedServerError(); + mod ??= await moduleGraph._ensureEntryFromUrl(url, void 0, resolved); + const transformStart = debugTransform ? performance$1.now() : 0; + const transformResult = await pluginContainer.transform(code, id, { + inMap: map + }); + const originalCode = code; + if (transformResult.code === originalCode) { + debugTransform?.( + timeFrom(transformStart) + colors$1.dim(` [skipped] ${prettyUrl}`) + ); + } else { + debugTransform?.(`${timeFrom(transformStart)} ${prettyUrl}`); + code = transformResult.code; + map = transformResult.map; + } + let normalizedMap; + if (typeof map === "string") { + normalizedMap = JSON.parse(map); + } else if (map) { + normalizedMap = map; + } else { + normalizedMap = null; + } + if (normalizedMap && "version" in normalizedMap && mod.file) { + if (normalizedMap.mappings) { + await injectSourcesContent(normalizedMap, mod.file, logger); + } + const sourcemapPath = `${mod.file}.map`; + applySourcemapIgnoreList( + normalizedMap, + sourcemapPath, + config.server.sourcemapIgnoreList, + logger + ); + if (path$b.isAbsolute(mod.file)) { + let modDirname; + for (let sourcesIndex = 0; sourcesIndex < normalizedMap.sources.length; ++sourcesIndex) { + const sourcePath = normalizedMap.sources[sourcesIndex]; + if (sourcePath) { + if (path$b.isAbsolute(sourcePath)) { + modDirname ??= path$b.dirname(mod.file); + normalizedMap.sources[sourcesIndex] = path$b.relative( + modDirname, + sourcePath + ); + } + } + } + } + } + if (environment._closing && environment.config.dev.recoverable) + throwClosedServerError(); + const topLevelConfig = environment.getTopLevelConfig(); + const result = environment.config.dev.moduleRunnerTransform ? await ssrTransform(code, normalizedMap, url, originalCode, { + json: { + stringify: topLevelConfig.json.stringify === true && topLevelConfig.json.namedExports !== true + } + }) : { + code, + map: normalizedMap, + etag: getEtag(code, { weak: true }) + }; + if (timestamp > mod.lastInvalidationTimestamp) + moduleGraph.updateModuleTransformResult(mod, result); + return result; +} +async function handleModuleSoftInvalidation(environment, mod, timestamp) { + const transformResult = mod.invalidationState; + mod.invalidationState = void 0; + if (!transformResult || transformResult === "HARD_INVALIDATED") return; + if (mod.transformResult) { + throw new Error( + `Internal server error: Soft-invalidated module "${mod.url}" should not have existing transform result` + ); + } + let result; + if (transformResult.ssr) { + result = transformResult; + } else { + await init; + const source = transformResult.code; + const s = new MagicString(source); + const [imports] = parse$d(source, mod.id || void 0); + for (const imp of imports) { + let rawUrl = source.slice(imp.s, imp.e); + if (rawUrl === "import.meta") continue; + const hasQuotes = rawUrl[0] === '"' || rawUrl[0] === "'"; + if (hasQuotes) { + rawUrl = rawUrl.slice(1, -1); + } + const urlWithoutTimestamp = removeTimestampQuery(rawUrl); + const hmrUrl = unwrapId$1( + stripBase( + removeImportQuery(urlWithoutTimestamp), + environment.config.base + ) + ); + for (const importedMod of mod.importedModules) { + if (importedMod.url !== hmrUrl) continue; + if (importedMod.lastHMRTimestamp > 0) { + const replacedUrl = injectQuery( + urlWithoutTimestamp, + `t=${importedMod.lastHMRTimestamp}` + ); + const start = hasQuotes ? 
imp.s + 1 : imp.s; + const end = hasQuotes ? imp.e - 1 : imp.e; + s.overwrite(start, end, replacedUrl); + } + if (imp.d === -1 && environment.config.dev.preTransformRequests) { + environment.warmupRequest(hmrUrl); + } + break; + } + } + const code = s.toString(); + result = { + ...transformResult, + code, + etag: getEtag(code, { weak: true }) + }; + } + if (timestamp > mod.lastInvalidationTimestamp) + environment.moduleGraph.updateModuleTransformResult(mod, result); + return result; +} + +const ALLOWED_META_NAME = [ + "msapplication-tileimage", + "msapplication-square70x70logo", + "msapplication-square150x150logo", + "msapplication-wide310x150logo", + "msapplication-square310x310logo", + "msapplication-config", + "twitter:image" +]; +const ALLOWED_META_PROPERTY = [ + "og:image", + "og:image:url", + "og:image:secure_url", + "og:audio", + "og:audio:secure_url", + "og:video", + "og:video:secure_url" +]; +const DEFAULT_HTML_ASSET_SOURCES = { + audio: { + srcAttributes: ["src"] + }, + embed: { + srcAttributes: ["src"] + }, + img: { + srcAttributes: ["src"], + srcsetAttributes: ["srcset"] + }, + image: { + srcAttributes: ["href", "xlink:href"] + }, + input: { + srcAttributes: ["src"] + }, + link: { + srcAttributes: ["href"], + srcsetAttributes: ["imagesrcset"] + }, + object: { + srcAttributes: ["data"] + }, + source: { + srcAttributes: ["src"], + srcsetAttributes: ["srcset"] + }, + track: { + srcAttributes: ["src"] + }, + use: { + srcAttributes: ["href", "xlink:href"] + }, + video: { + srcAttributes: ["src", "poster"] + }, + meta: { + srcAttributes: ["content"], + filter({ attributes }) { + if (attributes.name && ALLOWED_META_NAME.includes(attributes.name.trim().toLowerCase())) { + return true; + } + if (attributes.property && ALLOWED_META_PROPERTY.includes(attributes.property.trim().toLowerCase())) { + return true; + } + return false; + } + } +}; +function getNodeAssetAttributes(node) { + const matched = DEFAULT_HTML_ASSET_SOURCES[node.nodeName]; + if (!matched) return []; + const attributes = {}; + for (const attr of node.attrs) { + attributes[getAttrKey(attr)] = attr.value; + } + if ("vite-ignore" in attributes) { + return [ + { + type: "remove", + key: "vite-ignore", + value: "", + attributes, + location: node.sourceCodeLocation.attrs["vite-ignore"] + } + ]; + } + const actions = []; + function handleAttributeKey(key, type) { + const value = attributes[key]; + if (!value) return; + if (matched.filter && !matched.filter({ key, value, attributes })) return; + const location = node.sourceCodeLocation.attrs[key]; + actions.push({ type, key, value, attributes, location }); + } + matched.srcAttributes?.forEach((key) => handleAttributeKey(key, "src")); + matched.srcsetAttributes?.forEach((key) => handleAttributeKey(key, "srcset")); + return actions; +} +function getAttrKey(attr) { + return attr.prefix === void 0 ? 
attr.name : `${attr.prefix}:${attr.name}`; +} + +const modulePreloadPolyfillId = "vite/modulepreload-polyfill"; +const resolvedModulePreloadPolyfillId = "\0" + modulePreloadPolyfillId + ".js"; +function modulePreloadPolyfillPlugin(config) { + let polyfillString; + return { + name: "vite:modulepreload-polyfill", + resolveId: { + handler(id) { + if (id === modulePreloadPolyfillId) { + return resolvedModulePreloadPolyfillId; + } + } + }, + load: { + handler(id) { + if (id === resolvedModulePreloadPolyfillId) { + if (config.command !== "build" || this.environment.config.consumer !== "client") { + return ""; + } + if (!polyfillString) { + polyfillString = `${isModernFlag}&&(${polyfill.toString()}());`; + } + return { code: polyfillString, moduleSideEffects: true }; + } + } + } + }; +} +function polyfill() { + const relList = document.createElement("link").relList; + if (relList && relList.supports && relList.supports("modulepreload")) { + return; + } + for (const link of document.querySelectorAll('link[rel="modulepreload"]')) { + processPreload(link); + } + new MutationObserver((mutations) => { + for (const mutation of mutations) { + if (mutation.type !== "childList") { + continue; + } + for (const node of mutation.addedNodes) { + if (node.tagName === "LINK" && node.rel === "modulepreload") + processPreload(node); + } + } + }).observe(document, { childList: true, subtree: true }); + function getFetchOpts(link) { + const fetchOpts = {}; + if (link.integrity) fetchOpts.integrity = link.integrity; + if (link.referrerPolicy) fetchOpts.referrerPolicy = link.referrerPolicy; + if (link.crossOrigin === "use-credentials") + fetchOpts.credentials = "include"; + else if (link.crossOrigin === "anonymous") fetchOpts.credentials = "omit"; + else fetchOpts.credentials = "same-origin"; + return fetchOpts; + } + function processPreload(link) { + if (link.ep) + return; + link.ep = true; + const fetchOpts = getFetchOpts(link); + fetch(link.href, fetchOpts); + } +} + +const htmlProxyRE$1 = /[?&]html-proxy=?(?:&inline-css)?(?:&style-attr)?&index=(\d+)\.(?:js|css)$/; +const isHtmlProxyRE = /[?&]html-proxy\b/; +const inlineCSSRE$1 = /__VITE_INLINE_CSS__([a-z\d]{8}_\d+)__/g; +const inlineImportRE = /(?]*type\s*=\s*(?:"importmap"|'importmap'|importmap)[^>]*>.*?<\/script>/is; +const moduleScriptRE = /[ \t]*]*type\s*=\s*(?:"module"|'module'|module)[^>]*>/i; +const modulePreloadLinkRE = /[ \t]*]*rel\s*=\s*(?:"modulepreload"|'modulepreload'|modulepreload)[\s\S]*?\/>/i; +const importMapAppendRE = new RegExp( + [moduleScriptRE, modulePreloadLinkRE].map((r) => r.source).join("|"), + "i" +); +const isHTMLProxy = (id) => isHtmlProxyRE.test(id); +const isHTMLRequest = (request) => htmlLangRE.test(request); +const htmlProxyMap = /* @__PURE__ */ new WeakMap(); +const htmlProxyResult = /* @__PURE__ */ new Map(); +function htmlInlineProxyPlugin(config) { + htmlProxyMap.set(config, /* @__PURE__ */ new Map()); + return { + name: "vite:html-inline-proxy", + resolveId: { + handler(id) { + if (isHTMLProxy(id)) { + return id; + } + } + }, + load: { + handler(id) { + const proxyMatch = htmlProxyRE$1.exec(id); + if (proxyMatch) { + const index = Number(proxyMatch[1]); + const file = cleanUrl(id); + const url = file.replace(normalizePath$3(config.root), ""); + const result = htmlProxyMap.get(config).get(url)?.[index]; + if (result) { + return { ...result, moduleSideEffects: true }; + } else { + throw new Error(`No matching HTML proxy module found from ${id}`); + } + } + } + } + }; +} +function addToHTMLProxyCache(config, filePath, index, 
result) { + if (!htmlProxyMap.get(config)) { + htmlProxyMap.set(config, /* @__PURE__ */ new Map()); + } + if (!htmlProxyMap.get(config).get(filePath)) { + htmlProxyMap.get(config).set(filePath, []); + } + htmlProxyMap.get(config).get(filePath)[index] = result; +} +function addToHTMLProxyTransformResult(hash, code) { + htmlProxyResult.set(hash, code); +} +const noInlineLinkRels = /* @__PURE__ */ new Set([ + "icon", + "apple-touch-icon", + "apple-touch-startup-image", + "manifest" +]); +const isAsyncScriptMap = /* @__PURE__ */ new WeakMap(); +function nodeIsElement(node) { + return node.nodeName[0] !== "#"; +} +function traverseNodes(node, visitor) { + if (node.nodeName === "template") { + node = node.content; + } + visitor(node); + if (nodeIsElement(node) || node.nodeName === "#document" || node.nodeName === "#document-fragment") { + node.childNodes.forEach((childNode) => traverseNodes(childNode, visitor)); + } +} +async function traverseHtml(html, filePath, visitor) { + const { parse } = await import('./dep-CvfTChi5.js'); + const ast = parse(html, { + scriptingEnabled: false, + // parse inside