From be1ad3a2a372670d65e3679bcbe5d6f8ba83b31d Mon Sep 17 00:00:00 2001 From: leca Date: Fri, 19 Jul 2024 21:05:41 +0300 Subject: [PATCH] first commit --- node_modules/.bin/nodemon | 1 + node_modules/.bin/nodetouch | 1 + node_modules/.bin/semver | 1 + node_modules/.package-lock.json | 440 +++++++ node_modules/anymatch/LICENSE | 15 + node_modules/anymatch/README.md | 87 ++ node_modules/anymatch/index.d.ts | 20 + node_modules/anymatch/index.js | 104 ++ node_modules/anymatch/package.json | 48 + .../balanced-match/.github/FUNDING.yml | 2 + node_modules/balanced-match/LICENSE.md | 21 + node_modules/balanced-match/README.md | 97 ++ node_modules/balanced-match/index.js | 62 + node_modules/balanced-match/package.json | 48 + .../binary-extensions/binary-extensions.json | 263 ++++ .../binary-extensions.json.d.ts | 3 + node_modules/binary-extensions/index.d.ts | 14 + node_modules/binary-extensions/index.js | 1 + node_modules/binary-extensions/license | 10 + node_modules/binary-extensions/package.json | 40 + node_modules/binary-extensions/readme.md | 25 + node_modules/brace-expansion/LICENSE | 21 + node_modules/brace-expansion/README.md | 129 ++ node_modules/brace-expansion/index.js | 201 +++ node_modules/brace-expansion/package.json | 47 + node_modules/braces/LICENSE | 21 + node_modules/braces/README.md | 586 +++++++++ node_modules/braces/index.js | 170 +++ node_modules/braces/lib/compile.js | 60 + node_modules/braces/lib/constants.js | 57 + node_modules/braces/lib/expand.js | 113 ++ node_modules/braces/lib/parse.js | 331 +++++ node_modules/braces/lib/stringify.js | 32 + node_modules/braces/lib/utils.js | 122 ++ node_modules/braces/package.json | 77 ++ node_modules/chokidar/LICENSE | 21 + node_modules/chokidar/README.md | 308 +++++ node_modules/chokidar/index.js | 973 +++++++++++++++ node_modules/chokidar/lib/constants.js | 66 + node_modules/chokidar/lib/fsevents-handler.js | 526 ++++++++ node_modules/chokidar/lib/nodefs-handler.js | 654 ++++++++++ node_modules/chokidar/package.json | 70 ++ node_modules/chokidar/types/index.d.ts | 192 +++ node_modules/concat-map/.travis.yml | 4 + node_modules/concat-map/LICENSE | 18 + node_modules/concat-map/README.markdown | 62 + node_modules/concat-map/example/map.js | 6 + node_modules/concat-map/index.js | 13 + node_modules/concat-map/package.json | 43 + node_modules/concat-map/test/map.js | 39 + node_modules/debug/LICENSE | 20 + node_modules/debug/README.md | 481 ++++++++ node_modules/debug/package.json | 60 + node_modules/debug/src/browser.js | 269 ++++ node_modules/debug/src/common.js | 274 +++++ node_modules/debug/src/index.js | 10 + node_modules/debug/src/node.js | 263 ++++ node_modules/fill-range/LICENSE | 21 + node_modules/fill-range/README.md | 237 ++++ node_modules/fill-range/index.js | 248 ++++ node_modules/fill-range/package.json | 74 ++ node_modules/glob-parent/CHANGELOG.md | 110 ++ node_modules/glob-parent/LICENSE | 15 + node_modules/glob-parent/README.md | 137 +++ node_modules/glob-parent/index.js | 42 + node_modules/glob-parent/package.json | 48 + node_modules/has-flag/index.js | 8 + node_modules/has-flag/license | 9 + node_modules/has-flag/package.json | 44 + node_modules/has-flag/readme.md | 70 ++ node_modules/ignore-by-default/LICENSE | 14 + node_modules/ignore-by-default/README.md | 26 + node_modules/ignore-by-default/index.js | 12 + node_modules/ignore-by-default/package.json | 34 + node_modules/is-binary-path/index.d.ts | 17 + node_modules/is-binary-path/index.js | 7 + node_modules/is-binary-path/license | 9 + 
node_modules/is-binary-path/package.json | 40 + node_modules/is-binary-path/readme.md | 34 + node_modules/is-extglob/LICENSE | 21 + node_modules/is-extglob/README.md | 107 ++ node_modules/is-extglob/index.js | 20 + node_modules/is-extglob/package.json | 69 ++ node_modules/is-glob/LICENSE | 21 + node_modules/is-glob/README.md | 206 ++++ node_modules/is-glob/index.js | 150 +++ node_modules/is-glob/package.json | 81 ++ node_modules/is-number/LICENSE | 21 + node_modules/is-number/README.md | 187 +++ node_modules/is-number/index.js | 18 + node_modules/is-number/package.json | 82 ++ node_modules/minimatch/LICENSE | 15 + node_modules/minimatch/README.md | 230 ++++ node_modules/minimatch/minimatch.js | 947 ++++++++++++++ node_modules/minimatch/package.json | 33 + node_modules/ms/index.js | 162 +++ node_modules/ms/license.md | 21 + node_modules/ms/package.json | 37 + node_modules/ms/readme.md | 60 + node_modules/nodemon/.prettierrc.json | 3 + node_modules/nodemon/LICENSE | 21 + node_modules/nodemon/README.md | 452 +++++++ node_modules/nodemon/bin/nodemon.js | 16 + node_modules/nodemon/bin/windows-kill.exe | Bin 0 -> 80384 bytes node_modules/nodemon/doc/cli/authors.txt | 8 + node_modules/nodemon/doc/cli/config.txt | 44 + node_modules/nodemon/doc/cli/help.txt | 29 + node_modules/nodemon/doc/cli/logo.txt | 20 + node_modules/nodemon/doc/cli/options.txt | 36 + node_modules/nodemon/doc/cli/topics.txt | 8 + node_modules/nodemon/doc/cli/usage.txt | 3 + node_modules/nodemon/doc/cli/whoami.txt | 9 + node_modules/nodemon/index.d.ts | 141 +++ node_modules/nodemon/jsconfig.json | 7 + node_modules/nodemon/lib/cli/index.js | 49 + node_modules/nodemon/lib/cli/parse.js | 230 ++++ node_modules/nodemon/lib/config/command.js | 43 + node_modules/nodemon/lib/config/defaults.js | 34 + node_modules/nodemon/lib/config/exec.js | 234 ++++ node_modules/nodemon/lib/config/index.js | 93 ++ node_modules/nodemon/lib/config/load.js | 223 ++++ node_modules/nodemon/lib/help/index.js | 27 + node_modules/nodemon/lib/index.js | 1 + node_modules/nodemon/lib/monitor/index.js | 4 + node_modules/nodemon/lib/monitor/match.js | 276 +++++ node_modules/nodemon/lib/monitor/run.js | 555 +++++++++ node_modules/nodemon/lib/monitor/signals.js | 34 + node_modules/nodemon/lib/monitor/watch.js | 244 ++++ node_modules/nodemon/lib/nodemon.js | 315 +++++ node_modules/nodemon/lib/rules/add.js | 89 ++ node_modules/nodemon/lib/rules/index.js | 53 + node_modules/nodemon/lib/rules/parse.js | 43 + node_modules/nodemon/lib/spawn.js | 74 ++ node_modules/nodemon/lib/utils/bus.js | 44 + node_modules/nodemon/lib/utils/clone.js | 40 + node_modules/nodemon/lib/utils/colour.js | 26 + node_modules/nodemon/lib/utils/index.js | 103 ++ node_modules/nodemon/lib/utils/log.js | 82 ++ node_modules/nodemon/lib/utils/merge.js | 47 + node_modules/nodemon/lib/version.js | 100 ++ node_modules/nodemon/package.json | 75 ++ node_modules/normalize-path/LICENSE | 21 + node_modules/normalize-path/README.md | 127 ++ node_modules/normalize-path/index.js | 35 + node_modules/normalize-path/package.json | 77 ++ node_modules/pg-cloudflare/LICENSE | 21 + node_modules/pg-cloudflare/README.md | 33 + node_modules/pg-cloudflare/dist/empty.d.ts | 2 + node_modules/pg-cloudflare/dist/empty.js | 4 + node_modules/pg-cloudflare/dist/empty.js.map | 1 + node_modules/pg-cloudflare/dist/index.d.ts | 31 + node_modules/pg-cloudflare/dist/index.js | 146 +++ node_modules/pg-cloudflare/dist/index.js.map | 1 + node_modules/pg-cloudflare/package.json | 32 + node_modules/pg-cloudflare/src/empty.ts | 3 + 
node_modules/pg-cloudflare/src/index.ts | 164 +++ node_modules/pg-cloudflare/src/types.d.ts | 25 + node_modules/pg-connection-string/LICENSE | 21 + node_modules/pg-connection-string/README.md | 77 ++ node_modules/pg-connection-string/index.d.ts | 15 + node_modules/pg-connection-string/index.js | 112 ++ .../pg-connection-string/package.json | 40 + node_modules/pg-int8/LICENSE | 13 + node_modules/pg-int8/README.md | 16 + node_modules/pg-int8/index.js | 100 ++ node_modules/pg-int8/package.json | 24 + node_modules/pg-pool/LICENSE | 21 + node_modules/pg-pool/README.md | 376 ++++++ node_modules/pg-pool/index.js | 467 +++++++ node_modules/pg-pool/package.json | 41 + .../pg-pool/test/bring-your-own-promise.js | 42 + .../pg-pool/test/connection-strings.js | 29 + .../pg-pool/test/connection-timeout.js | 229 ++++ node_modules/pg-pool/test/ending.js | 40 + node_modules/pg-pool/test/error-handling.js | 260 ++++ node_modules/pg-pool/test/events.js | 124 ++ .../pg-pool/test/idle-timeout-exit.js | 20 + node_modules/pg-pool/test/idle-timeout.js | 118 ++ node_modules/pg-pool/test/index.js | 226 ++++ node_modules/pg-pool/test/lifetime-timeout.js | 48 + node_modules/pg-pool/test/logging.js | 20 + node_modules/pg-pool/test/max-uses.js | 98 ++ .../pg-pool/test/releasing-clients.js | 54 + node_modules/pg-pool/test/setup.js | 10 + node_modules/pg-pool/test/sizing.js | 58 + node_modules/pg-pool/test/submittable.js | 19 + node_modules/pg-pool/test/timeout.js | 0 node_modules/pg-pool/test/verify.js | 24 + node_modules/pg-protocol/LICENSE | 21 + node_modules/pg-protocol/README.md | 3 + node_modules/pg-protocol/dist/b.d.ts | 1 + node_modules/pg-protocol/dist/b.js | 25 + node_modules/pg-protocol/dist/b.js.map | 1 + .../pg-protocol/dist/buffer-reader.d.ts | 14 + .../pg-protocol/dist/buffer-reader.js | 50 + .../pg-protocol/dist/buffer-reader.js.map | 1 + .../pg-protocol/dist/buffer-writer.d.ts | 16 + .../pg-protocol/dist/buffer-writer.js | 81 ++ .../pg-protocol/dist/buffer-writer.js.map | 1 + .../pg-protocol/dist/inbound-parser.test.d.ts | 1 + .../pg-protocol/dist/inbound-parser.test.js | 511 ++++++++ .../dist/inbound-parser.test.js.map | 1 + node_modules/pg-protocol/dist/index.d.ts | 6 + node_modules/pg-protocol/dist/index.js | 15 + node_modules/pg-protocol/dist/index.js.map | 1 + node_modules/pg-protocol/dist/messages.d.ts | 162 +++ node_modules/pg-protocol/dist/messages.js | 160 +++ node_modules/pg-protocol/dist/messages.js.map | 1 + .../dist/outbound-serializer.test.d.ts | 1 + .../dist/outbound-serializer.test.js | 248 ++++ .../dist/outbound-serializer.test.js.map | 1 + node_modules/pg-protocol/dist/parser.d.ts | 39 + node_modules/pg-protocol/dist/parser.js | 304 +++++ node_modules/pg-protocol/dist/parser.js.map | 1 + node_modules/pg-protocol/dist/serializer.d.ts | 42 + node_modules/pg-protocol/dist/serializer.js | 189 +++ .../pg-protocol/dist/serializer.js.map | 1 + node_modules/pg-protocol/package.json | 35 + node_modules/pg-protocol/src/b.ts | 28 + node_modules/pg-protocol/src/buffer-reader.ts | 53 + node_modules/pg-protocol/src/buffer-writer.ts | 85 ++ .../pg-protocol/src/inbound-parser.test.ts | 557 +++++++++ node_modules/pg-protocol/src/index.ts | 11 + node_modules/pg-protocol/src/messages.ts | 262 ++++ .../src/outbound-serializer.test.ts | 272 ++++ node_modules/pg-protocol/src/parser.ts | 389 ++++++ node_modules/pg-protocol/src/serializer.ts | 274 +++++ .../pg-protocol/src/testing/buffer-list.ts | 75 ++ .../pg-protocol/src/testing/test-buffers.ts | 166 +++ .../pg-protocol/src/types/chunky.d.ts | 1 + 
node_modules/pg-types/.travis.yml | 7 + node_modules/pg-types/Makefile | 14 + node_modules/pg-types/README.md | 75 ++ node_modules/pg-types/index.d.ts | 137 +++ node_modules/pg-types/index.js | 47 + node_modules/pg-types/index.test-d.ts | 21 + node_modules/pg-types/lib/arrayParser.js | 11 + node_modules/pg-types/lib/binaryParsers.js | 257 ++++ node_modules/pg-types/lib/builtins.js | 73 ++ node_modules/pg-types/lib/textParsers.js | 215 ++++ node_modules/pg-types/package.json | 42 + node_modules/pg-types/test/index.js | 24 + node_modules/pg-types/test/types.js | 597 +++++++++ node_modules/pg/LICENSE | 21 + node_modules/pg/README.md | 89 ++ node_modules/pg/lib/client.js | 631 ++++++++++ node_modules/pg/lib/connection-parameters.js | 167 +++ node_modules/pg/lib/connection.js | 223 ++++ node_modules/pg/lib/crypto/sasl.js | 186 +++ node_modules/pg/lib/crypto/utils-legacy.js | 37 + node_modules/pg/lib/crypto/utils-webcrypto.js | 83 ++ node_modules/pg/lib/crypto/utils.js | 9 + node_modules/pg/lib/defaults.js | 84 ++ node_modules/pg/lib/index.js | 58 + node_modules/pg/lib/native/client.js | 307 +++++ node_modules/pg/lib/native/index.js | 2 + node_modules/pg/lib/native/query.js | 168 +++ node_modules/pg/lib/query.js | 239 ++++ node_modules/pg/lib/result.js | 107 ++ node_modules/pg/lib/stream.js | 28 + node_modules/pg/lib/type-overrides.js | 35 + node_modules/pg/lib/utils.js | 213 ++++ node_modules/pg/package.json | 62 + node_modules/pgpass/README.md | 74 ++ node_modules/pgpass/lib/helper.js | 233 ++++ node_modules/pgpass/lib/index.js | 23 + node_modules/pgpass/package.json | 41 + node_modules/picomatch/CHANGELOG.md | 136 ++ node_modules/picomatch/LICENSE | 21 + node_modules/picomatch/README.md | 708 +++++++++++ node_modules/picomatch/index.js | 3 + node_modules/picomatch/lib/constants.js | 179 +++ node_modules/picomatch/lib/parse.js | 1091 +++++++++++++++++ node_modules/picomatch/lib/picomatch.js | 342 ++++++ node_modules/picomatch/lib/scan.js | 391 ++++++ node_modules/picomatch/lib/utils.js | 64 + node_modules/picomatch/package.json | 81 ++ node_modules/postgres-array/index.d.ts | 4 + node_modules/postgres-array/index.js | 97 ++ node_modules/postgres-array/license | 21 + node_modules/postgres-array/package.json | 35 + node_modules/postgres-array/readme.md | 43 + node_modules/postgres-bytea/index.js | 31 + node_modules/postgres-bytea/license | 21 + node_modules/postgres-bytea/package.json | 34 + node_modules/postgres-bytea/readme.md | 34 + node_modules/postgres-date/index.js | 116 ++ node_modules/postgres-date/license | 21 + node_modules/postgres-date/package.json | 33 + node_modules/postgres-date/readme.md | 49 + node_modules/postgres-interval/index.d.ts | 20 + node_modules/postgres-interval/index.js | 125 ++ node_modules/postgres-interval/license | 21 + node_modules/postgres-interval/package.json | 36 + node_modules/postgres-interval/readme.md | 48 + node_modules/pstree.remy/.travis.yml | 8 + node_modules/pstree.remy/LICENSE | 7 + node_modules/pstree.remy/README.md | 26 + node_modules/pstree.remy/lib/index.js | 37 + node_modules/pstree.remy/lib/tree.js | 37 + node_modules/pstree.remy/lib/utils.js | 53 + node_modules/pstree.remy/package.json | 33 + .../pstree.remy/tests/fixtures/index.js | 13 + node_modules/pstree.remy/tests/fixtures/out1 | 10 + node_modules/pstree.remy/tests/fixtures/out2 | 29 + node_modules/pstree.remy/tests/index.test.js | 51 + node_modules/readdirp/LICENSE | 21 + node_modules/readdirp/README.md | 122 ++ node_modules/readdirp/index.d.ts | 43 + node_modules/readdirp/index.js | 
287 +++++ node_modules/readdirp/package.json | 122 ++ node_modules/semver/LICENSE | 15 + node_modules/semver/README.md | 654 ++++++++++ node_modules/semver/bin/semver.js | 188 +++ node_modules/semver/classes/comparator.js | 141 +++ node_modules/semver/classes/index.js | 5 + node_modules/semver/classes/range.js | 554 +++++++++ node_modules/semver/classes/semver.js | 302 +++++ node_modules/semver/functions/clean.js | 6 + node_modules/semver/functions/cmp.js | 52 + node_modules/semver/functions/coerce.js | 60 + .../semver/functions/compare-build.js | 7 + .../semver/functions/compare-loose.js | 3 + node_modules/semver/functions/compare.js | 5 + node_modules/semver/functions/diff.js | 65 + node_modules/semver/functions/eq.js | 3 + node_modules/semver/functions/gt.js | 3 + node_modules/semver/functions/gte.js | 3 + node_modules/semver/functions/inc.js | 19 + node_modules/semver/functions/lt.js | 3 + node_modules/semver/functions/lte.js | 3 + node_modules/semver/functions/major.js | 3 + node_modules/semver/functions/minor.js | 3 + node_modules/semver/functions/neq.js | 3 + node_modules/semver/functions/parse.js | 16 + node_modules/semver/functions/patch.js | 3 + node_modules/semver/functions/prerelease.js | 6 + node_modules/semver/functions/rcompare.js | 3 + node_modules/semver/functions/rsort.js | 3 + node_modules/semver/functions/satisfies.js | 10 + node_modules/semver/functions/sort.js | 3 + node_modules/semver/functions/valid.js | 6 + node_modules/semver/index.js | 89 ++ node_modules/semver/internal/constants.js | 35 + node_modules/semver/internal/debug.js | 9 + node_modules/semver/internal/identifiers.js | 23 + node_modules/semver/internal/lrucache.js | 40 + node_modules/semver/internal/parse-options.js | 15 + node_modules/semver/internal/re.js | 217 ++++ node_modules/semver/package.json | 77 ++ node_modules/semver/preload.js | 2 + node_modules/semver/range.bnf | 16 + node_modules/semver/ranges/gtr.js | 4 + node_modules/semver/ranges/intersects.js | 7 + node_modules/semver/ranges/ltr.js | 4 + node_modules/semver/ranges/max-satisfying.js | 25 + node_modules/semver/ranges/min-satisfying.js | 24 + node_modules/semver/ranges/min-version.js | 61 + node_modules/semver/ranges/outside.js | 80 ++ node_modules/semver/ranges/simplify.js | 47 + node_modules/semver/ranges/subset.js | 247 ++++ node_modules/semver/ranges/to-comparators.js | 8 + node_modules/semver/ranges/valid.js | 11 + node_modules/simple-update-notifier/LICENSE | 21 + node_modules/simple-update-notifier/README.md | 82 ++ .../simple-update-notifier/build/index.d.ts | 13 + .../simple-update-notifier/build/index.js | 210 ++++ .../simple-update-notifier/package.json | 100 ++ .../src/borderedText.ts | 12 + .../simple-update-notifier/src/cache.spec.ts | 17 + .../simple-update-notifier/src/cache.ts | 44 + .../src/getDistVersion.spec.ts | 35 + .../src/getDistVersion.ts | 29 + .../src/hasNewVersion.spec.ts | 82 ++ .../src/hasNewVersion.ts | 40 + .../simple-update-notifier/src/index.spec.ts | 27 + .../simple-update-notifier/src/index.ts | 34 + .../simple-update-notifier/src/isNpmOrYarn.ts | 12 + .../simple-update-notifier/src/types.ts | 8 + node_modules/split2/LICENSE | 13 + node_modules/split2/README.md | 85 ++ node_modules/split2/bench.js | 27 + node_modules/split2/index.js | 141 +++ node_modules/split2/package.json | 39 + node_modules/split2/test.js | 409 ++++++ node_modules/supports-color/browser.js | 5 + node_modules/supports-color/index.js | 131 ++ node_modules/supports-color/license | 9 + node_modules/supports-color/package.json | 53 + 
node_modules/supports-color/readme.md | 66 + node_modules/to-regex-range/LICENSE | 21 + node_modules/to-regex-range/README.md | 305 +++++ node_modules/to-regex-range/index.js | 288 +++++ node_modules/to-regex-range/package.json | 88 ++ node_modules/touch/LICENSE | 15 + node_modules/touch/README.md | 52 + node_modules/touch/bin/nodetouch.js | 112 ++ node_modules/touch/index.js | 224 ++++ node_modules/touch/package.json | 25 + .../undefsafe/.github/workflows/release.yml | 25 + node_modules/undefsafe/.jscsrc | 13 + node_modules/undefsafe/.jshintrc | 16 + node_modules/undefsafe/.travis.yml | 18 + node_modules/undefsafe/LICENSE | 22 + node_modules/undefsafe/README.md | 63 + node_modules/undefsafe/example.js | 14 + node_modules/undefsafe/lib/undefsafe.js | 125 ++ node_modules/undefsafe/package.json | 34 + node_modules/xtend/.jshintrc | 30 + node_modules/xtend/LICENSE | 20 + node_modules/xtend/README.md | 32 + node_modules/xtend/immutable.js | 19 + node_modules/xtend/mutable.js | 17 + node_modules/xtend/package.json | 55 + node_modules/xtend/test.js | 103 ++ package-lock.json | 462 +++++++ package.json | 16 + root.crt | 59 + src/index.js | 43 + 419 files changed, 39487 insertions(+) create mode 120000 node_modules/.bin/nodemon create mode 120000 node_modules/.bin/nodetouch create mode 120000 node_modules/.bin/semver create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/anymatch/LICENSE create mode 100644 node_modules/anymatch/README.md create mode 100644 node_modules/anymatch/index.d.ts create mode 100644 node_modules/anymatch/index.js create mode 100644 node_modules/anymatch/package.json create mode 100644 node_modules/balanced-match/.github/FUNDING.yml create mode 100644 node_modules/balanced-match/LICENSE.md create mode 100644 node_modules/balanced-match/README.md create mode 100644 node_modules/balanced-match/index.js create mode 100644 node_modules/balanced-match/package.json create mode 100644 node_modules/binary-extensions/binary-extensions.json create mode 100644 node_modules/binary-extensions/binary-extensions.json.d.ts create mode 100644 node_modules/binary-extensions/index.d.ts create mode 100644 node_modules/binary-extensions/index.js create mode 100644 node_modules/binary-extensions/license create mode 100644 node_modules/binary-extensions/package.json create mode 100644 node_modules/binary-extensions/readme.md create mode 100644 node_modules/brace-expansion/LICENSE create mode 100644 node_modules/brace-expansion/README.md create mode 100644 node_modules/brace-expansion/index.js create mode 100644 node_modules/brace-expansion/package.json create mode 100644 node_modules/braces/LICENSE create mode 100644 node_modules/braces/README.md create mode 100644 node_modules/braces/index.js create mode 100644 node_modules/braces/lib/compile.js create mode 100644 node_modules/braces/lib/constants.js create mode 100644 node_modules/braces/lib/expand.js create mode 100644 node_modules/braces/lib/parse.js create mode 100644 node_modules/braces/lib/stringify.js create mode 100644 node_modules/braces/lib/utils.js create mode 100644 node_modules/braces/package.json create mode 100644 node_modules/chokidar/LICENSE create mode 100644 node_modules/chokidar/README.md create mode 100644 node_modules/chokidar/index.js create mode 100644 node_modules/chokidar/lib/constants.js create mode 100644 node_modules/chokidar/lib/fsevents-handler.js create mode 100644 node_modules/chokidar/lib/nodefs-handler.js create mode 100644 node_modules/chokidar/package.json create mode 100644 
node_modules/chokidar/types/index.d.ts create mode 100644 node_modules/concat-map/.travis.yml create mode 100644 node_modules/concat-map/LICENSE create mode 100644 node_modules/concat-map/README.markdown create mode 100644 node_modules/concat-map/example/map.js create mode 100644 node_modules/concat-map/index.js create mode 100644 node_modules/concat-map/package.json create mode 100644 node_modules/concat-map/test/map.js create mode 100644 node_modules/debug/LICENSE create mode 100644 node_modules/debug/README.md create mode 100644 node_modules/debug/package.json create mode 100644 node_modules/debug/src/browser.js create mode 100644 node_modules/debug/src/common.js create mode 100644 node_modules/debug/src/index.js create mode 100644 node_modules/debug/src/node.js create mode 100644 node_modules/fill-range/LICENSE create mode 100644 node_modules/fill-range/README.md create mode 100644 node_modules/fill-range/index.js create mode 100644 node_modules/fill-range/package.json create mode 100644 node_modules/glob-parent/CHANGELOG.md create mode 100644 node_modules/glob-parent/LICENSE create mode 100644 node_modules/glob-parent/README.md create mode 100644 node_modules/glob-parent/index.js create mode 100644 node_modules/glob-parent/package.json create mode 100644 node_modules/has-flag/index.js create mode 100644 node_modules/has-flag/license create mode 100644 node_modules/has-flag/package.json create mode 100644 node_modules/has-flag/readme.md create mode 100644 node_modules/ignore-by-default/LICENSE create mode 100644 node_modules/ignore-by-default/README.md create mode 100644 node_modules/ignore-by-default/index.js create mode 100644 node_modules/ignore-by-default/package.json create mode 100644 node_modules/is-binary-path/index.d.ts create mode 100644 node_modules/is-binary-path/index.js create mode 100644 node_modules/is-binary-path/license create mode 100644 node_modules/is-binary-path/package.json create mode 100644 node_modules/is-binary-path/readme.md create mode 100644 node_modules/is-extglob/LICENSE create mode 100644 node_modules/is-extglob/README.md create mode 100644 node_modules/is-extglob/index.js create mode 100644 node_modules/is-extglob/package.json create mode 100644 node_modules/is-glob/LICENSE create mode 100644 node_modules/is-glob/README.md create mode 100644 node_modules/is-glob/index.js create mode 100644 node_modules/is-glob/package.json create mode 100644 node_modules/is-number/LICENSE create mode 100644 node_modules/is-number/README.md create mode 100644 node_modules/is-number/index.js create mode 100644 node_modules/is-number/package.json create mode 100644 node_modules/minimatch/LICENSE create mode 100644 node_modules/minimatch/README.md create mode 100644 node_modules/minimatch/minimatch.js create mode 100644 node_modules/minimatch/package.json create mode 100644 node_modules/ms/index.js create mode 100644 node_modules/ms/license.md create mode 100644 node_modules/ms/package.json create mode 100644 node_modules/ms/readme.md create mode 100644 node_modules/nodemon/.prettierrc.json create mode 100644 node_modules/nodemon/LICENSE create mode 100644 node_modules/nodemon/README.md create mode 100755 node_modules/nodemon/bin/nodemon.js create mode 100644 node_modules/nodemon/bin/windows-kill.exe create mode 100644 node_modules/nodemon/doc/cli/authors.txt create mode 100644 node_modules/nodemon/doc/cli/config.txt create mode 100644 node_modules/nodemon/doc/cli/help.txt create mode 100644 node_modules/nodemon/doc/cli/logo.txt create mode 100644 
node_modules/nodemon/doc/cli/options.txt create mode 100644 node_modules/nodemon/doc/cli/topics.txt create mode 100644 node_modules/nodemon/doc/cli/usage.txt create mode 100644 node_modules/nodemon/doc/cli/whoami.txt create mode 100644 node_modules/nodemon/index.d.ts create mode 100644 node_modules/nodemon/jsconfig.json create mode 100644 node_modules/nodemon/lib/cli/index.js create mode 100644 node_modules/nodemon/lib/cli/parse.js create mode 100644 node_modules/nodemon/lib/config/command.js create mode 100644 node_modules/nodemon/lib/config/defaults.js create mode 100644 node_modules/nodemon/lib/config/exec.js create mode 100644 node_modules/nodemon/lib/config/index.js create mode 100644 node_modules/nodemon/lib/config/load.js create mode 100644 node_modules/nodemon/lib/help/index.js create mode 100644 node_modules/nodemon/lib/index.js create mode 100644 node_modules/nodemon/lib/monitor/index.js create mode 100644 node_modules/nodemon/lib/monitor/match.js create mode 100644 node_modules/nodemon/lib/monitor/run.js create mode 100644 node_modules/nodemon/lib/monitor/signals.js create mode 100644 node_modules/nodemon/lib/monitor/watch.js create mode 100644 node_modules/nodemon/lib/nodemon.js create mode 100644 node_modules/nodemon/lib/rules/add.js create mode 100644 node_modules/nodemon/lib/rules/index.js create mode 100644 node_modules/nodemon/lib/rules/parse.js create mode 100644 node_modules/nodemon/lib/spawn.js create mode 100644 node_modules/nodemon/lib/utils/bus.js create mode 100644 node_modules/nodemon/lib/utils/clone.js create mode 100644 node_modules/nodemon/lib/utils/colour.js create mode 100644 node_modules/nodemon/lib/utils/index.js create mode 100644 node_modules/nodemon/lib/utils/log.js create mode 100644 node_modules/nodemon/lib/utils/merge.js create mode 100644 node_modules/nodemon/lib/version.js create mode 100644 node_modules/nodemon/package.json create mode 100644 node_modules/normalize-path/LICENSE create mode 100644 node_modules/normalize-path/README.md create mode 100644 node_modules/normalize-path/index.js create mode 100644 node_modules/normalize-path/package.json create mode 100644 node_modules/pg-cloudflare/LICENSE create mode 100644 node_modules/pg-cloudflare/README.md create mode 100644 node_modules/pg-cloudflare/dist/empty.d.ts create mode 100644 node_modules/pg-cloudflare/dist/empty.js create mode 100644 node_modules/pg-cloudflare/dist/empty.js.map create mode 100644 node_modules/pg-cloudflare/dist/index.d.ts create mode 100644 node_modules/pg-cloudflare/dist/index.js create mode 100644 node_modules/pg-cloudflare/dist/index.js.map create mode 100644 node_modules/pg-cloudflare/package.json create mode 100644 node_modules/pg-cloudflare/src/empty.ts create mode 100644 node_modules/pg-cloudflare/src/index.ts create mode 100644 node_modules/pg-cloudflare/src/types.d.ts create mode 100644 node_modules/pg-connection-string/LICENSE create mode 100644 node_modules/pg-connection-string/README.md create mode 100644 node_modules/pg-connection-string/index.d.ts create mode 100644 node_modules/pg-connection-string/index.js create mode 100644 node_modules/pg-connection-string/package.json create mode 100644 node_modules/pg-int8/LICENSE create mode 100644 node_modules/pg-int8/README.md create mode 100644 node_modules/pg-int8/index.js create mode 100644 node_modules/pg-int8/package.json create mode 100644 node_modules/pg-pool/LICENSE create mode 100644 node_modules/pg-pool/README.md create mode 100644 node_modules/pg-pool/index.js create mode 100644 
node_modules/pg-pool/package.json create mode 100644 node_modules/pg-pool/test/bring-your-own-promise.js create mode 100644 node_modules/pg-pool/test/connection-strings.js create mode 100644 node_modules/pg-pool/test/connection-timeout.js create mode 100644 node_modules/pg-pool/test/ending.js create mode 100644 node_modules/pg-pool/test/error-handling.js create mode 100644 node_modules/pg-pool/test/events.js create mode 100644 node_modules/pg-pool/test/idle-timeout-exit.js create mode 100644 node_modules/pg-pool/test/idle-timeout.js create mode 100644 node_modules/pg-pool/test/index.js create mode 100644 node_modules/pg-pool/test/lifetime-timeout.js create mode 100644 node_modules/pg-pool/test/logging.js create mode 100644 node_modules/pg-pool/test/max-uses.js create mode 100644 node_modules/pg-pool/test/releasing-clients.js create mode 100644 node_modules/pg-pool/test/setup.js create mode 100644 node_modules/pg-pool/test/sizing.js create mode 100644 node_modules/pg-pool/test/submittable.js create mode 100644 node_modules/pg-pool/test/timeout.js create mode 100644 node_modules/pg-pool/test/verify.js create mode 100644 node_modules/pg-protocol/LICENSE create mode 100644 node_modules/pg-protocol/README.md create mode 100644 node_modules/pg-protocol/dist/b.d.ts create mode 100644 node_modules/pg-protocol/dist/b.js create mode 100644 node_modules/pg-protocol/dist/b.js.map create mode 100644 node_modules/pg-protocol/dist/buffer-reader.d.ts create mode 100644 node_modules/pg-protocol/dist/buffer-reader.js create mode 100644 node_modules/pg-protocol/dist/buffer-reader.js.map create mode 100644 node_modules/pg-protocol/dist/buffer-writer.d.ts create mode 100644 node_modules/pg-protocol/dist/buffer-writer.js create mode 100644 node_modules/pg-protocol/dist/buffer-writer.js.map create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.d.ts create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.js create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.js.map create mode 100644 node_modules/pg-protocol/dist/index.d.ts create mode 100644 node_modules/pg-protocol/dist/index.js create mode 100644 node_modules/pg-protocol/dist/index.js.map create mode 100644 node_modules/pg-protocol/dist/messages.d.ts create mode 100644 node_modules/pg-protocol/dist/messages.js create mode 100644 node_modules/pg-protocol/dist/messages.js.map create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.d.ts create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.js create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.js.map create mode 100644 node_modules/pg-protocol/dist/parser.d.ts create mode 100644 node_modules/pg-protocol/dist/parser.js create mode 100644 node_modules/pg-protocol/dist/parser.js.map create mode 100644 node_modules/pg-protocol/dist/serializer.d.ts create mode 100644 node_modules/pg-protocol/dist/serializer.js create mode 100644 node_modules/pg-protocol/dist/serializer.js.map create mode 100644 node_modules/pg-protocol/package.json create mode 100644 node_modules/pg-protocol/src/b.ts create mode 100644 node_modules/pg-protocol/src/buffer-reader.ts create mode 100644 node_modules/pg-protocol/src/buffer-writer.ts create mode 100644 node_modules/pg-protocol/src/inbound-parser.test.ts create mode 100644 node_modules/pg-protocol/src/index.ts create mode 100644 node_modules/pg-protocol/src/messages.ts create mode 100644 node_modules/pg-protocol/src/outbound-serializer.test.ts create mode 100644 
node_modules/pg-protocol/src/parser.ts create mode 100644 node_modules/pg-protocol/src/serializer.ts create mode 100644 node_modules/pg-protocol/src/testing/buffer-list.ts create mode 100644 node_modules/pg-protocol/src/testing/test-buffers.ts create mode 100644 node_modules/pg-protocol/src/types/chunky.d.ts create mode 100644 node_modules/pg-types/.travis.yml create mode 100644 node_modules/pg-types/Makefile create mode 100644 node_modules/pg-types/README.md create mode 100644 node_modules/pg-types/index.d.ts create mode 100644 node_modules/pg-types/index.js create mode 100644 node_modules/pg-types/index.test-d.ts create mode 100644 node_modules/pg-types/lib/arrayParser.js create mode 100644 node_modules/pg-types/lib/binaryParsers.js create mode 100644 node_modules/pg-types/lib/builtins.js create mode 100644 node_modules/pg-types/lib/textParsers.js create mode 100644 node_modules/pg-types/package.json create mode 100644 node_modules/pg-types/test/index.js create mode 100644 node_modules/pg-types/test/types.js create mode 100644 node_modules/pg/LICENSE create mode 100644 node_modules/pg/README.md create mode 100644 node_modules/pg/lib/client.js create mode 100644 node_modules/pg/lib/connection-parameters.js create mode 100644 node_modules/pg/lib/connection.js create mode 100644 node_modules/pg/lib/crypto/sasl.js create mode 100644 node_modules/pg/lib/crypto/utils-legacy.js create mode 100644 node_modules/pg/lib/crypto/utils-webcrypto.js create mode 100644 node_modules/pg/lib/crypto/utils.js create mode 100644 node_modules/pg/lib/defaults.js create mode 100644 node_modules/pg/lib/index.js create mode 100644 node_modules/pg/lib/native/client.js create mode 100644 node_modules/pg/lib/native/index.js create mode 100644 node_modules/pg/lib/native/query.js create mode 100644 node_modules/pg/lib/query.js create mode 100644 node_modules/pg/lib/result.js create mode 100644 node_modules/pg/lib/stream.js create mode 100644 node_modules/pg/lib/type-overrides.js create mode 100644 node_modules/pg/lib/utils.js create mode 100644 node_modules/pg/package.json create mode 100644 node_modules/pgpass/README.md create mode 100644 node_modules/pgpass/lib/helper.js create mode 100644 node_modules/pgpass/lib/index.js create mode 100644 node_modules/pgpass/package.json create mode 100644 node_modules/picomatch/CHANGELOG.md create mode 100644 node_modules/picomatch/LICENSE create mode 100644 node_modules/picomatch/README.md create mode 100644 node_modules/picomatch/index.js create mode 100644 node_modules/picomatch/lib/constants.js create mode 100644 node_modules/picomatch/lib/parse.js create mode 100644 node_modules/picomatch/lib/picomatch.js create mode 100644 node_modules/picomatch/lib/scan.js create mode 100644 node_modules/picomatch/lib/utils.js create mode 100644 node_modules/picomatch/package.json create mode 100644 node_modules/postgres-array/index.d.ts create mode 100644 node_modules/postgres-array/index.js create mode 100644 node_modules/postgres-array/license create mode 100644 node_modules/postgres-array/package.json create mode 100644 node_modules/postgres-array/readme.md create mode 100644 node_modules/postgres-bytea/index.js create mode 100644 node_modules/postgres-bytea/license create mode 100644 node_modules/postgres-bytea/package.json create mode 100644 node_modules/postgres-bytea/readme.md create mode 100644 node_modules/postgres-date/index.js create mode 100644 node_modules/postgres-date/license create mode 100644 node_modules/postgres-date/package.json create mode 100644 
node_modules/postgres-date/readme.md create mode 100644 node_modules/postgres-interval/index.d.ts create mode 100644 node_modules/postgres-interval/index.js create mode 100644 node_modules/postgres-interval/license create mode 100644 node_modules/postgres-interval/package.json create mode 100644 node_modules/postgres-interval/readme.md create mode 100644 node_modules/pstree.remy/.travis.yml create mode 100644 node_modules/pstree.remy/LICENSE create mode 100644 node_modules/pstree.remy/README.md create mode 100644 node_modules/pstree.remy/lib/index.js create mode 100644 node_modules/pstree.remy/lib/tree.js create mode 100644 node_modules/pstree.remy/lib/utils.js create mode 100644 node_modules/pstree.remy/package.json create mode 100644 node_modules/pstree.remy/tests/fixtures/index.js create mode 100644 node_modules/pstree.remy/tests/fixtures/out1 create mode 100644 node_modules/pstree.remy/tests/fixtures/out2 create mode 100644 node_modules/pstree.remy/tests/index.test.js create mode 100644 node_modules/readdirp/LICENSE create mode 100644 node_modules/readdirp/README.md create mode 100644 node_modules/readdirp/index.d.ts create mode 100644 node_modules/readdirp/index.js create mode 100644 node_modules/readdirp/package.json create mode 100644 node_modules/semver/LICENSE create mode 100644 node_modules/semver/README.md create mode 100755 node_modules/semver/bin/semver.js create mode 100644 node_modules/semver/classes/comparator.js create mode 100644 node_modules/semver/classes/index.js create mode 100644 node_modules/semver/classes/range.js create mode 100644 node_modules/semver/classes/semver.js create mode 100644 node_modules/semver/functions/clean.js create mode 100644 node_modules/semver/functions/cmp.js create mode 100644 node_modules/semver/functions/coerce.js create mode 100644 node_modules/semver/functions/compare-build.js create mode 100644 node_modules/semver/functions/compare-loose.js create mode 100644 node_modules/semver/functions/compare.js create mode 100644 node_modules/semver/functions/diff.js create mode 100644 node_modules/semver/functions/eq.js create mode 100644 node_modules/semver/functions/gt.js create mode 100644 node_modules/semver/functions/gte.js create mode 100644 node_modules/semver/functions/inc.js create mode 100644 node_modules/semver/functions/lt.js create mode 100644 node_modules/semver/functions/lte.js create mode 100644 node_modules/semver/functions/major.js create mode 100644 node_modules/semver/functions/minor.js create mode 100644 node_modules/semver/functions/neq.js create mode 100644 node_modules/semver/functions/parse.js create mode 100644 node_modules/semver/functions/patch.js create mode 100644 node_modules/semver/functions/prerelease.js create mode 100644 node_modules/semver/functions/rcompare.js create mode 100644 node_modules/semver/functions/rsort.js create mode 100644 node_modules/semver/functions/satisfies.js create mode 100644 node_modules/semver/functions/sort.js create mode 100644 node_modules/semver/functions/valid.js create mode 100644 node_modules/semver/index.js create mode 100644 node_modules/semver/internal/constants.js create mode 100644 node_modules/semver/internal/debug.js create mode 100644 node_modules/semver/internal/identifiers.js create mode 100644 node_modules/semver/internal/lrucache.js create mode 100644 node_modules/semver/internal/parse-options.js create mode 100644 node_modules/semver/internal/re.js create mode 100644 node_modules/semver/package.json create mode 100644 node_modules/semver/preload.js create mode 100644 
node_modules/semver/range.bnf create mode 100644 node_modules/semver/ranges/gtr.js create mode 100644 node_modules/semver/ranges/intersects.js create mode 100644 node_modules/semver/ranges/ltr.js create mode 100644 node_modules/semver/ranges/max-satisfying.js create mode 100644 node_modules/semver/ranges/min-satisfying.js create mode 100644 node_modules/semver/ranges/min-version.js create mode 100644 node_modules/semver/ranges/outside.js create mode 100644 node_modules/semver/ranges/simplify.js create mode 100644 node_modules/semver/ranges/subset.js create mode 100644 node_modules/semver/ranges/to-comparators.js create mode 100644 node_modules/semver/ranges/valid.js create mode 100644 node_modules/simple-update-notifier/LICENSE create mode 100644 node_modules/simple-update-notifier/README.md create mode 100644 node_modules/simple-update-notifier/build/index.d.ts create mode 100644 node_modules/simple-update-notifier/build/index.js create mode 100644 node_modules/simple-update-notifier/package.json create mode 100644 node_modules/simple-update-notifier/src/borderedText.ts create mode 100644 node_modules/simple-update-notifier/src/cache.spec.ts create mode 100644 node_modules/simple-update-notifier/src/cache.ts create mode 100644 node_modules/simple-update-notifier/src/getDistVersion.spec.ts create mode 100644 node_modules/simple-update-notifier/src/getDistVersion.ts create mode 100644 node_modules/simple-update-notifier/src/hasNewVersion.spec.ts create mode 100644 node_modules/simple-update-notifier/src/hasNewVersion.ts create mode 100644 node_modules/simple-update-notifier/src/index.spec.ts create mode 100644 node_modules/simple-update-notifier/src/index.ts create mode 100644 node_modules/simple-update-notifier/src/isNpmOrYarn.ts create mode 100644 node_modules/simple-update-notifier/src/types.ts create mode 100644 node_modules/split2/LICENSE create mode 100644 node_modules/split2/README.md create mode 100644 node_modules/split2/bench.js create mode 100644 node_modules/split2/index.js create mode 100644 node_modules/split2/package.json create mode 100644 node_modules/split2/test.js create mode 100644 node_modules/supports-color/browser.js create mode 100644 node_modules/supports-color/index.js create mode 100644 node_modules/supports-color/license create mode 100644 node_modules/supports-color/package.json create mode 100644 node_modules/supports-color/readme.md create mode 100644 node_modules/to-regex-range/LICENSE create mode 100644 node_modules/to-regex-range/README.md create mode 100644 node_modules/to-regex-range/index.js create mode 100644 node_modules/to-regex-range/package.json create mode 100644 node_modules/touch/LICENSE create mode 100644 node_modules/touch/README.md create mode 100755 node_modules/touch/bin/nodetouch.js create mode 100644 node_modules/touch/index.js create mode 100644 node_modules/touch/package.json create mode 100644 node_modules/undefsafe/.github/workflows/release.yml create mode 100644 node_modules/undefsafe/.jscsrc create mode 100644 node_modules/undefsafe/.jshintrc create mode 100644 node_modules/undefsafe/.travis.yml create mode 100644 node_modules/undefsafe/LICENSE create mode 100644 node_modules/undefsafe/README.md create mode 100644 node_modules/undefsafe/example.js create mode 100644 node_modules/undefsafe/lib/undefsafe.js create mode 100644 node_modules/undefsafe/package.json create mode 100644 node_modules/xtend/.jshintrc create mode 100644 node_modules/xtend/LICENSE create mode 100644 node_modules/xtend/README.md create mode 100644 
node_modules/xtend/immutable.js create mode 100644 node_modules/xtend/mutable.js create mode 100644 node_modules/xtend/package.json create mode 100644 node_modules/xtend/test.js create mode 100644 package-lock.json create mode 100644 package.json create mode 100644 root.crt create mode 100644 src/index.js diff --git a/node_modules/.bin/nodemon b/node_modules/.bin/nodemon new file mode 120000 index 0000000..1056ddc --- /dev/null +++ b/node_modules/.bin/nodemon @@ -0,0 +1 @@ +../nodemon/bin/nodemon.js \ No newline at end of file diff --git a/node_modules/.bin/nodetouch b/node_modules/.bin/nodetouch new file mode 120000 index 0000000..3409fdb --- /dev/null +++ b/node_modules/.bin/nodetouch @@ -0,0 +1 @@ +../touch/bin/nodetouch.js \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 0000000..5aaadf4 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..f773360 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,440 @@ +{ + "name": "dh", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + 
"node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nodemon": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.4.tgz", + "integrity": "sha512-wjPBbFhtpJwmIeY2yP7QF+UKzPfltVGtfce1g/bB15/8vCGZj8uxD62b/b9M9/WVgme0NZudpownKN+c0plXlQ==", + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^4", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pg": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz", + "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", + "dependencies": { + "pg-connection-string": "^2.6.4", + "pg-pool": "^3.6.2", + "pg-protocol": "^1.6.1", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.1.1" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz", + "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.4.tgz", + "integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.2.tgz", + "integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.1.tgz", + "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": 
"^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/touch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", + "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/node_modules/anymatch/LICENSE b/node_modules/anymatch/LICENSE new file mode 100644 index 0000000..491766c --- /dev/null +++ b/node_modules/anymatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/anymatch/README.md b/node_modules/anymatch/README.md new file mode 100644 index 0000000..1dd67f5 --- /dev/null +++ b/node_modules/anymatch/README.md @@ -0,0 +1,87 @@ +anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) +====== +Javascript module to match a string against a regular expression, glob, string, +or function that takes the string as an argument and returns a truthy or falsy +value. The matcher can also be an array of any or all of these. Useful for +allowing a very flexible user-defined config to define things like file paths. + +__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. 
See https://github.com/micromatch/micromatch#backslashes for more information.__ + + +Usage +----- +```sh +npm install anymatch +``` + +#### anymatch(matchers, testString, [returnIndex], [options]) +* __matchers__: (_Array|String|RegExp|Function_) +String to be directly matched, string with glob patterns, regular expression +test, function that takes the testString as an argument and returns a truthy +value if it should be matched, or an array of any number and mix of these types. +* __testString__: (_String|Array_) The string to test against the matchers. If +passed as an array, the first element of the array will be used as the +`testString` for non-function matchers, while the entire array will be applied +as the arguments for function matchers. +* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. + * __returnIndex__: (_Boolean [optional]_) If true, return the array index of +the first matcher that that testString matched, or -1 if no match, instead of a +boolean result. + +```js +const anymatch = require('anymatch'); + +const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ; + +anymatch(matchers, 'path/to/file.js'); // true +anymatch(matchers, 'path/anyjs/baz.js'); // true +anymatch(matchers, 'path/to/foo.js'); // true +anymatch(matchers, 'path/to/bar.js'); // true +anymatch(matchers, 'bar.js'); // false + +// returnIndex = true +anymatch(matchers, 'foo.js', {returnIndex: true}); // 2 +anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1 + +// any picomatc + +// using globs to match directories and their children +anymatch('node_modules', 'node_modules'); // true +anymatch('node_modules', 'node_modules/somelib/index.js'); // false +anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true +anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false +anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true + +const matcher = anymatch(matchers); +['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ] +anymatch master* ❯ + +``` + +#### anymatch(matchers) +You can also pass in only your matcher(s) to get a curried function that has +already been bound to the provided matching criteria. This can be used as an +`Array#filter` callback. + +```js +var matcher = anymatch(matchers); + +matcher('path/to/file.js'); // true +matcher('path/anyjs/baz.js', true); // 1 + +['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] +``` + +Changelog +---------- +[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) + +- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only. +- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). +- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) +for glob pattern matching. Issues with glob pattern matching should be +reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues). 
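The API section above notes that the optional `options` argument accepts any picomatch option, but none of the snippets exercise it. A minimal sketch of what that looks like, using picomatch's `dot` option (the file paths here are illustrative, not taken from the package docs):

```js
const anymatch = require('anymatch');

// Globs ignore dotfiles by default; picomatch's `dot` option changes that.
anymatch('**/*.js', '.config/setup.js');                 // false
anymatch('**/*.js', '.config/setup.js', { dot: true });  // true

// picomatch options combine with returnIndex in the same object.
anymatch(['*.md', '**/*.js'], '.config/setup.js', { dot: true, returnIndex: true }); // 1
```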
+ +License +------- +[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/node_modules/anymatch/index.d.ts b/node_modules/anymatch/index.d.ts new file mode 100644 index 0000000..3ef7eaa --- /dev/null +++ b/node_modules/anymatch/index.d.ts @@ -0,0 +1,20 @@ +type AnymatchFn = (testString: string) => boolean; +type AnymatchPattern = string|RegExp|AnymatchFn; +type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] +type AnymatchTester = { + (testString: string|any[], returnIndex: true): number; + (testString: string|any[]): boolean; +} + +type PicomatchOptions = {dot: boolean}; + +declare const anymatch: { + (matchers: AnymatchMatcher): AnymatchTester; + (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; + (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; + (matchers: AnymatchMatcher, testString: string|any[]): boolean; +} + +export {AnymatchMatcher as Matcher} +export {AnymatchTester as Tester} +export default anymatch diff --git a/node_modules/anymatch/index.js b/node_modules/anymatch/index.js new file mode 100644 index 0000000..8eb73e9 --- /dev/null +++ b/node_modules/anymatch/index.js @@ -0,0 +1,104 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { value: true }); + +const picomatch = require('picomatch'); +const normalizePath = require('normalize-path'); + +/** + * @typedef {(testString: string) => boolean} AnymatchFn + * @typedef {string|RegExp|AnymatchFn} AnymatchPattern + * @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher + */ +const BANG = '!'; +const DEFAULT_OPTIONS = {returnIndex: false}; +const arrify = (item) => Array.isArray(item) ? item : [item]; + +/** + * @param {AnymatchPattern} matcher + * @param {object} options + * @returns {AnymatchFn} + */ +const createPattern = (matcher, options) => { + if (typeof matcher === 'function') { + return matcher; + } + if (typeof matcher === 'string') { + const glob = picomatch(matcher, options); + return (string) => matcher === string || glob(string); + } + if (matcher instanceof RegExp) { + return (string) => matcher.test(string); + } + return (string) => false; +}; + +/** + * @param {Array} patterns + * @param {Array} negPatterns + * @param {String|Array} args + * @param {Boolean} returnIndex + * @returns {boolean|number} + */ +const matchPatterns = (patterns, negPatterns, args, returnIndex) => { + const isList = Array.isArray(args); + const _path = isList ? args[0] : args; + if (!isList && typeof _path !== 'string') { + throw new TypeError('anymatch: second argument must be a string: got ' + + Object.prototype.toString.call(_path)) + } + const path = normalizePath(_path, false); + + for (let index = 0; index < negPatterns.length; index++) { + const nglob = negPatterns[index]; + if (nglob(path)) { + return returnIndex ? -1 : false; + } + } + + const applied = isList && [path].concat(args.slice(1)); + for (let index = 0; index < patterns.length; index++) { + const pattern = patterns[index]; + if (isList ? pattern(...applied) : pattern(path)) { + return returnIndex ? index : true; + } + } + + return returnIndex ? -1 : false; +}; + +/** + * @param {AnymatchMatcher} matchers + * @param {Array|string} testString + * @param {object} options + * @returns {boolean|number|Function} + */ +const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { + if (matchers == null) { + throw new TypeError('anymatch: specify first argument'); + } + const opts = typeof options === 'boolean' ? 
{returnIndex: options} : options; + const returnIndex = opts.returnIndex || false; + + // Early cache for matchers. + const mtchers = arrify(matchers); + const negatedGlobs = mtchers + .filter(item => typeof item === 'string' && item.charAt(0) === BANG) + .map(item => item.slice(1)) + .map(item => picomatch(item, opts)); + const patterns = mtchers + .filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) + .map(matcher => createPattern(matcher, opts)); + + if (testString == null) { + return (testString, ri = false) => { + const returnIndex = typeof ri === 'boolean' ? ri : false; + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); + } + } + + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); +}; + +anymatch.default = anymatch; +module.exports = anymatch; diff --git a/node_modules/anymatch/package.json b/node_modules/anymatch/package.json new file mode 100644 index 0000000..2cb2307 --- /dev/null +++ b/node_modules/anymatch/package.json @@ -0,0 +1,48 @@ +{ + "name": "anymatch", + "version": "3.1.3", + "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", + "files": [ + "index.js", + "index.d.ts" + ], + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "author": { + "name": "Elan Shanker", + "url": "https://github.com/es128" + }, + "license": "ISC", + "homepage": "https://github.com/micromatch/anymatch", + "repository": { + "type": "git", + "url": "https://github.com/micromatch/anymatch" + }, + "keywords": [ + "match", + "any", + "string", + "file", + "fs", + "list", + "glob", + "regex", + "regexp", + "regular", + "expression", + "function" + ], + "scripts": { + "test": "nyc mocha", + "mocha": "mocha" + }, + "devDependencies": { + "mocha": "^6.1.3", + "nyc": "^14.0.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
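Stepping back to the anymatch implementation added above: string matchers that start with `!` are split out into negated globs and evaluated before the positive patterns, so they can veto an otherwise matching path. The README never demonstrates this, so here is a small sketch (the paths are made up for illustration):

```js
const anymatch = require('anymatch');

// '!**/*.min.js' is compiled as a negated glob and checked first.
const matcher = anymatch(['src/**/*.js', '!**/*.min.js']);

matcher('src/lib/app.js');        // true  – matches the positive glob
matcher('src/lib/vendor.min.js'); // false – vetoed by the negated glob
matcher('docs/readme.md');        // false – matches nothing
```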
diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json new file mode 100644 index 0000000..ac08048 --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json @@ -0,0 +1,263 @@ +[ + "3dm", + "3ds", + "3g2", + "3gp", + "7z", + "a", + "aac", + "adp", + "afdesign", + "afphoto", + "afpub", + "ai", + "aif", + "aiff", + "alz", + "ape", + "apk", + "appimage", + "ar", + "arj", + "asf", + "au", + "avi", + "bak", + "baml", + "bh", + "bin", + "bk", + "bmp", + "btif", + "bz2", + "bzip2", + "cab", + "caf", + "cgm", + "class", + "cmx", + "cpio", + "cr2", + "cur", + "dat", + "dcm", + "deb", + "dex", + "djvu", + "dll", + "dmg", + "dng", + "doc", + "docm", + "docx", + 
"dot", + "dotm", + "dra", + "DS_Store", + "dsk", + "dts", + "dtshd", + "dvb", + "dwg", + "dxf", + "ecelp4800", + "ecelp7470", + "ecelp9600", + "egg", + "eol", + "eot", + "epub", + "exe", + "f4v", + "fbs", + "fh", + "fla", + "flac", + "flatpak", + "fli", + "flv", + "fpx", + "fst", + "fvt", + "g3", + "gh", + "gif", + "graffle", + "gz", + "gzip", + "h261", + "h263", + "h264", + "icns", + "ico", + "ief", + "img", + "ipa", + "iso", + "jar", + "jpeg", + "jpg", + "jpgv", + "jpm", + "jxr", + "key", + "ktx", + "lha", + "lib", + "lvp", + "lz", + "lzh", + "lzma", + "lzo", + "m3u", + "m4a", + "m4v", + "mar", + "mdi", + "mht", + "mid", + "midi", + "mj2", + "mka", + "mkv", + "mmr", + "mng", + "mobi", + "mov", + "movie", + "mp3", + "mp4", + "mp4a", + "mpeg", + "mpg", + "mpga", + "mxu", + "nef", + "npx", + "numbers", + "nupkg", + "o", + "odp", + "ods", + "odt", + "oga", + "ogg", + "ogv", + "otf", + "ott", + "pages", + "pbm", + "pcx", + "pdb", + "pdf", + "pea", + "pgm", + "pic", + "png", + "pnm", + "pot", + "potm", + "potx", + "ppa", + "ppam", + "ppm", + "pps", + "ppsm", + "ppsx", + "ppt", + "pptm", + "pptx", + "psd", + "pya", + "pyc", + "pyo", + "pyv", + "qt", + "rar", + "ras", + "raw", + "resources", + "rgb", + "rip", + "rlc", + "rmf", + "rmvb", + "rpm", + "rtf", + "rz", + "s3m", + "s7z", + "scpt", + "sgi", + "shar", + "snap", + "sil", + "sketch", + "slk", + "smv", + "snk", + "so", + "stl", + "suo", + "sub", + "swf", + "tar", + "tbz", + "tbz2", + "tga", + "tgz", + "thmx", + "tif", + "tiff", + "tlz", + "ttc", + "ttf", + "txz", + "udf", + "uvh", + "uvi", + "uvm", + "uvp", + "uvs", + "uvu", + "viv", + "vob", + "war", + "wav", + "wax", + "wbmp", + "wdp", + "weba", + "webm", + "webp", + "whl", + "wim", + "wm", + "wma", + "wmv", + "wmx", + "woff", + "woff2", + "wrm", + "wvx", + "xbm", + "xif", + "xla", + "xlam", + "xls", + "xlsb", + "xlsm", + "xlsx", + "xlt", + "xltm", + "xltx", + "xm", + "xmind", + "xpi", + "xpm", + "xwd", + "xz", + "z", + "zip", + "zipx" +] diff --git a/node_modules/binary-extensions/binary-extensions.json.d.ts b/node_modules/binary-extensions/binary-extensions.json.d.ts new file mode 100644 index 0000000..94a248c --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json.d.ts @@ -0,0 +1,3 @@ +declare const binaryExtensionsJson: readonly string[]; + +export = binaryExtensionsJson; diff --git a/node_modules/binary-extensions/index.d.ts b/node_modules/binary-extensions/index.d.ts new file mode 100644 index 0000000..f469ac5 --- /dev/null +++ b/node_modules/binary-extensions/index.d.ts @@ -0,0 +1,14 @@ +/** +List of binary file extensions. 
+ +@example +``` +import binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` +*/ +declare const binaryExtensions: readonly string[]; + +export = binaryExtensions; diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js new file mode 100644 index 0000000..d46e468 --- /dev/null +++ b/node_modules/binary-extensions/index.js @@ -0,0 +1 @@ +module.exports = require('./binary-extensions.json'); diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license new file mode 100644 index 0000000..5493a1a --- /dev/null +++ b/node_modules/binary-extensions/license @@ -0,0 +1,10 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) +Copyright (c) Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json new file mode 100644 index 0000000..4710c33 --- /dev/null +++ b/node_modules/binary-extensions/package.json @@ -0,0 +1,40 @@ +{ + "name": "binary-extensions", + "version": "2.3.0", + "description": "List of binary file extensions", + "license": "MIT", + "repository": "sindresorhus/binary-extensions", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "sideEffects": false, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "binary-extensions.json", + "binary-extensions.json.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "json", + "list", + "array" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/binary-extensions/readme.md b/node_modules/binary-extensions/readme.md new file mode 100644 index 0000000..88519b3 --- /dev/null +++ b/node_modules/binary-extensions/readme.md @@ -0,0 +1,25 @@ +# binary-extensions + +> List of binary file extensions + +The list is just a [JSON file](binary-extensions.json) and can be used anywhere. 
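Because the export is a plain array of extension strings, a membership check is enough to classify a file by its extension. A tiny sketch along the lines of what the related `is-binary-path` package does (the `isBinary` helper is illustrative, not part of this package):

```js
const path = require('path');
const binaryExtensions = require('binary-extensions');

const extensions = new Set(binaryExtensions);

// True when the file's lower-cased extension appears in the list.
const isBinary = (file) => extensions.has(path.extname(file).slice(1).toLowerCase());

isBinary('photo.PNG');  // true
isBinary('notes.txt');  // false
```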
+ +## Install + +```sh +npm install binary-extensions +``` + +## Usage + +```js +const binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` + +## Related + +- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file +- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! 
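Two consequences of the rules above are easy to miss: a pattern containing no valid list or sequence is returned untouched as `[str]`, and an opening brace preceded by `$` is skipped entirely. A short illustration (outputs inferred from the rules described above and the index.js added later in this patch):

```js
var expand = require('brace-expansion');

expand('a{b}c');   // => ['a{b}c']    – a single item is not a valid list
expand('${1..3}'); // => ['${1..3}']  – `${` is never brace-expanded
expand('x{1..3}'); // => ['x1', 'x2', 'x3'] – a normal sequence still expands
```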
+ +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..a18faa8 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
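Before moving on to braces itself, the long comment inside `expandTop` above is worth seeing concretely: a `{}` at the very start of the pattern is escaped and preserved, while the same two characters later in the string still participate in expansion. A quick sketch based on that comment (not an example from the package's own docs):

```js
var expand = require('brace-expansion');

expand('{},a}b');  // => ['{},a}b']      – leading '{}' is preserved, nothing expands
expand('a{},b}c'); // => ['a}c', 'abc']  – the same '{}' mid-string is expanded
```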
diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md new file mode 100644 index 0000000..f59dd60 --- /dev/null +++ b/node_modules/braces/README.md @@ -0,0 +1,586 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. + +- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +- [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. 
+ +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. + +### Steppped ranges + +Steps, or increments, may be used with ranges: + +```js +console.log(braces.expand('{2..10..2}')); +//=> ['2', '4', '6', '8', '10'] + +console.log(braces('{2..10..2}')); +//=> ['(2|4|6|8|10)'] +``` + +When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. + +### Nesting + +Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. + +**"Expanded" braces** + +```js +console.log(braces.expand('a{b,c,/{x,y}}/e')); +//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] + +console.log(braces.expand('a/{x,{1..5},y}/c')); +//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] +``` + +**"Optimized" braces** + +```js +console.log(braces('a{b,c,/{x,y}}/e')); +//=> ['a(b|c|/(x|y))/e'] + +console.log(braces('a/{x,{1..5},y}/c')); +//=> ['a/(x|([1-5])|y)/c'] +``` + +### Escaping + +**Escaping braces** + +A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: + +```js +console.log(braces.expand('a\\{d,c,b}e')); +//=> ['a{d,c,b}e'] + +console.log(braces.expand('a{d,c,b\\}e')); +//=> ['a{d,c,b}e'] +``` + +**Escaping commas** + +Commas inside braces may also be escaped: + +```js +console.log(braces.expand('a{b\\,c}d')); +//=> ['a{b,c}d'] + +console.log(braces.expand('a{d\\,c,b}e')); +//=> ['ad,ce', 'abe'] +``` + +**Single items** + +Following bash conventions, a brace pattern is also not expanded when it contains a single character: + +```js +console.log(braces.expand('a{b}c')); +//=> ['a{b}c'] +``` + +## Options + +### options.maxLength + +**Type**: `Number` + +**Default**: `10,000` + +**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. 
+ +```js +console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error +``` + +### options.expand + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). + +```js +console.log(braces('a/{b,c}/d', { expand: true })); +//=> [ 'a/b/d', 'a/c/d' ] +``` + +### options.nodupes + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Remove duplicates from the returned array. + +### options.rangeLimit + +**Type**: `Number` + +**Default**: `1000` + +**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. + +You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. + +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. + return 'foo/' + String.fromCharCode(value) + '-' + index; + }, +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + }, +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. 
+ +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.keepEscaping + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Do not strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +- [brace expansion](#brace-expansion) is for generating new lists +- [brace matching](#brace-matching) is for filtering existing lists + +
+More about brace expansion (click to expand) + +There are two main types of brace expansion: + +1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` +2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". + +Here are some example brace patterns to illustrate how they work: + +**Sets** + +``` +{a,b,c} => a b c +{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 +``` + +**Sequences** + +``` +{1..9} => 1 2 3 4 5 6 7 8 9 +{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 +{1..20..3} => 1 4 7 10 13 16 19 +{a..j} => a b c d e f g h i j +{j..a} => j i h g f e d c b a +{a..z..3} => a d g j m p s v y +``` + +**Combination** + +Sets and sequences can be mixed together or used along with any other strings. + +``` +{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 +foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar +``` + +The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. + +## Brace matching + +In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. + +For example, the pattern `foo/{1..3}/bar` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +``` + +But not: + +``` +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +## Brace matching pitfalls + +Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. + +### tldr + +**"brace bombs"** + +- brace expansion can eat up a huge amount of processing resources +- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially +- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) + +For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. + +### The solution + +Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. + +### Geometric complexity + +At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
+ +For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: + +``` +{1,2}{3,4} => (2X2) => 13 14 23 24 +{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 +``` + +But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: + +``` +{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 + 249 257 258 259 267 268 269 347 348 349 357 + 358 359 367 368 369 +``` + +Now, imagine how this complexity grows given that each element is a n-tuple: + +``` +{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) +{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) +``` + +Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. + +**More information** + +Interested in learning more about brace expansion? + +- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) +- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) +- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) + +
+ +## Performance + +Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. + +### Better algorithms + +Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. + +Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. + +**The proof is in the numbers** + +Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------------------- | ------------------- | ---------------------------- | +| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | +| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | +| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | +| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | +| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | +| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | +| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | +| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | +| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | +| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | +| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | +| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | +| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | +| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | +| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | +| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | +| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | + +### Faster algorithms + +When you need expansion, braces is still much faster. + +_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------- | --------------------------- | ---------------------------- | +| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | +| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | +| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | +| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | +| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | +| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | +| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | +| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | + +If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). + +## Benchmarks + +### Running benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm benchmark +``` + +### Latest results + +Braces is more accurate, without sacrificing performance. 
+ +```bash +● expand - range (expanded) + braces x 53,167 ops/sec ±0.12% (102 runs sampled) + minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) +● expand - range (optimized for regex) + braces x 373,442 ops/sec ±0.04% (100 runs sampled) + minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) +● expand - nested ranges (expanded) + braces x 33,921 ops/sec ±0.09% (99 runs sampled) + minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) +● expand - nested ranges (optimized for regex) + braces x 287,479 ops/sec ±0.52% (98 runs sampled) + minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) +● expand - set (expanded) + braces x 238,243 ops/sec ±0.19% (97 runs sampled) + minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) +● expand - set (optimized for regex) + braces x 321,844 ops/sec ±0.10% (97 runs sampled) + minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) +● expand - nested sets (expanded) + braces x 165,371 ops/sec ±0.42% (96 runs sampled) + minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) +● expand - nested sets (optimized for regex) + braces x 242,948 ops/sec ±0.12% (99 runs sampled) + minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` +
+ +### Contributors + +| **Commits** | **Contributor** | +| ----------- | ------------------------------------------------------------- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +- [GitHub Profile](https://github.com/jonschlinkert) +- [Twitter Profile](https://twitter.com/jonschlinkert) +- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +--- + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js new file mode 100644 index 0000000..d222c13 --- /dev/null +++ b/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (const pattern of input) { + const result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js new file mode 100644 index 0000000..dce69be --- /dev/null +++ b/node_modules/braces/lib/compile.js @@ -0,0 +1,60 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + const walk = (node, parent = {}) => { + const invalidBlock = utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + const invalid = invalidBlock === true || invalidNode === true; + const prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + + if (node.isClose === true) { + console.log('node.isClose', prefix, node.value); + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? prefix + node.value : '('; + } + + if (node.type === 'close') { + return invalid ? prefix + node.value : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : invalid ? node.value : '|'; + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? 
`(${range})` : range; + } + } + + if (node.nodes) { + for (const child of node.nodes) { + output += walk(child, node); + } + } + + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js new file mode 100644 index 0000000..2bb3b88 --- /dev/null +++ b/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 10000, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js new file mode 100644 index 0000000..35b2c41 --- /dev/null +++ b/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + const result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (const item of queue) { + if (Array.isArray(item)) { + for (const value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + const rangeLimit = options.rangeLimit === undefined ? 
1000 : options.rangeLimit; + + const walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + const enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + const child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js new file mode 100644 index 0000000..3a6988e --- /dev/null +++ b/node_modules/braces/lib/parse.js @@ -0,0 +1,331 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + const opts = options || {}; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + const ast = { type: 'root', input, nodes: [] }; + const stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + const length = input.length; + let index = 0; + let depth = 0; + let value; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + const open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + const brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + const type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value === CHAR_COMMA && depth 
> 0) { + if (block.ranges > 0) { + block.ranges = 0; + const open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + const siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + const before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + const parent = stack[stack.length - 1]; + const index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js new file mode 100644 index 0000000..8bcf872 --- /dev/null +++ b/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + const stringify = (node, parent = {}) => { + const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (const child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js new file mode 100644 index 0000000..d19311f --- /dev/null +++ b/node_modules/braces/lib/utils.js @@ -0,0 +1,122 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + +exports.escapeNode = 
(block, n = 0, type) => { + const node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + const ele = arr[i]; + + if (Array.isArray(ele)) { + flat(ele); + continue; + } + + if (ele !== undefined) { + result.push(ele); + } + } + return result; + }; + + flat(args); + return result; +}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json new file mode 100644 index 0000000..c3c056e --- /dev/null +++ b/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.3", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.1.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/chokidar/LICENSE b/node_modules/chokidar/LICENSE new file mode 100644 index 0000000..fa9162b --- /dev/null +++ b/node_modules/chokidar/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the “Software”), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/chokidar/README.md b/node_modules/chokidar/README.md new file mode 100644 index 0000000..8e25dec --- /dev/null +++ b/node_modules/chokidar/README.md @@ -0,0 +1,308 @@ +# Chokidar [![Weekly downloads](https://img.shields.io/npm/dw/chokidar.svg)](https://github.com/paulmillr/chokidar) [![Yearly downloads](https://img.shields.io/npm/dy/chokidar.svg)](https://github.com/paulmillr/chokidar) + +> Minimal and efficient cross-platform file watching library + +[![NPM](https://nodei.co/npm/chokidar.png)](https://www.npmjs.com/package/chokidar) + +## Why? + +Node.js `fs.watch`: + +* Doesn't report filenames on MacOS. +* Doesn't report events at all when using editors like Sublime on MacOS. +* Often reports events twice. 
+* Emits most changes as `rename`. +* Does not provide an easy way to recursively watch file trees. +* Does not support recursive watching on Linux. + +Node.js `fs.watchFile`: + +* Almost as bad at event handling. +* Also does not provide any recursive watching. +* Results in high CPU utilization. + +Chokidar resolves these problems. + +Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in +[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode), +[gulp](https://github.com/gulpjs/gulp/), +[karma](https://karma-runner.github.io/), +[PM2](https://github.com/Unitech/PM2), +[browserify](http://browserify.org/), +[webpack](https://webpack.github.io/), +[BrowserSync](https://www.browsersync.io/), +and [many others](https://www.npmjs.com/browse/depended/chokidar). +It has proven itself in production environments. + +Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/) + +## How? + +Chokidar does still rely on the Node.js core `fs` module, but when using +`fs.watch` and `fs.watchFile` for watching, it normalizes the events it +receives, often checking for truth by getting file stats and/or dir contents. + +On MacOS, chokidar by default uses a native extension exposing the Darwin +`FSEvents` API. This provides very efficient recursive watching compared with +implementations like `kqueue` available on most \*nix platforms. Chokidar still +does have to do some work to normalize the events received that way as well. + +On most other platforms, the `fs.watch`-based implementation is the default, which +avoids polling and keeps CPU usage down. Be advised that chokidar will initiate +watchers recursively for everything within scope of the paths that have been +specified, so be judicious about not wasting system resources by watching much +more than needed. + +## Getting started + +Install with npm: + +```sh +npm install chokidar +``` + +Then `require` and use it in your code: + +```javascript +const chokidar = require('chokidar'); + +// One-liner for current directory +chokidar.watch('.').on('all', (event, path) => { + console.log(event, path); +}); +``` + +## API + +```javascript +// Example of a more typical implementation structure + +// Initialize watcher. +const watcher = chokidar.watch('file, dir, glob, or array', { + ignored: /(^|[\/\\])\../, // ignore dotfiles + persistent: true +}); + +// Something to use when events are received. +const log = console.log.bind(console); +// Add event listeners. +watcher + .on('add', path => log(`File ${path} has been added`)) + .on('change', path => log(`File ${path} has been changed`)) + .on('unlink', path => log(`File ${path} has been removed`)); + +// More possible events. +watcher + .on('addDir', path => log(`Directory ${path} has been added`)) + .on('unlinkDir', path => log(`Directory ${path} has been removed`)) + .on('error', error => log(`Watcher error: ${error}`)) + .on('ready', () => log('Initial scan complete. Ready for changes')) + .on('raw', (event, path, details) => { // internal + log('Raw event info:', event, path, details); + }); + +// 'add', 'addDir' and 'change' events also receive stat() results as second +// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats +watcher.on('change', (path, stats) => { + if (stats) console.log(`File ${path} changed size to ${stats.size}`); +}); + +// Watch new files. 
+watcher.add('new-file'); +watcher.add(['new-file-2', 'new-file-3', '**/other-file*']); + +// Get list of actual paths being watched on the filesystem +var watchedPaths = watcher.getWatched(); + +// Un-watch some files. +await watcher.unwatch('new-file*'); + +// Stop watching. +// The method is async! +watcher.close().then(() => console.log('closed')); + +// Full list of options. See below for descriptions. +// Do not use this example! +chokidar.watch('file', { + persistent: true, + + ignored: '*.txt', + ignoreInitial: false, + followSymlinks: true, + cwd: '.', + disableGlobbing: false, + + usePolling: false, + interval: 100, + binaryInterval: 300, + alwaysStat: false, + depth: 99, + awaitWriteFinish: { + stabilityThreshold: 2000, + pollInterval: 100 + }, + + ignorePermissionErrors: false, + atomic: true // or a custom 'atomicity delay', in milliseconds (default 100) +}); + +``` + +`chokidar.watch(paths, [options])` + +* `paths` (string or array of strings). Paths to files, dirs to be watched +recursively, or glob patterns. + - Note: globs must not contain windows separators (`\`), + because that's how they work by the standard — + you'll need to replace them with forward slashes (`/`). + - Note 2: for additional glob documentation, check out low-level + library: [picomatch](https://github.com/micromatch/picomatch). +* `options` (object) Options object as defined below: + +#### Persistence + +* `persistent` (default: `true`). Indicates whether the process +should continue to run as long as files are being watched. If set to +`false` when using `fsevents` to watch, no more events will be emitted +after `ready`, even if the process continues to run. + +#### Path filtering + +* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition) +Defines files/paths to be ignored. The whole relative or absolute path is +tested, not just filename. If a function with two arguments is provided, it +gets called twice per path - once with a single argument (the path), second +time with two arguments (the path and the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object of that path). +* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while +instantiating the watching as chokidar discovers these file paths (before the `ready` event). +* `followSymlinks` (default: `true`). When `false`, only the +symlinks themselves will be watched for changes instead of following +the link references and bubbling events through the link's path. +* `cwd` (no default). The base directory from which watch `paths` are to be +derived. Paths emitted with events will be relative to this. +* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as +literal path names, even if they look like globs. + +#### Performance + +* `usePolling` (default: `false`). +Whether to use fs.watchFile (backed by polling), or fs.watch. If polling +leads to high CPU utilization, consider setting this to `false`. It is +typically necessary to **set this to `true` to successfully watch files over +a network**, and it may be necessary to successfully watch files in other +non-standard situations. Setting to `true` explicitly on MacOS overrides the +`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable +to true (1) or false (0) in order to override this option. +* _Polling-specific settings_ (effective when `usePolling: true`) + * `interval` (default: `100`). 
Interval of file system polling, in milliseconds. You may also + set the CHOKIDAR_INTERVAL env variable to override this option. + * `binaryInterval` (default: `300`). Interval of file system + polling for binary files. + ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) +* `useFsEvents` (default: `true` on MacOS). Whether to use the +`fsevents` watching interface if available. When set to `true` explicitly +and `fsevents` is available this supercedes the `usePolling` setting. When +set to `false` on MacOS, `usePolling: true` becomes the default. +* `alwaysStat` (default: `false`). If relying upon the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object that may get passed with `add`, `addDir`, and `change` events, set +this to `true` to ensure it is provided even in cases where it wasn't +already available from the underlying watch events. +* `depth` (default: `undefined`). If set, limits how many levels of +subdirectories will be traversed. +* `awaitWriteFinish` (default: `false`). +By default, the `add` event will fire when a file first appears on disk, before +the entire file has been written. Furthermore, in some cases some `change` +events will be emitted while the file is being written. In some cases, +especially when watching for large files there will be a need to wait for the +write operation to finish before responding to a file creation or modification. +Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size, +holding its `add` and `change` events until the size does not change for a +configurable amount of time. The appropriate duration setting is heavily +dependent on the OS and hardware. For accurate detection this parameter should +be relatively high, making file watching much less responsive. +Use with caution. + * *`options.awaitWriteFinish` can be set to an object in order to adjust + timing params:* + * `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in + milliseconds for a file size to remain constant before emitting its event. + * `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds. + +#### Errors + +* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files +that don't have read permissions if possible. If watching fails due to `EPERM` +or `EACCES` with this set to `true`, the errors will be suppressed silently. +* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`). +Automatically filters out artifacts that occur when using editors that use +"atomic writes" instead of writing directly to the source file. If a file is +re-added within 100 ms of being deleted, Chokidar emits a `change` event +rather than `unlink` then `add`. If the default of 100 ms does not work well +for you, you can override it by setting `atomic` to a custom value, in +milliseconds. + +### Methods & Events + +`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`: + +* `.add(path / paths)`: Add files, directories, or glob patterns for tracking. +Takes an array of strings or just one string. +* `.on(event, callback)`: Listen for an FS event. +Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`, +`raw`, `error`. +Additionally `all` is available which gets emitted with the underlying event +name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully. 
+* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns. +Takes an array of strings or just one string. +* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen. +* `.getWatched()`: Returns an object representing all the paths on the file +system being watched by this `FSWatcher` instance. The object's keys are all the +directories (using absolute paths unless the `cwd` option was used), and the +values are arrays of the names of the items contained in each directory. + +## CLI + +If you need a CLI interface for your file watching, check out +[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to +execute a command on each change, or get a stdio stream of change events. + +## Install Troubleshooting + +* `npm WARN optional dep failed, continuing fsevents@n.n.n` + * This message is a normal part of how `npm` handles optional dependencies and is + not indicative of a problem. Even if accompanied by other related error messages, + Chokidar should function properly. + +* `TypeError: fsevents is not a constructor` + * Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar. + +* Chokidar is producing `ENOSPC` error on Linux, like this: + * `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell` + `Error: watch /home/ ENOSPC` + * This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in Terminal: + `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p` + +## Changelog + +For a more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md). +- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks. +- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macos file replacement. +- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to close method. +- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions. +- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%. +- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher. +- **v2 (Dec 29, 2017):** Globs are now posix-style-only; without windows support. Tons of bugfixes. +- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported +- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66) + +## Also + +Why was chokidar named this way? What's the meaning behind it? + +>Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _चतुष्क_ (crossway, quadrangle, consisting-of-four). This word is also used in other languages like Urdu as (چوکیدار) which is widely used in Pakistan and India. + +## License + +MIT (c) Paul Miller (), see [LICENSE](LICENSE) file.
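The options and methods documented in the README above compose naturally. As an editorial illustration (not part of the chokidar package itself), the sketch below shows a watcher that ignores dotfiles, holds events until writes settle via `awaitWriteFinish`, inspects `getWatched()` once ready, and releases its handles with the async `close()` method; the `./data` path and the threshold values are placeholders.

```javascript
// Illustrative sketch based on the README above; not shipped with chokidar.
const chokidar = require('chokidar');

const watcher = chokidar.watch('./data', {
  ignored: /(^|[\/\\])\../,   // ignore dotfiles, as in the example above
  ignoreInitial: true,        // skip 'add' events for files already on disk
  awaitWriteFinish: {         // hold add/change until the file size is stable
    stabilityThreshold: 500,
    pollInterval: 100
  }
});

watcher
  .on('add', path => console.log(`added: ${path}`))
  .on('change', path => console.log(`changed: ${path}`))
  .on('unlink', path => console.log(`removed: ${path}`))
  .on('error', error => console.error('watcher error:', error))
  .on('ready', () => {
    // getWatched() maps each watched directory to the entries inside it
    console.log('watching:', watcher.getWatched());
  });

// close() returns a Promise (async since v3.3); await it on shutdown
process.on('SIGINT', async () => {
  await watcher.close();
  console.log('watcher closed');
  process.exit(0);
});
```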
diff --git a/node_modules/chokidar/index.js b/node_modules/chokidar/index.js new file mode 100644 index 0000000..8752893 --- /dev/null +++ b/node_modules/chokidar/index.js @@ -0,0 +1,973 @@ +'use strict'; + +const { EventEmitter } = require('events'); +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const readdirp = require('readdirp'); +const anymatch = require('anymatch').default; +const globParent = require('glob-parent'); +const isGlob = require('is-glob'); +const braces = require('braces'); +const normalizePath = require('normalize-path'); + +const NodeFsHandler = require('./lib/nodefs-handler'); +const FsEventsHandler = require('./lib/fsevents-handler'); +const { + EV_ALL, + EV_READY, + EV_ADD, + EV_CHANGE, + EV_UNLINK, + EV_ADD_DIR, + EV_UNLINK_DIR, + EV_RAW, + EV_ERROR, + + STR_CLOSE, + STR_END, + + BACK_SLASH_RE, + DOUBLE_SLASH_RE, + SLASH_OR_BACK_SLASH_RE, + DOT_RE, + REPLACER_RE, + + SLASH, + SLASH_SLASH, + BRACE_START, + BANG, + ONE_DOT, + TWO_DOTS, + GLOBSTAR, + SLASH_GLOBSTAR, + ANYMATCH_OPTS, + STRING_TYPE, + FUNCTION_TYPE, + EMPTY_STR, + EMPTY_FN, + + isWindows, + isMacos, + isIBMi +} = require('./lib/constants'); + +const stat = promisify(fs.stat); +const readdir = promisify(fs.readdir); + +/** + * @typedef {String} Path + * @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName + * @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType + */ + +/** + * + * @typedef {Object} WatchHelpers + * @property {Boolean} followSymlinks + * @property {'stat'|'lstat'} statMethod + * @property {Path} path + * @property {Path} watchPath + * @property {Function} entryPath + * @property {Boolean} hasGlob + * @property {Object} globFilter + * @property {Function} filterPath + * @property {Function} filterDir + */ + +const arrify = (value = []) => Array.isArray(value) ? value : [value]; +const flatten = (list, result = []) => { + list.forEach(item => { + if (Array.isArray(item)) { + flatten(item, result); + } else { + result.push(item); + } + }); + return result; +}; + +const unifyPaths = (paths_) => { + /** + * @type {Array} + */ + const paths = flatten(arrify(paths_)); + if (!paths.every(p => typeof p === STRING_TYPE)) { + throw new TypeError(`Non-string provided as watch path: ${paths}`); + } + return paths.map(normalizePathToUnix); +}; + +// If SLASH_SLASH occurs at the beginning of path, it is not replaced +// because "//StoragePC/DrivePool/Movies" is a valid network path +const toUnix = (string) => { + let str = string.replace(BACK_SLASH_RE, SLASH); + let prepend = false; + if (str.startsWith(SLASH_SLASH)) { + prepend = true; + } + while (str.match(DOUBLE_SLASH_RE)) { + str = str.replace(DOUBLE_SLASH_RE, SLASH); + } + if (prepend) { + str = SLASH + str; + } + return str; +}; + +// Our version of upath.normalize +// TODO: this is not equal to path-normalize module - investigate why +const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path))); + +const normalizeIgnored = (cwd = EMPTY_STR) => (path) => { + if (typeof path !== STRING_TYPE) return path; + return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path)); +}; + +const getAbsolutePath = (path, cwd) => { + if (sysPath.isAbsolute(path)) { + return path; + } + if (path.startsWith(BANG)) { + return BANG + sysPath.join(cwd, path.slice(1)); + } + return sysPath.join(cwd, path); +}; + +const undef = (opts, key) => opts[key] === undefined; + +/** + * Directory entry. 
+ * @property {Path} path + * @property {Set} items + */ +class DirEntry { + /** + * @param {Path} dir + * @param {Function} removeWatcher + */ + constructor(dir, removeWatcher) { + this.path = dir; + this._removeWatcher = removeWatcher; + /** @type {Set} */ + this.items = new Set(); + } + + add(item) { + const {items} = this; + if (!items) return; + if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item); + } + + async remove(item) { + const {items} = this; + if (!items) return; + items.delete(item); + if (items.size > 0) return; + + const dir = this.path; + try { + await readdir(dir); + } catch (err) { + if (this._removeWatcher) { + this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir)); + } + } + } + + has(item) { + const {items} = this; + if (!items) return; + return items.has(item); + } + + /** + * @returns {Array} + */ + getChildren() { + const {items} = this; + if (!items) return; + return [...items.values()]; + } + + dispose() { + this.items.clear(); + delete this.path; + delete this._removeWatcher; + delete this.items; + Object.freeze(this); + } +} + +const STAT_METHOD_F = 'stat'; +const STAT_METHOD_L = 'lstat'; +class WatchHelper { + constructor(path, watchPath, follow, fsw) { + this.fsw = fsw; + this.path = path = path.replace(REPLACER_RE, EMPTY_STR); + this.watchPath = watchPath; + this.fullWatchPath = sysPath.resolve(watchPath); + this.hasGlob = watchPath !== path; + /** @type {object|boolean} */ + if (path === EMPTY_STR) this.hasGlob = false; + this.globSymlink = this.hasGlob && follow ? undefined : false; + this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false; + this.dirParts = this.getDirParts(path); + this.dirParts.forEach((parts) => { + if (parts.length > 1) parts.pop(); + }); + this.followSymlinks = follow; + this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L; + } + + checkGlobSymlink(entry) { + // only need to resolve once + // first entry should always have entry.parentDir === EMPTY_STR + if (this.globSymlink === undefined) { + this.globSymlink = entry.fullParentDir === this.fullWatchPath ? + false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath}; + } + + if (this.globSymlink) { + return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath); + } + + return entry.fullPath; + } + + entryPath(entry) { + return sysPath.join(this.watchPath, + sysPath.relative(this.watchPath, this.checkGlobSymlink(entry)) + ); + } + + filterPath(entry) { + const {stats} = entry; + if (stats && stats.isSymbolicLink()) return this.filterDir(entry); + const resolvedPath = this.entryPath(entry); + const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ? + this.globFilter(resolvedPath) : true; + return matchesGlob && + this.fsw._isntIgnored(resolvedPath, stats) && + this.fsw._hasReadPermissions(stats); + } + + getDirParts(path) { + if (!this.hasGlob) return []; + const parts = []; + const expandedPath = path.includes(BRACE_START) ? 
braces.expand(path) : [path]; + expandedPath.forEach((path) => { + parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE)); + }); + return parts; + } + + filterDir(entry) { + if (this.hasGlob) { + const entryParts = this.getDirParts(this.checkGlobSymlink(entry)); + let globstar = false; + this.unmatchedGlob = !this.dirParts.some((parts) => { + return parts.every((part, i) => { + if (part === GLOBSTAR) globstar = true; + return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS); + }); + }); + } + return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats); + } +} + +/** + * Watches files & directories for changes. Emitted events: + * `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error` + * + * new FSWatcher() + * .add(directories) + * .on('add', path => log('File', path, 'was added')) + */ +class FSWatcher extends EventEmitter { +// Not indenting methods for history sake; for now. +constructor(_opts) { + super(); + + const opts = {}; + if (_opts) Object.assign(opts, _opts); // for frozen objects + + /** @type {Map} */ + this._watched = new Map(); + /** @type {Map} */ + this._closers = new Map(); + /** @type {Set} */ + this._ignoredPaths = new Set(); + + /** @type {Map} */ + this._throttled = new Map(); + + /** @type {Map} */ + this._symlinkPaths = new Map(); + + this._streams = new Set(); + this.closed = false; + + // Set up default options. + if (undef(opts, 'persistent')) opts.persistent = true; + if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false; + if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false; + if (undef(opts, 'interval')) opts.interval = 100; + if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300; + if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false; + opts.enableBinaryInterval = opts.binaryInterval !== opts.interval; + + // Enable fsevents on OS X when polling isn't explicitly enabled. + if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling; + + // If we can't use fsevents, ensure the options reflect it's disabled. + const canUseFsEvents = FsEventsHandler.canUse(); + if (!canUseFsEvents) opts.useFsEvents = false; + + // Use polling on Mac if not using fsevents. + // Other platforms use non-polling fs_watch. + if (undef(opts, 'usePolling') && !opts.useFsEvents) { + opts.usePolling = isMacos; + } + + // Always default to polling on IBM i because fs.watch() is not available on IBM i. 
+ if(isIBMi) { + opts.usePolling = true; + } + + // Global override (useful for end-developers that need to force polling for all + // instances of chokidar, regardless of usage/dependency depth) + const envPoll = process.env.CHOKIDAR_USEPOLLING; + if (envPoll !== undefined) { + const envLower = envPoll.toLowerCase(); + + if (envLower === 'false' || envLower === '0') { + opts.usePolling = false; + } else if (envLower === 'true' || envLower === '1') { + opts.usePolling = true; + } else { + opts.usePolling = !!envLower; + } + } + const envInterval = process.env.CHOKIDAR_INTERVAL; + if (envInterval) { + opts.interval = Number.parseInt(envInterval, 10); + } + + // Editor atomic write normalization enabled by default with fs.watch + if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents; + if (opts.atomic) this._pendingUnlinks = new Map(); + + if (undef(opts, 'followSymlinks')) opts.followSymlinks = true; + + if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false; + if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {}; + const awf = opts.awaitWriteFinish; + if (awf) { + if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000; + if (!awf.pollInterval) awf.pollInterval = 100; + this._pendingWrites = new Map(); + } + if (opts.ignored) opts.ignored = arrify(opts.ignored); + + let readyCalls = 0; + this._emitReady = () => { + readyCalls++; + if (readyCalls >= this._readyCount) { + this._emitReady = EMPTY_FN; + this._readyEmitted = true; + // use process.nextTick to allow time for listener to be bound + process.nextTick(() => this.emit(EV_READY)); + } + }; + this._emitRaw = (...args) => this.emit(EV_RAW, ...args); + this._readyEmitted = false; + this.options = opts; + + // Initialize with proper watcher. + if (opts.useFsEvents) { + this._fsEventsHandler = new FsEventsHandler(this); + } else { + this._nodeFsHandler = new NodeFsHandler(this); + } + + // You’re frozen when your heart’s not open. 
+ Object.freeze(opts); +} + +// Public methods + +/** + * Adds paths to be watched on an existing FSWatcher instance + * @param {Path|Array} paths_ + * @param {String=} _origAdd private; for handling non-existent paths to be watched + * @param {Boolean=} _internal private; indicates a non-user add + * @returns {FSWatcher} for chaining + */ +add(paths_, _origAdd, _internal) { + const {cwd, disableGlobbing} = this.options; + this.closed = false; + let paths = unifyPaths(paths_); + if (cwd) { + paths = paths.map((path) => { + const absPath = getAbsolutePath(path, cwd); + + // Check `path` instead of `absPath` because the cwd portion can't be a glob + if (disableGlobbing || !isGlob(path)) { + return absPath; + } + return normalizePath(absPath); + }); + } + + // set aside negated glob strings + paths = paths.filter((path) => { + if (path.startsWith(BANG)) { + this._ignoredPaths.add(path.slice(1)); + return false; + } + + // if a path is being added that was previously ignored, stop ignoring it + this._ignoredPaths.delete(path); + this._ignoredPaths.delete(path + SLASH_GLOBSTAR); + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + + return true; + }); + + if (this.options.useFsEvents && this._fsEventsHandler) { + if (!this._readyCount) this._readyCount = paths.length; + if (this.options.persistent) this._readyCount += paths.length; + paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path)); + } else { + if (!this._readyCount) this._readyCount = 0; + this._readyCount += paths.length; + Promise.all( + paths.map(async path => { + const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd); + if (res) this._emitReady(); + return res; + }) + ).then(results => { + if (this.closed) return; + results.filter(item => item).forEach(item => { + this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item)); + }); + }); + } + + return this; +} + +/** + * Close watchers or start ignoring events from specified paths. + * @param {Path|Array} paths_ - string or array of strings, file/directory paths and/or globs + * @returns {FSWatcher} for chaining +*/ +unwatch(paths_) { + if (this.closed) return this; + const paths = unifyPaths(paths_); + const {cwd} = this.options; + + paths.forEach((path) => { + // convert to absolute path unless relative path already matches + if (!sysPath.isAbsolute(path) && !this._closers.has(path)) { + if (cwd) path = sysPath.join(cwd, path); + path = sysPath.resolve(path); + } + + this._closePath(path); + + this._ignoredPaths.add(path); + if (this._watched.has(path)) { + this._ignoredPaths.add(path + SLASH_GLOBSTAR); + } + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + }); + + return this; +} + +/** + * Close watchers and remove all listeners from watched paths. + * @returns {Promise}. +*/ +close() { + if (this.closed) return this._closePromise; + this.closed = true; + + // Memory management. 
+ this.removeAllListeners(); + const closers = []; + this._closers.forEach(closerList => closerList.forEach(closer => { + const promise = closer(); + if (promise instanceof Promise) closers.push(promise); + })); + this._streams.forEach(stream => stream.destroy()); + this._userIgnored = undefined; + this._readyCount = 0; + this._readyEmitted = false; + this._watched.forEach(dirent => dirent.dispose()); + ['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => { + this[`_${key}`].clear(); + }); + + this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve(); + return this._closePromise; +} + +/** + * Expose list of watched paths + * @returns {Object} for chaining +*/ +getWatched() { + const watchList = {}; + this._watched.forEach((entry, dir) => { + const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir; + watchList[key || ONE_DOT] = entry.getChildren().sort(); + }); + return watchList; +} + +emitWithAll(event, args) { + this.emit(...args); + if (event !== EV_ERROR) this.emit(EV_ALL, ...args); +} + +// Common helpers +// -------------- + +/** + * Normalize and emit events. + * Calling _emit DOES NOT MEAN emit() would be called! + * @param {EventName} event Type of event + * @param {Path} path File or directory path + * @param {*=} val1 arguments to be passed with event + * @param {*=} val2 + * @param {*=} val3 + * @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +async _emit(event, path, val1, val2, val3) { + if (this.closed) return; + + const opts = this.options; + if (isWindows) path = sysPath.normalize(path); + if (opts.cwd) path = sysPath.relative(opts.cwd, path); + /** @type Array */ + const args = [event, path]; + if (val3 !== undefined) args.push(val1, val2, val3); + else if (val2 !== undefined) args.push(val1, val2); + else if (val1 !== undefined) args.push(val1); + + const awf = opts.awaitWriteFinish; + let pw; + if (awf && (pw = this._pendingWrites.get(path))) { + pw.lastChange = new Date(); + return this; + } + + if (opts.atomic) { + if (event === EV_UNLINK) { + this._pendingUnlinks.set(path, args); + setTimeout(() => { + this._pendingUnlinks.forEach((entry, path) => { + this.emit(...entry); + this.emit(EV_ALL, ...entry); + this._pendingUnlinks.delete(path); + }); + }, typeof opts.atomic === 'number' ? opts.atomic : 100); + return this; + } + if (event === EV_ADD && this._pendingUnlinks.has(path)) { + event = args[0] = EV_CHANGE; + this._pendingUnlinks.delete(path); + } + } + + if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) { + const awfEmit = (err, stats) => { + if (err) { + event = args[0] = EV_ERROR; + args[1] = err; + this.emitWithAll(event, args); + } else if (stats) { + // if stats doesn't exist the file must have been deleted + if (args.length > 2) { + args[2] = stats; + } else { + args.push(stats); + } + this.emitWithAll(event, args); + } + }; + + this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit); + return this; + } + + if (event === EV_CHANGE) { + const isThrottled = !this._throttle(EV_CHANGE, path, 50); + if (isThrottled) return this; + } + + if (opts.alwaysStat && val1 === undefined && + (event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE) + ) { + const fullPath = opts.cwd ? 
sysPath.join(opts.cwd, path) : path; + let stats; + try { + stats = await stat(fullPath); + } catch (err) {} + // Suppress event when fs_stat fails, to avoid sending undefined 'stat' + if (!stats || this.closed) return; + args.push(stats); + } + this.emitWithAll(event, args); + + return this; +} + +/** + * Common handler for errors + * @param {Error} error + * @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +_handleError(error) { + const code = error && error.code; + if (error && code !== 'ENOENT' && code !== 'ENOTDIR' && + (!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES')) + ) { + this.emit(EV_ERROR, error); + } + return error || this.closed; +} + +/** + * Helper utility for throttling + * @param {ThrottleType} actionType type being throttled + * @param {Path} path being acted upon + * @param {Number} timeout duration of time to suppress duplicate actions + * @returns {Object|false} tracking object or false if action should be suppressed + */ +_throttle(actionType, path, timeout) { + if (!this._throttled.has(actionType)) { + this._throttled.set(actionType, new Map()); + } + + /** @type {Map} */ + const action = this._throttled.get(actionType); + /** @type {Object} */ + const actionPath = action.get(path); + + if (actionPath) { + actionPath.count++; + return false; + } + + let timeoutObject; + const clear = () => { + const item = action.get(path); + const count = item ? item.count : 0; + action.delete(path); + clearTimeout(timeoutObject); + if (item) clearTimeout(item.timeoutObject); + return count; + }; + timeoutObject = setTimeout(clear, timeout); + const thr = {timeoutObject, clear, count: 0}; + action.set(path, thr); + return thr; +} + +_incrReadyCount() { + return this._readyCount++; +} + +/** + * Awaits write operation to finish. + * Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback. + * @param {Path} path being acted upon + * @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished + * @param {EventName} event + * @param {Function} awfEmit Callback to be called when ready for event to be emitted. 
+ */ +_awaitWriteFinish(path, threshold, event, awfEmit) { + let timeoutHandler; + + let fullPath = path; + if (this.options.cwd && !sysPath.isAbsolute(path)) { + fullPath = sysPath.join(this.options.cwd, path); + } + + const now = new Date(); + + const awaitWriteFinish = (prevStat) => { + fs.stat(fullPath, (err, curStat) => { + if (err || !this._pendingWrites.has(path)) { + if (err && err.code !== 'ENOENT') awfEmit(err); + return; + } + + const now = Number(new Date()); + + if (prevStat && curStat.size !== prevStat.size) { + this._pendingWrites.get(path).lastChange = now; + } + const pw = this._pendingWrites.get(path); + const df = now - pw.lastChange; + + if (df >= threshold) { + this._pendingWrites.delete(path); + awfEmit(undefined, curStat); + } else { + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval, + curStat + ); + } + }); + }; + + if (!this._pendingWrites.has(path)) { + this._pendingWrites.set(path, { + lastChange: now, + cancelWait: () => { + this._pendingWrites.delete(path); + clearTimeout(timeoutHandler); + return event; + } + }); + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval + ); + } +} + +_getGlobIgnored() { + return [...this._ignoredPaths.values()]; +} + +/** + * Determines whether user has asked to ignore this path. + * @param {Path} path filepath or dir + * @param {fs.Stats=} stats result of fs.stat + * @returns {Boolean} + */ +_isIgnored(path, stats) { + if (this.options.atomic && DOT_RE.test(path)) return true; + if (!this._userIgnored) { + const {cwd} = this.options; + const ign = this.options.ignored; + + const ignored = ign && ign.map(normalizeIgnored(cwd)); + const paths = arrify(ignored) + .filter((path) => typeof path === STRING_TYPE && !isGlob(path)) + .map((path) => path + SLASH_GLOBSTAR); + const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths); + this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS); + } + + return this._userIgnored([path, stats]); +} + +_isntIgnored(path, stat) { + return !this._isIgnored(path, stat); +} + +/** + * Provides a set of common helpers and properties relating to symlink and glob handling. + * @param {Path} path file, directory, or glob pattern being watched + * @param {Number=} depth at any depth > 0, this isn't a glob + * @returns {WatchHelper} object containing helpers for this path + */ +_getWatchHelpers(path, depth) { + const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path); + const follow = this.options.followSymlinks; + + return new WatchHelper(path, watchPath, follow, this); +} + +// Directory helpers +// ----------------- + +/** + * Provides directory tracking objects + * @param {String} directory path of the directory + * @returns {DirEntry} the directory's tracking object + */ +_getWatchedDir(directory) { + if (!this._boundRemove) this._boundRemove = this._remove.bind(this); + const dir = sysPath.resolve(directory); + if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove)); + return this._watched.get(dir); +} + +// File helpers +// ------------ + +/** + * Check for read permissions. 
+ * Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405 + * @param {fs.Stats} stats - object, result of fs_stat + * @returns {Boolean} indicates whether the file can be read +*/ +_hasReadPermissions(stats) { + if (this.options.ignorePermissionErrors) return true; + + // stats.mode may be bigint + const md = stats && Number.parseInt(stats.mode, 10); + const st = md & 0o777; + const it = Number.parseInt(st.toString(8)[0], 10); + return Boolean(4 & it); +} + +/** + * Handles emitting unlink events for + * files and directories, and via recursion, for + * files and directories within directories that are unlinked + * @param {String} directory within which the following item is located + * @param {String} item base path of item/directory + * @returns {void} +*/ +_remove(directory, item, isDirectory) { + // if what is being deleted is a directory, get that directory's paths + // for recursive deleting and cleaning of watched object + // if it is not a directory, nestedDirectoryChildren will be empty array + const path = sysPath.join(directory, item); + const fullPath = sysPath.resolve(path); + isDirectory = isDirectory != null + ? isDirectory + : this._watched.has(path) || this._watched.has(fullPath); + + // prevent duplicate handling in case of arriving here nearly simultaneously + // via multiple paths (such as _handleFile and _handleDir) + if (!this._throttle('remove', path, 100)) return; + + // if the only watched file is removed, watch for its return + if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) { + this.add(directory, item, true); + } + + // This will create a new entry in the watched object in either case + // so we got to do the directory check beforehand + const wp = this._getWatchedDir(path); + const nestedDirectoryChildren = wp.getChildren(); + + // Recursively remove children directories / files. + nestedDirectoryChildren.forEach(nested => this._remove(path, nested)); + + // Check if item was on the watched list and remove it + const parent = this._getWatchedDir(directory); + const wasTracked = parent.has(item); + parent.remove(item); + + // Fixes issue #1042 -> Relative paths were detected and added as symlinks + // (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612), + // but never removed from the map in case the path was deleted. + // This leads to an incorrect state if the path was recreated: + // https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553 + if (this._symlinkPaths.has(fullPath)) { + this._symlinkPaths.delete(fullPath); + } + + // If we wait for this file to be fully written, cancel the wait. + let relPath = path; + if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path); + if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) { + const event = this._pendingWrites.get(relPath).cancelWait(); + if (event === EV_ADD) return; + } + + // The Entry will either be a directory that just got removed + // or a bogus entry to a file, in either case we have to remove it + this._watched.delete(path); + this._watched.delete(fullPath); + const eventName = isDirectory ? 
EV_UNLINK_DIR : EV_UNLINK; + if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path); + + // Avoid conflicts if we later create another file with the same name + if (!this.options.useFsEvents) { + this._closePath(path); + } +} + +/** + * Closes all watchers for a path + * @param {Path} path + */ +_closePath(path) { + this._closeFile(path) + const dir = sysPath.dirname(path); + this._getWatchedDir(dir).remove(sysPath.basename(path)); +} + +/** + * Closes only file-specific watchers + * @param {Path} path + */ +_closeFile(path) { + const closers = this._closers.get(path); + if (!closers) return; + closers.forEach(closer => closer()); + this._closers.delete(path); +} + +/** + * + * @param {Path} path + * @param {Function} closer + */ +_addPathCloser(path, closer) { + if (!closer) return; + let list = this._closers.get(path); + if (!list) { + list = []; + this._closers.set(path, list); + } + list.push(closer); +} + +_readdirp(root, opts) { + if (this.closed) return; + const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts}; + let stream = readdirp(root, options); + this._streams.add(stream); + stream.once(STR_CLOSE, () => { + stream = undefined; + }); + stream.once(STR_END, () => { + if (stream) { + this._streams.delete(stream); + stream = undefined; + } + }); + return stream; +} + +} + +// Export FSWatcher class +exports.FSWatcher = FSWatcher; + +/** + * Instantiates watcher with paths to be tracked. + * @param {String|Array} paths file/directory paths and/or globs + * @param {Object=} options chokidar opts + * @returns an instance of FSWatcher for chaining. + */ +const watch = (paths, options) => { + const watcher = new FSWatcher(options); + watcher.add(paths); + return watcher; +}; + +exports.watch = watch; diff --git a/node_modules/chokidar/lib/constants.js b/node_modules/chokidar/lib/constants.js new file mode 100644 index 0000000..4743865 --- /dev/null +++ b/node_modules/chokidar/lib/constants.js @@ -0,0 +1,66 @@ +'use strict'; + +const {sep} = require('path'); +const {platform} = process; +const os = require('os'); + +exports.EV_ALL = 'all'; +exports.EV_READY = 'ready'; +exports.EV_ADD = 'add'; +exports.EV_CHANGE = 'change'; +exports.EV_ADD_DIR = 'addDir'; +exports.EV_UNLINK = 'unlink'; +exports.EV_UNLINK_DIR = 'unlinkDir'; +exports.EV_RAW = 'raw'; +exports.EV_ERROR = 'error'; + +exports.STR_DATA = 'data'; +exports.STR_END = 'end'; +exports.STR_CLOSE = 'close'; + +exports.FSEVENT_CREATED = 'created'; +exports.FSEVENT_MODIFIED = 'modified'; +exports.FSEVENT_DELETED = 'deleted'; +exports.FSEVENT_MOVED = 'moved'; +exports.FSEVENT_CLONED = 'cloned'; +exports.FSEVENT_UNKNOWN = 'unknown'; +exports.FSEVENT_FLAG_MUST_SCAN_SUBDIRS = 1; +exports.FSEVENT_TYPE_FILE = 'file'; +exports.FSEVENT_TYPE_DIRECTORY = 'directory'; +exports.FSEVENT_TYPE_SYMLINK = 'symlink'; + +exports.KEY_LISTENERS = 'listeners'; +exports.KEY_ERR = 'errHandlers'; +exports.KEY_RAW = 'rawEmitters'; +exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW]; + +exports.DOT_SLASH = `.${sep}`; + +exports.BACK_SLASH_RE = /\\/g; +exports.DOUBLE_SLASH_RE = /\/\//; +exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/; +exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/; +exports.REPLACER_RE = /^\.[/\\]/; + +exports.SLASH = '/'; +exports.SLASH_SLASH = '//'; +exports.BRACE_START = '{'; +exports.BANG = '!'; +exports.ONE_DOT = '.'; +exports.TWO_DOTS = '..'; +exports.STAR = '*'; +exports.GLOBSTAR = '**'; +exports.ROOT_GLOBSTAR = '/**/*'; +exports.SLASH_GLOBSTAR = '/**'; +exports.DIR_SUFFIX = 'Dir'; 
+exports.ANYMATCH_OPTS = {dot: true}; +exports.STRING_TYPE = 'string'; +exports.FUNCTION_TYPE = 'function'; +exports.EMPTY_STR = ''; +exports.EMPTY_FN = () => {}; +exports.IDENTITY_FN = val => val; + +exports.isWindows = platform === 'win32'; +exports.isMacos = platform === 'darwin'; +exports.isLinux = platform === 'linux'; +exports.isIBMi = os.type() === 'OS400'; diff --git a/node_modules/chokidar/lib/fsevents-handler.js b/node_modules/chokidar/lib/fsevents-handler.js new file mode 100644 index 0000000..fe29393 --- /dev/null +++ b/node_modules/chokidar/lib/fsevents-handler.js @@ -0,0 +1,526 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); + +let fsevents; +try { + fsevents = require('fsevents'); +} catch (error) { + if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error); +} + +if (fsevents) { + // TODO: real check + const mtch = process.version.match(/v(\d+)\.(\d+)/); + if (mtch && mtch[1] && mtch[2]) { + const maj = Number.parseInt(mtch[1], 10); + const min = Number.parseInt(mtch[2], 10); + if (maj === 8 && min < 16) { + fsevents = undefined; + } + } +} + +const { + EV_ADD, + EV_CHANGE, + EV_ADD_DIR, + EV_UNLINK, + EV_ERROR, + STR_DATA, + STR_END, + FSEVENT_CREATED, + FSEVENT_MODIFIED, + FSEVENT_DELETED, + FSEVENT_MOVED, + // FSEVENT_CLONED, + FSEVENT_UNKNOWN, + FSEVENT_FLAG_MUST_SCAN_SUBDIRS, + FSEVENT_TYPE_FILE, + FSEVENT_TYPE_DIRECTORY, + FSEVENT_TYPE_SYMLINK, + + ROOT_GLOBSTAR, + DIR_SUFFIX, + DOT_SLASH, + FUNCTION_TYPE, + EMPTY_FN, + IDENTITY_FN +} = require('./constants'); + +const Depth = (value) => isNaN(value) ? {} : {depth: value}; + +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const realpath = promisify(fs.realpath); + +const statMethods = { stat, lstat }; + +/** + * @typedef {String} Path + */ + +/** + * @typedef {Object} FsEventsWatchContainer + * @property {Set} listeners + * @property {Function} rawEmitter + * @property {{stop: Function}} watcher + */ + +// fsevents instance helper functions +/** + * Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances) + * @type {Map} + */ +const FSEventsWatchers = new Map(); + +// Threshold of duplicate path prefixes at which to start +// consolidating going forward +const consolidateThreshhold = 10; + +const wrongEventFlags = new Set([ + 69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912 +]); + +/** + * Instantiates the fsevents interface + * @param {Path} path path to be watched + * @param {Function} callback called when fsevents is bound and ready + * @returns {{stop: Function}} new fsevents instance + */ +const createFSEventsInstance = (path, callback) => { + const stop = fsevents.watch(path, callback); + return {stop}; +}; + +/** + * Instantiates the fsevents interface or binds listeners to an existing one covering + * the same file tree. + * @param {Path} path - to be watched + * @param {Path} realPath - real path for symlinks + * @param {Function} listener - called when fsevents emits events + * @param {Function} rawEmitter - passes data to listeners of the 'raw' event + * @returns {Function} closer + */ +function setFSEventsListener(path, realPath, listener, rawEmitter) { + let watchPath = sysPath.extname(realPath) ? 
sysPath.dirname(realPath) : realPath; + + const parentPath = sysPath.dirname(watchPath); + let cont = FSEventsWatchers.get(watchPath); + + // If we've accumulated a substantial number of paths that + // could have been consolidated by watching one directory + // above the current one, create a watcher on the parent + // path instead, so that we do consolidate going forward. + if (couldConsolidate(parentPath)) { + watchPath = parentPath; + } + + const resolvedPath = sysPath.resolve(path); + const hasSymlink = resolvedPath !== realPath; + + const filteredListener = (fullPath, flags, info) => { + if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath); + if ( + fullPath === resolvedPath || + !fullPath.indexOf(resolvedPath + sysPath.sep) + ) listener(fullPath, flags, info); + }; + + // check if there is already a watcher on a parent path + // modifies `watchPath` to the parent path when it finds a match + let watchedParent = false; + for (const watchedPath of FSEventsWatchers.keys()) { + if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) { + watchPath = watchedPath; + cont = FSEventsWatchers.get(watchPath); + watchedParent = true; + break; + } + } + + if (cont || watchedParent) { + cont.listeners.add(filteredListener); + } else { + cont = { + listeners: new Set([filteredListener]), + rawEmitter, + watcher: createFSEventsInstance(watchPath, (fullPath, flags) => { + if (!cont.listeners.size) return; + if (flags & FSEVENT_FLAG_MUST_SCAN_SUBDIRS) return; + const info = fsevents.getInfo(fullPath, flags); + cont.listeners.forEach(list => { + list(fullPath, flags, info); + }); + + cont.rawEmitter(info.event, fullPath, info); + }) + }; + FSEventsWatchers.set(watchPath, cont); + } + + // removes this instance's listeners and closes the underlying fsevents + // instance if there are no more listeners left + return () => { + const lst = cont.listeners; + + lst.delete(filteredListener); + if (!lst.size) { + FSEventsWatchers.delete(watchPath); + if (cont.watcher) return cont.watcher.stop().then(() => { + cont.rawEmitter = cont.watcher = undefined; + Object.freeze(cont); + }); + } + }; +} + +// Decide whether or not we should start a new higher-level +// parent watcher +const couldConsolidate = (path) => { + let count = 0; + for (const watchPath of FSEventsWatchers.keys()) { + if (watchPath.indexOf(path) === 0) { + count++; + if (count >= consolidateThreshhold) { + return true; + } + } + } + + return false; +}; + +// returns boolean indicating whether fsevents can be used +const canUse = () => fsevents && FSEventsWatchers.size < 128; + +// determines subdirectory traversal levels from root to path +const calcDepth = (path, root) => { + let i = 0; + while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++; + return i; +}; + +// returns boolean indicating whether the fsevents' event info has the same type +// as the one returned by fs.stat +const sameTypes = (info, stats) => ( + info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() || + info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() || + info.type === FSEVENT_TYPE_FILE && stats.isFile() +) + +/** + * @mixin + */ +class FsEventsHandler { + +/** + * @param {import('../index').FSWatcher} fsw + */ +constructor(fsw) { + this.fsw = fsw; +} +checkIgnored(path, stats) { + const ipaths = this.fsw._ignoredPaths; + if (this.fsw._isIgnored(path, stats)) { + ipaths.add(path); + if (stats && stats.isDirectory()) { + ipaths.add(path + ROOT_GLOBSTAR); + } + return true; + } + + ipaths.delete(path); + 
ipaths.delete(path + ROOT_GLOBSTAR); +} + +addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD; + this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts); +} + +async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + try { + const stats = await stat(path) + if (this.fsw.closed) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } catch (error) { + if (error.code === 'EACCES') { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } +} + +handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) { + if (this.fsw.closed || this.checkIgnored(path)) return; + + if (event === EV_UNLINK) { + const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY + // suppress unlink events on never before seen files + if (isDirectory || watchedDir.has(item)) { + this.fsw._remove(parent, item, isDirectory); + } + } else { + if (event === EV_ADD) { + // track new directories + if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path); + + if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) { + // push symlinks back to the top of the stack to get handled + const curDepth = opts.depth === undefined ? + undefined : calcDepth(fullPath, realPath) + 1; + return this._addToFsEvents(path, false, true, curDepth); + } + + // track new paths + // (other than symlinks being followed, which will be tracked soon) + this.fsw._getWatchedDir(parent).add(item); + } + /** + * @type {'add'|'addDir'|'unlink'|'unlinkDir'} + */ + const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event; + this.fsw._emit(eventName, path); + if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true); + } +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} watchPath - file/dir path to be watched with fsevents + * @param {String} realPath - real path (in case of symlinks) + * @param {Function} transform - path transformer + * @param {Function} globFilter - path filter in case a glob pattern was provided + * @returns {Function} closer for the watcher instance +*/ +_watchWithFsEvents(watchPath, realPath, transform, globFilter) { + if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return; + const opts = this.fsw.options; + const watchCallback = async (fullPath, flags, info) => { + if (this.fsw.closed) return; + if ( + opts.depth !== undefined && + calcDepth(fullPath, realPath) > opts.depth + ) return; + const path = transform(sysPath.join( + watchPath, sysPath.relative(watchPath, fullPath) + )); + if (globFilter && !globFilter(path)) return; + // ensure directories are tracked + const parent = sysPath.dirname(path); + const item = sysPath.basename(path); + const watchedDir = this.fsw._getWatchedDir( + info.type === FSEVENT_TYPE_DIRECTORY ? 
path : parent + ); + + // correct for wrong events emitted + if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) { + if (typeof opts.ignored === FUNCTION_TYPE) { + let stats; + try { + stats = await stat(path); + } catch (error) {} + if (this.fsw.closed) return; + if (this.checkIgnored(path, stats)) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + switch (info.event) { + case FSEVENT_CREATED: + case FSEVENT_MODIFIED: + return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + case FSEVENT_DELETED: + case FSEVENT_MOVED: + return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } + }; + + const closer = setFSEventsListener( + watchPath, + realPath, + watchCallback, + this.fsw._emitRaw + ); + + this.fsw._emitReady(); + return closer; +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} linkPath path to symlink + * @param {String} fullPath absolute path to the symlink + * @param {Function} transform pre-existing path transformer + * @param {Number} curDepth level of subdirectories traversed to where symlink is + * @returns {Promise} + */ +async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) { + // don't follow the same symlink more than once + if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return; + + this.fsw._symlinkPaths.set(fullPath, true); + this.fsw._incrReadyCount(); + + try { + const linkTarget = await realpath(linkPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(linkTarget)) { + return this.fsw._emitReady(); + } + + this.fsw._incrReadyCount(); + + // add the linkTarget for watching with a wrapper for transform + // that causes emitted paths to incorporate the link's path + this._addToFsEvents(linkTarget || linkPath, (path) => { + let aliasedPath = linkPath; + if (linkTarget && linkTarget !== DOT_SLASH) { + aliasedPath = path.replace(linkTarget, linkPath); + } else if (path !== DOT_SLASH) { + aliasedPath = sysPath.join(linkPath, path); + } + return transform(aliasedPath); + }, false, curDepth); + } catch(error) { + if (this.fsw._handleError(error)) { + return this.fsw._emitReady(); + } + } +} + +/** + * + * @param {Path} newPath + * @param {fs.Stats} stats + */ +emitAdd(newPath, stats, processPath, opts, forceAdd) { + const pp = processPath(newPath); + const isDir = stats.isDirectory(); + const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp)); + const base = sysPath.basename(pp); + + // ensure empty dirs get tracked + if (isDir) this.fsw._getWatchedDir(pp); + if (dirObj.has(base)) return; + dirObj.add(base); + + if (!opts.ignoreInitial || forceAdd === true) { + this.fsw._emit(isDir ? 
EV_ADD_DIR : EV_ADD, pp, stats); + } +} + +initWatch(realPath, path, wh, processPath) { + if (this.fsw.closed) return; + const closer = this._watchWithFsEvents( + wh.watchPath, + sysPath.resolve(realPath || wh.watchPath), + processPath, + wh.globFilter + ); + this.fsw._addPathCloser(path, closer); +} + +/** + * Handle added path with fsevents + * @param {String} path file/dir path or glob pattern + * @param {Function|Boolean=} transform converts working path to what the user expects + * @param {Boolean=} forceAdd ensure add is emitted + * @param {Number=} priorDepth Level of subdirectories already traversed. + * @returns {Promise} + */ +async _addToFsEvents(path, transform, forceAdd, priorDepth) { + if (this.fsw.closed) { + return; + } + const opts = this.fsw.options; + const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN; + + const wh = this.fsw._getWatchHelpers(path); + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + throw null; + } + if (stats.isDirectory()) { + // emit addDir unless this is a glob parent + if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd); + + // don't recurse further if it would exceed depth setting + if (priorDepth && priorDepth > opts.depth) return; + + // scan the contents of the dir + this.fsw._readdirp(wh.watchPath, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + ...Depth(opts.depth - (priorDepth || 0)) + }).on(STR_DATA, (entry) => { + // need to check filterPath on dirs b/c filterDir is less restrictive + if (this.fsw.closed) { + return; + } + if (entry.stats.isDirectory() && !wh.filterPath(entry)) return; + + const joinedPath = sysPath.join(wh.watchPath, entry.path); + const {fullPath} = entry; + + if (wh.followSymlinks && entry.stats.isSymbolicLink()) { + // preserve the current depth here since it can't be derived from + // real paths past the symlink + const curDepth = opts.depth === undefined ? 
+ undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1; + + this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth); + } else { + this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd); + } + }).on(EV_ERROR, EMPTY_FN).on(STR_END, () => { + this.fsw._emitReady(); + }); + } else { + this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd); + this.fsw._emitReady(); + } + } catch (error) { + if (!error || this.fsw._handleError(error)) { + // TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__- + this.fsw._emitReady(); + this.fsw._emitReady(); + } + } + + if (opts.persistent && forceAdd !== true) { + if (typeof transform === FUNCTION_TYPE) { + // realpath has already been resolved + this.initWatch(undefined, path, wh, processPath); + } else { + let realPath; + try { + realPath = await realpath(wh.watchPath); + } catch (e) {} + this.initWatch(realPath, path, wh, processPath); + } + } +} + +} + +module.exports = FsEventsHandler; +module.exports.canUse = canUse; diff --git a/node_modules/chokidar/lib/nodefs-handler.js b/node_modules/chokidar/lib/nodefs-handler.js new file mode 100644 index 0000000..199cfe9 --- /dev/null +++ b/node_modules/chokidar/lib/nodefs-handler.js @@ -0,0 +1,654 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const isBinaryPath = require('is-binary-path'); +const { + isWindows, + isLinux, + EMPTY_FN, + EMPTY_STR, + KEY_LISTENERS, + KEY_ERR, + KEY_RAW, + HANDLER_KEYS, + EV_CHANGE, + EV_ADD, + EV_ADD_DIR, + EV_ERROR, + STR_DATA, + STR_END, + BRACE_START, + STAR +} = require('./constants'); + +const THROTTLE_MODE_WATCH = 'watch'; + +const open = promisify(fs.open); +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const close = promisify(fs.close); +const fsrealpath = promisify(fs.realpath); + +const statMethods = { lstat, stat }; + +// TODO: emit errors properly. Example: EMFILE on Macos. +const foreach = (val, fn) => { + if (val instanceof Set) { + val.forEach(fn); + } else { + fn(val); + } +}; + +const addAndConvert = (main, prop, item) => { + let container = main[prop]; + if (!(container instanceof Set)) { + main[prop] = container = new Set([container]); + } + container.add(item); +}; + +const clearItem = cont => key => { + const set = cont[key]; + if (set instanceof Set) { + set.clear(); + } else { + delete cont[key]; + } +}; + +const delFromSet = (main, prop, item) => { + const container = main[prop]; + if (container instanceof Set) { + container.delete(item); + } else if (container === item) { + delete main[prop]; + } +}; + +const isEmptySet = (val) => val instanceof Set ? 
val.size === 0 : !val; + +/** + * @typedef {String} Path + */ + +// fs_watch helpers + +// object to hold per-process fs_watch instances +// (may be shared across chokidar FSWatcher instances) + +/** + * @typedef {Object} FsWatchContainer + * @property {Set} listeners + * @property {Set} errHandlers + * @property {Set} rawEmitters + * @property {fs.FSWatcher=} watcher + * @property {Boolean=} watcherUnusable + */ + +/** + * @type {Map} + */ +const FsWatchInstances = new Map(); + +/** + * Instantiates the fs_watch interface + * @param {String} path to be watched + * @param {Object} options to be passed to fs_watch + * @param {Function} listener main event handler + * @param {Function} errHandler emits info about errors + * @param {Function} emitRaw emits raw event data + * @returns {fs.FSWatcher} new fsevents instance + */ +function createFsWatchInstance(path, options, listener, errHandler, emitRaw) { + const handleEvent = (rawEvent, evPath) => { + listener(path); + emitRaw(rawEvent, evPath, {watchedPath: path}); + + // emit based on events occurring for files from a directory's watcher in + // case the file's watcher misses it (and rely on throttling to de-dupe) + if (evPath && path !== evPath) { + fsWatchBroadcast( + sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath) + ); + } + }; + try { + return fs.watch(path, options, handleEvent); + } catch (error) { + errHandler(error); + } +} + +/** + * Helper for passing fs_watch event data to a collection of listeners + * @param {Path} fullPath absolute path bound to fs_watch instance + * @param {String} type listener type + * @param {*=} val1 arguments to be passed to listeners + * @param {*=} val2 + * @param {*=} val3 + */ +const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => { + const cont = FsWatchInstances.get(fullPath); + if (!cont) return; + foreach(cont[type], (listener) => { + listener(val1, val2, val3); + }); +}; + +/** + * Instantiates the fs_watch interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path + * @param {String} fullPath absolute path + * @param {Object} options to be passed to fs_watch + * @param {Object} handlers container for event listener functions + */ +const setFsWatchListener = (path, fullPath, options, handlers) => { + const {listener, errHandler, rawEmitter} = handlers; + let cont = FsWatchInstances.get(fullPath); + + /** @type {fs.FSWatcher=} */ + let watcher; + if (!options.persistent) { + watcher = createFsWatchInstance( + path, options, listener, errHandler, rawEmitter + ); + return watcher.close.bind(watcher); + } + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_ERR, errHandler); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + watcher = createFsWatchInstance( + path, + options, + fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), + errHandler, // no need to use broadcast here + fsWatchBroadcast.bind(null, fullPath, KEY_RAW) + ); + if (!watcher) return; + watcher.on(EV_ERROR, async (error) => { + const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR); + cont.watcherUnusable = true; // documented since Node 10.4.1 + // Workaround for https://github.com/joyent/node/issues/4337 + if (isWindows && error.code === 'EPERM') { + try { + const fd = await open(path, 'r'); + await close(fd); + broadcastErr(error); + } catch (err) {} + } else { + broadcastErr(error); + } + }); + cont = { + listeners: listener, + errHandlers: errHandler, + rawEmitters: rawEmitter, + watcher 
+ }; + FsWatchInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // removes this instance's listeners and closes the underlying fs_watch + // instance if there are no more listeners left + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_ERR, errHandler); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + // Check to protect against issue gh-730. + // if (cont.watcherUnusable) { + cont.watcher.close(); + // } + FsWatchInstances.delete(fullPath); + HANDLER_KEYS.forEach(clearItem(cont)); + cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +// fs_watchFile helpers + +// object to hold per-process fs_watchFile instances +// (may be shared across chokidar FSWatcher instances) +const FsWatchFileInstances = new Map(); + +/** + * Instantiates the fs_watchFile interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path to be watched + * @param {String} fullPath absolute path + * @param {Object} options options to be passed to fs_watchFile + * @param {Object} handlers container for event listener functions + * @returns {Function} closer + */ +const setFsWatchFileListener = (path, fullPath, options, handlers) => { + const {listener, rawEmitter} = handlers; + let cont = FsWatchFileInstances.get(fullPath); + + /* eslint-disable no-unused-vars, prefer-destructuring */ + let listeners = new Set(); + let rawEmitters = new Set(); + + const copts = cont && cont.options; + if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) { + // "Upgrade" the watcher to persistence or a quicker interval. + // This creates some unlikely edge case issues if the user mixes + // settings in a very weird way, but solving for those cases + // doesn't seem worthwhile for the added complexity. + listeners = cont.listeners; + rawEmitters = cont.rawEmitters; + fs.unwatchFile(fullPath); + cont = undefined; + } + + /* eslint-enable no-unused-vars, prefer-destructuring */ + + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + // TODO + // listeners.add(listener); + // rawEmitters.add(rawEmitter); + cont = { + listeners: listener, + rawEmitters: rawEmitter, + options, + watcher: fs.watchFile(fullPath, options, (curr, prev) => { + foreach(cont.rawEmitters, (rawEmitter) => { + rawEmitter(EV_CHANGE, fullPath, {curr, prev}); + }); + const currmtime = curr.mtimeMs; + if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) { + foreach(cont.listeners, (listener) => listener(path, curr)); + } + }) + }; + FsWatchFileInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // Removes this instance's listeners and closes the underlying fs_watchFile + // instance if there are no more listeners left. + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + FsWatchFileInstances.delete(fullPath); + fs.unwatchFile(fullPath); + cont.options = cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +/** + * @mixin + */ +class NodeFsHandler { + +/** + * @param {import("../index").FSWatcher} fsW + */ +constructor(fsW) { + this.fsw = fsW; + this._boundHandleError = (error) => fsW._handleError(error); +} + +/** + * Watch file for changes with fs_watchFile or fs_watch. 
+ * @param {String} path to file or dir + * @param {Function} listener on fs change + * @returns {Function} closer for the watcher instance + */ +_watchWithNodeFs(path, listener) { + const opts = this.fsw.options; + const directory = sysPath.dirname(path); + const basename = sysPath.basename(path); + const parent = this.fsw._getWatchedDir(directory); + parent.add(basename); + const absolutePath = sysPath.resolve(path); + const options = {persistent: opts.persistent}; + if (!listener) listener = EMPTY_FN; + + let closer; + if (opts.usePolling) { + options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ? + opts.binaryInterval : opts.interval; + closer = setFsWatchFileListener(path, absolutePath, options, { + listener, + rawEmitter: this.fsw._emitRaw + }); + } else { + closer = setFsWatchListener(path, absolutePath, options, { + listener, + errHandler: this._boundHandleError, + rawEmitter: this.fsw._emitRaw + }); + } + return closer; +} + +/** + * Watch a file and emit add event if warranted. + * @param {Path} file Path + * @param {fs.Stats} stats result of fs_stat + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @returns {Function} closer for the watcher instance + */ +_handleFile(file, stats, initialAdd) { + if (this.fsw.closed) { + return; + } + const dirname = sysPath.dirname(file); + const basename = sysPath.basename(file); + const parent = this.fsw._getWatchedDir(dirname); + // stats is always present + let prevStats = stats; + + // if the file is already being watched, do nothing + if (parent.has(basename)) return; + + const listener = async (path, newStats) => { + if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return; + if (!newStats || newStats.mtimeMs === 0) { + try { + const newStats = await stat(file); + if (this.fsw.closed) return; + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + if (isLinux && prevStats.ino !== newStats.ino) { + this.fsw._closeFile(path) + prevStats = newStats; + this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener)); + } else { + prevStats = newStats; + } + } catch (error) { + // Fix issues where mtime is null but file is still present + this.fsw._remove(dirname, basename); + } + // add is about to be emitted if file not already tracked in parent + } else if (parent.has(basename)) { + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + prevStats = newStats; + } + } + // kick off the watcher + const closer = this._watchWithNodeFs(file, listener); + + // emit an add event if we're supposed to + if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) { + if (!this.fsw._throttle(EV_ADD, file, 0)) return; + this.fsw._emit(EV_ADD, file, stats); + } + + return closer; +} + +/** + * Handle symlinks encountered while reading a dir. + * @param {Object} entry returned by readdirp + * @param {String} directory path of dir being read + * @param {String} path of this item + * @param {String} item basename of this item + * @returns {Promise} true if no more processing is needed for this entry. 
+ */ +async _handleSymlink(entry, directory, path, item) { + if (this.fsw.closed) { + return; + } + const full = entry.fullPath; + const dir = this.fsw._getWatchedDir(directory); + + if (!this.fsw.options.followSymlinks) { + // watch symlink directly (don't follow) and detect changes + this.fsw._incrReadyCount(); + + let linkPath; + try { + linkPath = await fsrealpath(path); + } catch (e) { + this.fsw._emitReady(); + return true; + } + + if (this.fsw.closed) return; + if (dir.has(item)) { + if (this.fsw._symlinkPaths.get(full) !== linkPath) { + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_CHANGE, path, entry.stats); + } + } else { + dir.add(item); + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_ADD, path, entry.stats); + } + this.fsw._emitReady(); + return true; + } + + // don't follow the same symlink more than once + if (this.fsw._symlinkPaths.has(full)) { + return true; + } + + this.fsw._symlinkPaths.set(full, true); +} + +_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) { + // Normalize the directory name on Windows + directory = sysPath.join(directory, EMPTY_STR); + + if (!wh.hasGlob) { + throttler = this.fsw._throttle('readdir', directory, 1000); + if (!throttler) return; + } + + const previous = this.fsw._getWatchedDir(wh.path); + const current = new Set(); + + let stream = this.fsw._readdirp(directory, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + depth: 0 + }).on(STR_DATA, async (entry) => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const item = entry.path; + let path = sysPath.join(directory, item); + current.add(item); + + if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) { + return; + } + + if (this.fsw.closed) { + stream = undefined; + return; + } + // Files that present in current directory snapshot + // but absent in previous are added to watch list and + // emit `add` event. + if (item === target || !target && !previous.has(item)) { + this.fsw._incrReadyCount(); + + // ensure relativeness of path is preserved in case of watcher reuse + path = sysPath.join(dir, sysPath.relative(dir, path)); + + this._addToNodeFs(path, initialAdd, wh, depth + 1); + } + }).on(EV_ERROR, this._boundHandleError); + + return new Promise(resolve => + stream.once(STR_END, () => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const wasThrottled = throttler ? throttler.clear() : false; + + resolve(); + + // Files that absent in current directory snapshot + // but present in previous emit `remove` event + // and are removed from @watched[directory]. + previous.getChildren().filter((item) => { + return item !== directory && + !current.has(item) && + // in case of intersecting globs; + // a path may have been filtered out of this readdir, but + // shouldn't be removed because it matches a different glob + (!wh.hasGlob || wh.filterPath({ + fullPath: sysPath.resolve(directory, item) + })); + }).forEach((item) => { + this.fsw._remove(directory, item); + }); + + stream = undefined; + + // one more time for any missed in case changes came in extremely quickly + if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler); + }) + ); +} + +/** + * Read directory to add / remove files from `@watched` list and re-read it on change. 
+ * @param {String} dir fs path + * @param {fs.Stats} stats + * @param {Boolean} initialAdd + * @param {Number} depth relative to user-supplied path + * @param {String} target child path targeted for watch + * @param {Object} wh Common watch helpers for this path + * @param {String} realpath + * @returns {Promise} closer for the watcher instance. + */ +async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) { + const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir)); + const tracked = parentDir.has(sysPath.basename(dir)); + if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) { + if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats); + } + + // ensure dir is tracked (harmless if redundant) + parentDir.add(sysPath.basename(dir)); + this.fsw._getWatchedDir(dir); + let throttler; + let closer; + + const oDepth = this.fsw.options.depth; + if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) { + if (!target) { + await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler); + if (this.fsw.closed) return; + } + + closer = this._watchWithNodeFs(dir, (dirPath, stats) => { + // if current directory is removed, do nothing + if (stats && stats.mtimeMs === 0) return; + + this._handleRead(dirPath, false, wh, target, dir, depth, throttler); + }); + } + return closer; +} + +/** + * Handle added file, directory, or glob pattern. + * Delegates call to _handleFile / _handleDir after checks. + * @param {String} path to file or ir + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @param {Object} priorWh depth relative to user-supplied path + * @param {Number} depth Child path actually targeted for watch + * @param {String=} target Child path actually targeted for watch + * @returns {Promise} + */ +async _addToNodeFs(path, initialAdd, priorWh, depth, target) { + const ready = this.fsw._emitReady; + if (this.fsw._isIgnored(path) || this.fsw.closed) { + ready(); + return false; + } + + const wh = this.fsw._getWatchHelpers(path, depth); + if (!wh.hasGlob && priorWh) { + wh.hasGlob = priorWh.hasGlob; + wh.globFilter = priorWh.globFilter; + wh.filterPath = entry => priorWh.filterPath(entry); + wh.filterDir = entry => priorWh.filterDir(entry); + } + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + ready(); + return false; + } + + const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START); + let closer; + if (stats.isDirectory()) { + const absPath = sysPath.resolve(path); + const targetPath = follow ? await fsrealpath(path) : path; + if (this.fsw.closed) return; + closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath); + if (this.fsw.closed) return; + // preserve this symlink's target path + if (absPath !== targetPath && targetPath !== undefined) { + this.fsw._symlinkPaths.set(absPath, targetPath); + } + } else if (stats.isSymbolicLink()) { + const targetPath = follow ? 
await fsrealpath(path) : path; + if (this.fsw.closed) return; + const parent = sysPath.dirname(wh.watchPath); + this.fsw._getWatchedDir(parent).add(wh.watchPath); + this.fsw._emit(EV_ADD, wh.watchPath, stats); + closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath); + if (this.fsw.closed) return; + + // preserve this symlink's target path + if (targetPath !== undefined) { + this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath); + } + } else { + closer = this._handleFile(wh.watchPath, stats, initialAdd); + } + ready(); + + this.fsw._addPathCloser(path, closer); + return false; + + } catch (error) { + if (this.fsw._handleError(error)) { + ready(); + return path; + } + } +} + +} + +module.exports = NodeFsHandler; diff --git a/node_modules/chokidar/package.json b/node_modules/chokidar/package.json new file mode 100644 index 0000000..e8f8b3d --- /dev/null +++ b/node_modules/chokidar/package.json @@ -0,0 +1,70 @@ +{ + "name": "chokidar", + "description": "Minimal and efficient cross-platform file watching library", + "version": "3.6.0", + "homepage": "https://github.com/paulmillr/chokidar", + "author": "Paul Miller (https://paulmillr.com)", + "contributors": [ + "Paul Miller (https://paulmillr.com)", + "Elan Shanker" + ], + "engines": { + "node": ">= 8.10.0" + }, + "main": "index.js", + "types": "./types/index.d.ts", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "devDependencies": { + "@types/node": "^14", + "chai": "^4.3", + "dtslint": "^3.3.0", + "eslint": "^7.0.0", + "mocha": "^7.0.0", + "rimraf": "^3.0.0", + "sinon": "^9.0.1", + "sinon-chai": "^3.3.0", + "typescript": "^4.4.3", + "upath": "^1.2.0" + }, + "files": [ + "index.js", + "lib/*.js", + "types/index.d.ts" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/paulmillr/chokidar.git" + }, + "bugs": { + "url": "https://github.com/paulmillr/chokidar/issues" + }, + "license": "MIT", + "scripts": { + "dtslint": "dtslint types", + "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", + "build": "npm ls", + "mocha": "mocha --exit --timeout 90000", + "test": "npm run lint && npm run mocha" + }, + "keywords": [ + "fs", + "watch", + "watchFile", + "watcher", + "watching", + "file", + "fsevents" + ], + "funding": "https://paulmillr.com/funding/" +} diff --git a/node_modules/chokidar/types/index.d.ts b/node_modules/chokidar/types/index.d.ts new file mode 100644 index 0000000..4558066 --- /dev/null +++ b/node_modules/chokidar/types/index.d.ts @@ -0,0 +1,192 @@ +// TypeScript Version: 3.0 + +/// + +import * as fs from "fs"; +import { EventEmitter } from "events"; +import { Matcher } from 'anymatch'; + +export class FSWatcher extends EventEmitter implements fs.FSWatcher { + options: WatchOptions; + + /** + * Constructs a new FSWatcher instance with optional WatchOptions parameter. + */ + constructor(options?: WatchOptions); + + /** + * Add files, directories, or glob patterns for tracking. Takes an array of strings or just one + * string. + */ + add(paths: string | ReadonlyArray): this; + + /** + * Stop watching files, directories, or glob patterns. Takes an array of strings or just one + * string. 
+ */ + unwatch(paths: string | ReadonlyArray): this; + + /** + * Returns an object representing all the paths on the file system being watched by this + * `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless + * the `cwd` option was used), and the values are arrays of the names of the items contained in + * each directory. + */ + getWatched(): { + [directory: string]: string[]; + }; + + /** + * Removes all listeners from watched files. + */ + close(): Promise; + + on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this; + + on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this; + + /** + * Error occurred + */ + on(event: 'error', listener: (error: Error) => void): this; + + /** + * Exposes the native Node `fs.FSWatcher events` + */ + on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this; + + /** + * Fires when the initial scan is complete + */ + on(event: 'ready', listener: () => void): this; + + on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this; + + on(event: string, listener: (...args: any[]) => void): this; + + ref(): this; + + unref(): this; +} + +export interface WatchOptions { + /** + * Indicates whether the process should continue to run as long as files are being watched. If + * set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`, + * even if the process continues to run. + */ + persistent?: boolean; + + /** + * ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to + * be ignored. The whole relative or absolute path is tested, not just filename. If a function + * with two arguments is provided, it gets called twice per path - once with a single argument + * (the path), second time with two arguments (the path and the + * [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path). + */ + ignored?: Matcher; + + /** + * If set to `false` then `add`/`addDir` events are also emitted for matching paths while + * instantiating the watching as chokidar discovers these file paths (before the `ready` event). + */ + ignoreInitial?: boolean; + + /** + * When `false`, only the symlinks themselves will be watched for changes instead of following + * the link references and bubbling events through the link's path. + */ + followSymlinks?: boolean; + + /** + * The base directory from which watch `paths` are to be derived. Paths emitted with events will + * be relative to this. + */ + cwd?: string; + + /** + * If set to true then the strings passed to .watch() and .add() are treated as literal path + * names, even if they look like globs. Default: false. + */ + disableGlobbing?: boolean; + + /** + * Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU + * utilization, consider setting this to `false`. It is typically necessary to **set this to + * `true` to successfully watch files over a network**, and it may be necessary to successfully + * watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides + * the `useFsEvents` default. + */ + usePolling?: boolean; + + /** + * Whether to use the `fsevents` watching interface if available. When set to `true` explicitly + * and `fsevents` is available this supercedes the `usePolling` setting. When set to `false` on + * OS X, `usePolling: true` becomes the default. 
+ */ + useFsEvents?: boolean; + + /** + * If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that + * may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is + * provided even in cases where it wasn't already available from the underlying watch events. + */ + alwaysStat?: boolean; + + /** + * If set, limits how many levels of subdirectories will be traversed. + */ + depth?: number; + + /** + * Interval of file system polling. + */ + interval?: number; + + /** + * Interval of file system polling for binary files. ([see list of binary extensions](https://gi + * thub.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) + */ + binaryInterval?: number; + + /** + * Indicates whether to watch files that don't have read permissions if possible. If watching + * fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed + * silently. + */ + ignorePermissionErrors?: boolean; + + /** + * `true` if `useFsEvents` and `usePolling` are `false`). Automatically filters out artifacts + * that occur when using editors that use "atomic writes" instead of writing directly to the + * source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change` + * event rather than `unlink` then `add`. If the default of 100 ms does not work well for you, + * you can override it by setting `atomic` to a custom value, in milliseconds. + */ + atomic?: boolean | number; + + /** + * can be set to an object in order to adjust timing params: + */ + awaitWriteFinish?: AwaitWriteFinishOptions | boolean; +} + +export interface AwaitWriteFinishOptions { + /** + * Amount of time in milliseconds for a file size to remain constant before emitting its event. + */ + stabilityThreshold?: number; + + /** + * File size polling interval. + */ + pollInterval?: number; +} + +/** + * produces an instance of `FSWatcher`. + */ +export function watch( + paths: string | ReadonlyArray, + options?: WatchOptions +): FSWatcher; diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/concat-map/LICENSE b/node_modules/concat-map/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown new file mode 100644 index 0000000..408f70a --- /dev/null +++ b/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. + +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/node_modules/concat-map/example/map.js b/node_modules/concat-map/example/map.js new file mode 100644 index 0000000..3365621 --- /dev/null +++ b/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? 
[ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/node_modules/concat-map/index.js b/node_modules/concat-map/index.js new file mode 100644 index 0000000..b29a781 --- /dev/null +++ b/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/concat-map/package.json b/node_modules/concat-map/package.json new file mode 100644 index 0000000..d3640e6 --- /dev/null +++ b/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/node_modules/concat-map/test/map.js b/node_modules/concat-map/test/map.js new file mode 100644 index 0000000..fdbd702 --- /dev/null +++ b/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? 
[ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/node_modules/debug/LICENSE b/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/debug/README.md b/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. 
+ +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. 
Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. 
+ + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json new file mode 100644 index 0000000..cb67103 --- /dev/null +++ b/node_modules/debug/package.json @@ -0,0 +1,60 @@ +{ + "name": "debug", + "version": "4.3.5", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon (https://github.com/qix-)", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js test.node.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "sinon": "^14.0.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/debug/src/browser.js b/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/debug/src/common.js b/node_modules/debug/src/common.js new file mode 100644 index 0000000..e3291b2 --- /dev/null +++ b/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. 
+ * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/debug/src/index.js b/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/debug/src/node.js b/node_modules/debug/src/node.js new file mode 100644 index 0000000..715560a --- /dev/null +++ b/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/fill-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md new file mode 100644 index 0000000..8d756fe --- /dev/null +++ b/node_modules/fill-range/README.md @@ -0,0 +1,237 @@ +# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) + +> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save fill-range +``` + +## Usage + +Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. 
+ +```js +const fill = require('fill-range'); +// fill(from, to[, step, options]); + +console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] +console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 +``` + +**Params** + +* `from`: **{String|Number}** the number or letter to start with +* `to`: **{String|Number}** the number or letter to end with +* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. +* `options`: **{Object|Function}**: See all available [options](#options) + +## Examples + +By default, an array of values is returned. + +**Alphabetical ranges** + +```js +console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] +console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] +``` + +**Numerical ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] +``` + +**Negative ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] +console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] +``` + +**Steps (increments)** + +```js +// numerical ranges with increments +console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] +console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] +console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] + +// alphabetical ranges with increments +console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] +``` + +## Options + +### options.step + +**Type**: `number` (formatted as a string or number) + +**Default**: `undefined` + +**Description**: The increment to use for the range. Can be used with letters or numbers. + +**Example(s)** + +```js +// numbers +console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] +console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] +console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] + +// letters +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] +console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] +``` + +### options.strictRanges + +**Type**: `boolean` + +**Default**: `false` + +**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. + +**Example(s)** + +The following are all invalid: + +```js +fill('1.1', '2'); // decimals not supported in ranges +fill('a', '2'); // incompatible range values +fill(1, 10, 'foo'); // invalid "step" argument +``` + +### options.stringify + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Cast all returned values to strings. By default, integers are returned as numbers. + +**Example(s)** + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] +``` + +### options.toRegex + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Create a regex-compatible source string, instead of expanding values to an array. 
+ +**Example(s)** + +```js +// alphabetical range +console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' +// alphabetical with step +console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' +// numerical range +console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' +// numerical range with zero padding +console.log(fill('000001', '100000', { toRegex: true })); +//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' +``` + +### options.transform + +**Type**: `function` + +**Default**: `undefined` + +**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. + +**Example(s)** + +```js +// add zero padding +console.log(fill(1, 5, value => String(value).padStart(4, '0'))); +//=> ['0001', '0002', '0003', '0004', '0005'] +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 116 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [paulmillr](https://github.com/paulmillr) | +| 2 | [realityking](https://github.com/realityking) | +| 2 | [bluelovers](https://github.com/bluelovers) | +| 1 | [edorivai](https://github.com/edorivai) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js new file mode 100644 index 0000000..ddb212e --- /dev/null +++ b/node_modules/fill-range/index.js @@ -0,0 +1,248 @@ +/*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +const util = require('util'); +const toRegexRange = require('to-regex-range'); + +const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); + +const transform = toNumber => { + return value => toNumber === true ? Number(value) : String(value); +}; + +const isValidValue = value => { + return typeof value === 'number' || (typeof value === 'string' && value !== ''); +}; + +const isNumber = num => Number.isInteger(+num); + +const zeros = input => { + let value = `${input}`; + let index = -1; + if (value[0] === '-') value = value.slice(1); + if (value === '0') return false; + while (value[++index] === '0'); + return index > 0; +}; + +const stringify = (start, end, options) => { + if (typeof start === 'string' || typeof end === 'string') { + return true; + } + return options.stringify === true; +}; + +const pad = (input, maxLength, toNumber) => { + if (maxLength > 0) { + let dash = input[0] === '-' ? '-' : ''; + if (dash) input = input.slice(1); + input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); + } + if (toNumber === false) { + return String(input); + } + return input; +}; + +const toMaxLen = (input, maxLength) => { + let negative = input[0] === '-' ? '-' : ''; + if (negative) { + input = input.slice(1); + maxLength--; + } + while (input.length < maxLength) input = '0' + input; + return negative ? ('-' + input) : input; +}; + +const toSequence = (parts, options, maxLen) => { + parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + + let prefix = options.capture ? 
'' : '?:'; + let positives = ''; + let negatives = ''; + let result; + + if (parts.positives.length) { + positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); + } + + if (parts.negatives.length) { + negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; + } + + if (positives && negatives) { + result = `${positives}|${negatives}`; + } else { + result = positives || negatives; + } + + if (options.wrap) { + return `(${prefix}${result})`; + } + + return result; +}; + +const toRange = (a, b, isNumbers, options) => { + if (isNumbers) { + return toRegexRange(a, b, { wrap: false, ...options }); + } + + let start = String.fromCharCode(a); + if (a === b) return start; + + let stop = String.fromCharCode(b); + return `[${start}-${stop}]`; +}; + +const toRegex = (start, end, options) => { + if (Array.isArray(start)) { + let wrap = options.wrap === true; + let prefix = options.capture ? '' : '?:'; + return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); + } + return toRegexRange(start, end, options); +}; + +const rangeError = (...args) => { + return new RangeError('Invalid range arguments: ' + util.inspect(...args)); +}; + +const invalidRange = (start, end, options) => { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; +}; + +const invalidStep = (step, options) => { + if (options.strictRanges === true) { + throw new TypeError(`Expected step "${step}" to be a number`); + } + return []; +}; + +const fillNumbers = (start, end, step = 1, options = {}) => { + let a = Number(start); + let b = Number(end); + + if (!Number.isInteger(a) || !Number.isInteger(b)) { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; + } + + // fix negative zero + if (a === 0) a = 0; + if (b === 0) b = 0; + + let descending = a > b; + let startString = String(start); + let endString = String(end); + let stepString = String(step); + step = Math.max(Math.abs(step), 1); + + let padded = zeros(startString) || zeros(endString) || zeros(stepString); + let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; + let toNumber = padded === false && stringify(start, end, options) === false; + let format = options.transform || transform(toNumber); + + if (options.toRegex && step === 1) { + return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); + } + + let parts = { negatives: [], positives: [] }; + let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + if (options.toRegex === true && step > 1) { + push(a); + } else { + range.push(pad(format(a, index), maxLen, toNumber)); + } + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return step > 1 + ? 
toSequence(parts, options, maxLen) + : toRegex(range, null, { wrap: false, ...options }); + } + + return range; +}; + +const fillLetters = (start, end, step = 1, options = {}) => { + if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { + return invalidRange(start, end, options); + } + + let format = options.transform || (val => String.fromCharCode(val)); + let a = `${start}`.charCodeAt(0); + let b = `${end}`.charCodeAt(0); + + let descending = a > b; + let min = Math.min(a, b); + let max = Math.max(a, b); + + if (options.toRegex && step === 1) { + return toRange(min, max, false, options); + } + + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + range.push(format(a, index)); + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return toRegex(range, null, { wrap: false, options }); + } + + return range; +}; + +const fill = (start, end, step, options = {}) => { + if (end == null && isValidValue(start)) { + return [start]; + } + + if (!isValidValue(start) || !isValidValue(end)) { + return invalidRange(start, end, options); + } + + if (typeof step === 'function') { + return fill(start, end, 1, { transform: step }); + } + + if (isObject(step)) { + return fill(start, end, 0, step); + } + + let opts = { ...options }; + if (opts.capture === true) opts.wrap = true; + step = step || opts.step || 1; + + if (!isNumber(step)) { + if (step != null && !isObject(step)) return invalidStep(step, opts); + return fill(start, end, 1, step); + } + + if (isNumber(start) && isNumber(end)) { + return fillNumbers(start, end, step, opts); + } + + return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); +}; + +module.exports = fill; diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json new file mode 100644 index 0000000..582357f --- /dev/null +++ b/node_modules/fill-range/package.json @@ -0,0 +1,74 @@ +{ + "name": "fill-range", + "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", + "version": "7.1.1", + "homepage": "https://github.com/jonschlinkert/fill-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Edo Rivai (edo.rivai.nl)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Paul Miller (paulmillr.com)", + "Rouven Weßling (www.rouvenwessling.de)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/fill-range", + "bugs": { + "url": "https://github.com/jonschlinkert/fill-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "devDependencies": { + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1", + "nyc": "^15.1.0" + }, + "keywords": [ + "alpha", + "alphabetical", + "array", + "bash", + "brace", + "expand", + "expansion", + "fill", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "regex", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + 
"gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/glob-parent/CHANGELOG.md b/node_modules/glob-parent/CHANGELOG.md new file mode 100644 index 0000000..fb9de96 --- /dev/null +++ b/node_modules/glob-parent/CHANGELOG.md @@ -0,0 +1,110 @@ +### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) + + +### Bug Fixes + +* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) + +### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) + + +### Bug Fixes + +* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) + +## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) + + +### Features + +* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) + +## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* Drop support for node <6 & bump dependencies + +### Miscellaneous Chores + +* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) + +## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* question marks are valid path characters on Windows so avoid flagging as a glob when alone +* Update is-glob dependency + +### Features + +* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) +* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) +* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) + +## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) + + +### Features + +* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) +* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) +* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) +* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) +* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) + + +### Bug Fixes + +* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) + +### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) + + +### Features + +* use path-dirname ponyfill 
([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) + + +### Bug Fixes + +* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) + +## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* update is-glob dependency + +### Features + +* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) + +## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) + + +### Features + +* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) + +## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) + +## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) + + +### Reverts + +* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) + +## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) + + +### Features + +* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) + +## 1.0.0 (2021-01-27) + diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..63222d7 --- /dev/null +++ b/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md new file mode 100644 index 0000000..36a2793 --- /dev/null +++ b/node_modules/glob-parent/README.md @@ -0,0 +1,137 @@ +

+ + + +

+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/') // 'foo' +globParent('foo/\\[bar]/') // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]') // 'foo ' +globParent('foo \\[bar]*') // 'foo ' + +// GOOD +globParent('./foo \\[bar]') // 'foo [bar]' +globParent('./foo \\[bar]*') // '.' 
+``` + +## License + +ISC + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg + +[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master +[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master + +[travis-url]: https://travis-ci.org/gulpjs/glob-parent +[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci + +[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent +[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg + +[gitter-url]: https://gitter.im/gulpjs/gulp +[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09e257e --- /dev/null +++ b/node_modules/glob-parent/index.js @@ -0,0 +1,42 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json new file mode 100644 index 0000000..125c971 --- /dev/null +++ b/node_modules/glob-parent/package.json @@ -0,0 +1,48 @@ +{ + "name": "glob-parent", + "version": "5.1.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">= 6" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only", + "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "dependencies": { + "is-glob": "^4.0.1" + }, + "devDependencies": { + "coveralls": "^3.0.11", + "eslint": "^2.13.1", + "eslint-config-gulp": "^3.0.1", + "expect": "^1.20.2", + "mocha": "^6.0.2", + "nyc": "^13.3.0" + 
}, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/has-flag/index.js b/node_modules/has-flag/index.js new file mode 100644 index 0000000..5139728 --- /dev/null +++ b/node_modules/has-flag/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = (flag, argv) => { + argv = argv || process.argv; + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const pos = argv.indexOf(prefix + flag); + const terminatorPos = argv.indexOf('--'); + return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos); +}; diff --git a/node_modules/has-flag/license b/node_modules/has-flag/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/has-flag/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/has-flag/package.json b/node_modules/has-flag/package.json new file mode 100644 index 0000000..e1eb17a --- /dev/null +++ b/node_modules/has-flag/package.json @@ -0,0 +1,44 @@ +{ + "name": "has-flag", + "version": "3.0.0", + "description": "Check if argv has a specific flag", + "license": "MIT", + "repository": "sindresorhus/has-flag", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "has", + "check", + "detect", + "contains", + "find", + "flag", + "cli", + "command-line", + "argv", + "process", + "arg", + "args", + "argument", + "arguments", + "getopt", + "minimist", + "optimist" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/has-flag/readme.md b/node_modules/has-flag/readme.md new file mode 100644 index 0000000..677893c --- /dev/null +++ b/node_modules/has-flag/readme.md @@ -0,0 +1,70 @@ +# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag) + +> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag + +Correctly stops looking after an `--` argument terminator. 
+ + +## Install + +``` +$ npm install has-flag +``` + + +## Usage + +```js +// foo.js +const hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('f'); +//=> true + +hasFlag('-f'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` + +``` +$ node foo.js -f --unicorn --foo=bar -- --rainbow +``` + + +## API + +### hasFlag(flag, [argv]) + +Returns a boolean for whether the flag exists. + +#### flag + +Type: `string` + +CLI flag to look for. The `--` prefix is optional. + +#### argv + +Type: `string[]`
+Default: `process.argv` + +CLI arguments. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/ignore-by-default/LICENSE b/node_modules/ignore-by-default/LICENSE new file mode 100644 index 0000000..ee1e367 --- /dev/null +++ b/node_modules/ignore-by-default/LICENSE @@ -0,0 +1,14 @@ +ISC License (ISC) +Copyright (c) 2016, Mark Wubben + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. diff --git a/node_modules/ignore-by-default/README.md b/node_modules/ignore-by-default/README.md new file mode 100644 index 0000000..ee77191 --- /dev/null +++ b/node_modules/ignore-by-default/README.md @@ -0,0 +1,26 @@ +# ignore-by-default + +This is a package aimed at Node.js development tools. It provides a list of +directories that should probably be ignored by such tools, e.g. when watching +for file changes. + +It's used by [AVA](https://www.npmjs.com/package/ava) and +[nodemon](https://www.npmjs.com/package/nodemon). + +[Please contribute!](./CONTRIBUTING.md) + +## Installation + +``` +npm install --save ignore-by-default +``` + +## Usage + +The `ignore-by-default` module exports a `directories()` function, which will +return an array of directory names. These are the ones you should ignore. 
+ +```js +// ['.git', '.sass_cache', …] +var ignoredDirectories = require('ignore-by-default').directories() +``` diff --git a/node_modules/ignore-by-default/index.js b/node_modules/ignore-by-default/index.js new file mode 100644 index 0000000..c65857d --- /dev/null +++ b/node_modules/ignore-by-default/index.js @@ -0,0 +1,12 @@ +'use strict' + +exports.directories = function () { + return [ + '.git', // Git repository files, see + '.nyc_output', // Temporary directory where nyc stores coverage data, see + '.sass-cache', // Cache folder for node-sass, see + 'bower_components', // Where Bower packages are installed, see + 'coverage', // Standard output directory for code coverage reports, see + 'node_modules' // Where Node modules are installed, see + ] +} diff --git a/node_modules/ignore-by-default/package.json b/node_modules/ignore-by-default/package.json new file mode 100644 index 0000000..38e0d2b --- /dev/null +++ b/node_modules/ignore-by-default/package.json @@ -0,0 +1,34 @@ +{ + "name": "ignore-by-default", + "version": "1.0.1", + "description": "A list of directories you should ignore by default", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/novemberborn/ignore-by-default.git" + }, + "keywords": [ + "ignore", + "chokidar", + "watcher", + "exclude", + "glob", + "pattern" + ], + "author": "Mark Wubben (https://novemberborn.net/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/novemberborn/ignore-by-default/issues" + }, + "homepage": "https://github.com/novemberborn/ignore-by-default#readme", + "devDependencies": { + "figures": "^1.4.0", + "standard": "^6.0.4" + } +} diff --git a/node_modules/is-binary-path/index.d.ts b/node_modules/is-binary-path/index.d.ts new file mode 100644 index 0000000..19dcd43 --- /dev/null +++ b/node_modules/is-binary-path/index.d.ts @@ -0,0 +1,17 @@ +/** +Check if a file path is a binary file. 
+ +@example +``` +import isBinaryPath = require('is-binary-path'); + +isBinaryPath('source/unicorn.png'); +//=> true + +isBinaryPath('source/unicorn.txt'); +//=> false +``` +*/ +declare function isBinaryPath(filePath: string): boolean; + +export = isBinaryPath; diff --git a/node_modules/is-binary-path/index.js b/node_modules/is-binary-path/index.js new file mode 100644 index 0000000..ef7548c --- /dev/null +++ b/node_modules/is-binary-path/index.js @@ -0,0 +1,7 @@ +'use strict'; +const path = require('path'); +const binaryExtensions = require('binary-extensions'); + +const extensions = new Set(binaryExtensions); + +module.exports = filePath => extensions.has(path.extname(filePath).slice(1).toLowerCase()); diff --git a/node_modules/is-binary-path/license b/node_modules/is-binary-path/license new file mode 100644 index 0000000..401b1c7 --- /dev/null +++ b/node_modules/is-binary-path/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
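A brief aside on the `is-binary-path` one-liner above (illustrative only, not taken from the package's docs): `path.extname` pulls the extension, `slice(1)` strips the leading dot, and the result is lower-cased before the `Set` lookup, so matching is case-insensitive and paths without an extension never match. Assuming `png` is on the `binary-extensions` list:

```js
const isBinaryPath = require('is-binary-path');

isBinaryPath('artwork/LOGO.PNG'); //=> true  (extension is lower-cased before the lookup)
isBinaryPath('notes');            //=> false (no extension, so nothing to look up)
```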
diff --git a/node_modules/is-binary-path/package.json b/node_modules/is-binary-path/package.json new file mode 100644 index 0000000..a8d005a --- /dev/null +++ b/node_modules/is-binary-path/package.json @@ -0,0 +1,40 @@ +{ + "name": "is-binary-path", + "version": "2.1.0", + "description": "Check if a file path is a binary file", + "license": "MIT", + "repository": "sindresorhus/is-binary-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "path", + "check", + "detect", + "is" + ], + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-binary-path/readme.md b/node_modules/is-binary-path/readme.md new file mode 100644 index 0000000..b4ab025 --- /dev/null +++ b/node_modules/is-binary-path/readme.md @@ -0,0 +1,34 @@ +# is-binary-path [![Build Status](https://travis-ci.org/sindresorhus/is-binary-path.svg?branch=master)](https://travis-ci.org/sindresorhus/is-binary-path) + +> Check if a file path is a binary file + + +## Install + +``` +$ npm install is-binary-path +``` + + +## Usage + +```js +const isBinaryPath = require('is-binary-path'); + +isBinaryPath('source/unicorn.png'); +//=> true + +isBinaryPath('source/unicorn.txt'); +//=> false +``` + + +## Related + +- [binary-extensions](https://github.com/sindresorhus/binary-extensions) - List of binary file extensions +- [is-text-path](https://github.com/sindresorhus/is-text-path) - Check if a filepath is a text file + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com), [Paul Miller](https://paulmillr.com) diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE new file mode 100644 index 0000000..842218c --- /dev/null +++ b/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md new file mode 100644 index 0000000..0416af5 --- /dev/null +++ b/node_modules/is-extglob/README.md @@ -0,0 +1,107 @@ +# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extglob +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Escaped extglobs: + +```js +isExtglob('\\?(abc)'); +isExtglob('\\@(abc)'); +isExtglob('\\!(abc)'); +isExtglob('\\*(abc)'); +isExtglob('\\+(abc)'); +``` + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## History + +**v2.0** + +Adds support for escaping. Escaped exglobs no longer return true. + +## About + +### Related projects + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). 
+Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js new file mode 100644 index 0000000..c1d986f --- /dev/null +++ b/node_modules/is-extglob/index.js @@ -0,0 +1,20 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json new file mode 100644 index 0000000..7a90836 --- /dev/null +++ b/node_modules/is-extglob/package.json @@ -0,0 +1,69 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE new file mode 100644 index 0000000..3f2eca1 --- /dev/null +++ b/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
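One more illustrative note on the `is-extglob` implementation shown earlier (an aside, not from its readme): the scanning loop consumes an escaped sequence and keeps going, so an escaped extglob on its own returns `false`, but a later unescaped extglob in the same string is still detected:

```js
var isExtglob = require('is-extglob');

isExtglob('\\?(abc)');       //=> false (the extglob is escaped)
isExtglob('\\?(abc)/!(x)');  //=> true  (the second extglob is not escaped)
```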
diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md new file mode 100644 index 0000000..740724b --- /dev/null +++ b/node_modules/is-glob/README.md @@ -0,0 +1,206 @@ +# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-glob +``` + +You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). + +## Usage + +```js +var isGlob = require('is-glob'); +``` + +### Default behavior + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('abc/\\@(a).js'); +isGlob('abc/\\!(a).js'); +isGlob('abc/\\+(a).js'); +isGlob('abc/\\*(a).js'); +isGlob('abc/\\?(a).js'); +isGlob('\\!foo.js'); +isGlob('\\*.js'); +isGlob('\\*\\*/abc.js'); +isGlob('abc/\\*.js'); +isGlob('abc/\\(aaa|bbb).js'); +isGlob('abc/\\[a-z].js'); +isGlob('abc/\\{a,b}.js'); +//=> false +``` + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob('abc/?.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +### Option strict + +When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that +some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. 
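For instance (an illustrative sketch, not part of the original readme), an unclosed brace is rejected by the default strict check but accepted once strict checking is turned off; the full true/false lists follow below:

```js
var isGlob = require('is-glob');

isGlob('abc/{foo');                    //=> false (strict: the brace is never closed)
isGlob('abc/{foo', { strict: false }); //=> true  (relaxed: any brace character counts)
```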
+ +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js', {strict: false}); +isGlob('*.js', {strict: false}); +isGlob('**/abc.js', {strict: false}); +isGlob('abc/*.js', {strict: false}); +isGlob('abc/(aaa|bbb).js', {strict: false}); +isGlob('abc/[a-z].js', {strict: false}); +isGlob('abc/{a,b}.js', {strict: false}); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js', {strict: false}); +isGlob('abc/!(a).js', {strict: false}); +isGlob('abc/+(a).js', {strict: false}); +isGlob('abc/*(a).js', {strict: false}); +isGlob('abc/?(a).js', {strict: false}); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('\\!foo.js', {strict: false}); +isGlob('\\*.js', {strict: false}); +isGlob('\\*\\*/abc.js', {strict: false}); +isGlob('abc/\\*.js', {strict: false}); +isGlob('abc/\\(aaa|bbb).js', {strict: false}); +isGlob('abc/\\[a-z].js', {strict: false}); +isGlob('abc/\\{a,b}.js', {strict: false}); +//=> false +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") +* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 47 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [doowb](https://github.com/doowb) | +| 1 | [phated](https://github.com/phated) | +| 1 | [danhper](https://github.com/danhper) | +| 1 | [paulmillr](https://github.com/paulmillr) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js new file mode 100644 index 0000000..620f563 --- /dev/null +++ b/node_modules/is-glob/index.js @@ -0,0 +1,150 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var isExtglob = require('is-extglob'); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var check = strictCheck; + + // optionally relax check + if (options && options.strict === false) { + check = relaxedCheck; + } + + return check(str); +}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json new file mode 100644 index 0000000..858af03 --- /dev/null +++ b/node_modules/is-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "4.0.3", + "homepage": "https://github.com/micromatch/is-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Daniel Perez (https://tuvistavie.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/is-glob", + "bugs": { + "url": "https://github.com/micromatch/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha && node benchmark.js" + }, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assemble", + "base", + "update", + "verb" + ] + }, + "reflinks": [ + "assemble", + "bach", + "base", + "composer", + "gulp", + "has-glob", + "is-valid-glob", + "micromatch", + "npm", + "scaffold", + "verb", + "vinyl" + ] + } +} diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md new file mode 100644 index 0000000..eb8149e --- /dev/null +++ b/node_modules/is-number/README.md @@ -0,0 +1,187 @@ +# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a finite number. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-number +``` + +## Why is this needed? + +In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: + +```js +console.log(+[]); //=> 0 +console.log(+''); //=> 0 +console.log(+' '); //=> 0 +console.log(typeof NaN); //=> 'number' +``` + +This library offers a performant way to smooth out edge cases like these. + +## Usage + +```js +const isNumber = require('is-number'); +``` + +See the [tests](./test.js) for more examples. + +### true + +```js +isNumber(5e3); // true +isNumber(0xff); // true +isNumber(-1.1); // true +isNumber(0); // true +isNumber(1); // true +isNumber(1.1); // true +isNumber(10); // true +isNumber(10.10); // true +isNumber(100); // true +isNumber('-1.1'); // true +isNumber('0'); // true +isNumber('012'); // true +isNumber('0xff'); // true +isNumber('1'); // true +isNumber('1.1'); // true +isNumber('10'); // true +isNumber('10.10'); // true +isNumber('100'); // true +isNumber('5e3'); // true +isNumber(parseInt('012')); // true +isNumber(parseFloat('012')); // true +``` + +### False + +Everything else is false, as you would expect: + +```js +isNumber(Infinity); // false +isNumber(NaN); // false +isNumber(null); // false +isNumber(undefined); // false +isNumber(''); // false +isNumber(' '); // false +isNumber('foo'); // false +isNumber([1]); // false +isNumber([]); // false +isNumber(function () {}); // false +isNumber({}); // false +``` + +## Release history + +### 7.0.0 + +* Refactor. Now uses `.isFinite` if it exists. +* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. + +### 6.0.0 + +* Optimizations, thanks to @benaadams. + +### 5.0.0 + +**Breaking changes** + +* removed support for `instanceof Number` and `instanceof String` + +## Benchmarks + +As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. + +``` +# all +v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) +v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) +parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) +fastest is 'v7.0' + +# string +v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) +v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) +parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) +fastest is 'parseFloat,v7.0' + +# number +v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) +v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) +parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) +fastest is 'v6.0' +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 49 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [charlike-old](https://github.com/charlike-old) | +| 1 | [benaadams](https://github.com/benaadams) | +| 1 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js new file mode 100644 index 0000000..27f19b7 --- /dev/null +++ b/node_modules/is-number/index.js @@ -0,0 +1,18 @@ +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); + } + return false; +}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json new file mode 100644 index 0000000..3715072 --- /dev/null +++ b/node_modules/is-number/package.json @@ -0,0 +1,82 @@ +{ + "name": "is-number", + "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", + "version": "7.0.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Olsten Larck (https://i.am.charlike.online)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.12.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi": "^0.3.1", + "benchmark": "^2.1.4", + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "cast", + "check", + "coerce", + "coercion", + "finite", + "integer", + "is", + "isnan", + "is-nan", + "is-num", + "is-number", + "isnumber", + "isfinite", + "istype", + "kind", + "math", + "nan", + "num", + "number", + "numeric", + "parseFloat", + "parseInt", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "is-plain-object", + "is-primitive", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 0000000..33ede1d --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,230 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. 
+* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. 
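As an aside before the remaining flags (an illustrative sketch, not part of the original readme), a few of the options above in action:

```js
var minimatch = require("minimatch")

// dot: let patterns match dotfiles
minimatch(".profile", "*")                // false
minimatch(".profile", "*", { dot: true }) // true

// nocase: case-insensitive matching
minimatch("README.md", "readme.*", { nocase: true }) // true

// matchBase: match against the basename when the pattern has no slashes
minimatch("/xyz/123/acb", "a?b", { matchBase: true }) // true
```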
+ +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### allowWindowsEscape + +Windows path separator `\` is by default converted to `/`, which +prohibits the usage of `\` as a escape character. This flag skips that +behavior and allows using the escape character. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..fda45ad --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' 
}, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
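+//
+// For example, the expanded pattern "a/**/*.js" is split into the parts
+// ["a", "**", "*.js"]: parse("a") returns the plain string "a" (no magic
+// characters, so matchOne() can compare it with ===), parse("**") returns the
+// GLOBSTAR sentinel, and parse("*.js") returns a part RegExp; makeRe() later
+// joins the compiled parts with an escaped "/".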
+Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. 
As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. 
+ var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. 
+ /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. 
+ return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 0000000..566efdf --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js new file mode 100644 index 0000000..c4498bc --- /dev/null +++ b/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/ms/license.md b/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json new file mode 100644 index 0000000..eea666e --- /dev/null +++ b/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md new file mode 100644 index 0000000..9a1996b --- /dev/null +++ b/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/nodemon/.prettierrc.json b/node_modules/nodemon/.prettierrc.json new file mode 100644 index 0000000..544138b --- /dev/null +++ b/node_modules/nodemon/.prettierrc.json @@ -0,0 +1,3 @@ +{ + "singleQuote": true +} diff --git a/node_modules/nodemon/LICENSE b/node_modules/nodemon/LICENSE new file mode 100644 index 0000000..19c91a2 --- /dev/null +++ b/node_modules/nodemon/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - present, Remy Sharp, https://remysharp.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/nodemon/README.md b/node_modules/nodemon/README.md new file mode 100644 index 0000000..48054b1 --- /dev/null +++ b/node_modules/nodemon/README.md @@ -0,0 +1,452 @@ +

+ Nodemon Logo

+ +# nodemon + +nodemon is a tool that helps develop Node.js based applications by automatically restarting the node application when file changes in the directory are detected. + +nodemon does **not** require *any* additional changes to your code or method of development. nodemon is a replacement wrapper for `node`. To use `nodemon`, replace the word `node` on the command line when executing your script. + +[![NPM version](https://badge.fury.io/js/nodemon.svg)](https://npmjs.org/package/nodemon) +[![Backers on Open Collective](https://opencollective.com/nodemon/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/nodemon/sponsors/badge.svg)](#sponsors) + +# Installation + +Either through cloning with git or by using [npm](http://npmjs.org) (the recommended way): + +```bash +npm install -g nodemon # or using yarn: yarn global add nodemon +``` + +And nodemon will be installed globally to your system path. + +You can also install nodemon as a development dependency: + +```bash +npm install --save-dev nodemon # or using yarn: yarn add nodemon -D +``` + +With a local installation, nodemon will not be available in your system path, nor can you use it directly from the command line. Instead, the local installation of nodemon can be run by calling it from within an npm script (such as `npm start`) or using `npx nodemon`. + +# Usage + +nodemon wraps your application, so you can pass all the arguments you would normally pass to your app: + +```bash +nodemon [your node app] +``` + +For CLI options, use the `-h` (or `--help`) argument: + +```bash +nodemon -h +``` + +Using nodemon is simple: if my application accepted a host and port as arguments, I would start it like so: + +```bash +nodemon ./server.js localhost 8080 +``` + +Any output from this script is prefixed with `[nodemon]`; otherwise all output from your application, errors included, will be echoed out as expected. + +You can also pass the `inspect` flag to node through the command line as you would normally: + +```bash +nodemon --inspect ./server.js 80 +``` + +If you have a `package.json` file for your app, you can omit the main script entirely and nodemon will read the `package.json` for the `main` property and use that value as the app ([ref](https://github.com/remy/nodemon/issues/14)). + +nodemon will also search for the `scripts.start` property in `package.json` (as of nodemon 1.1.x). + +Also check out the [FAQ](https://github.com/remy/nodemon/blob/master/faq.md) or [issues](https://github.com/remy/nodemon/issues) for nodemon. + +## Automatic re-running + +nodemon was originally written to restart hanging processes such as web servers, but now supports apps that cleanly exit. If your script exits cleanly, nodemon will continue to monitor the directory (or directories) and restart the script if there are any changes. + +## Manual restarting + +Whilst nodemon is running, if you need to manually restart your application, instead of stopping and restarting nodemon, you can type `rs` with a carriage return, and nodemon will restart your process. + +## Config files + +nodemon supports local and global configuration files. These are usually named `nodemon.json` and can be located in the current working directory or in your home directory. An alternative local configuration file can be specified with the `--config <file>` option.
+ +The specificity is as follows, so that a command line argument will always override the config file settings: + +- command line arguments +- local config +- global config + +A config file can take any of the command line arguments as JSON key values, for example: + +```json +{ + "verbose": true, + "ignore": ["*.test.js", "**/fixtures/**"], + "execMap": { + "rb": "ruby", + "pde": "processing --sketch={{pwd}} --run" + } +} +``` + +The above `nodemon.json` file might be my global config so that I have support for ruby files and processing files, and I can run `nodemon demo.pde` and nodemon will automatically know how to run the script, even though processing scripts are not supported out of the box. + +A further example of options can be seen in [sample-nodemon.md](https://github.com/remy/nodemon/blob/master/doc/sample-nodemon.md) + +### package.json + +If you want to keep all your package configurations in one place, nodemon supports using `package.json` for configuration. +Specify the config in the same format as you would for a config file, but under `nodemonConfig` in the `package.json` file; for example, take the following `package.json`: + +```json +{ + "name": "nodemon", + "homepage": "http://nodemon.io", + "...": "... other standard package.json values", + "nodemonConfig": { + "ignore": ["**/test/**", "**/docs/**"], + "delay": 2500 + } +} +``` + +Note that if you specify a `--config` file or provide a local `nodemon.json`, any `package.json` config is ignored. + +*This section needs better documentation, but for now you can also see `nodemon --help config` ([also here](https://github.com/remy/nodemon/blob/master/doc/cli/config.txt))*. + +## Using nodemon as a module + +Please see [doc/requireable.md](doc/requireable.md) + +## Using nodemon as child process + +Please see [doc/events.md](doc/events.md#Using_nodemon_as_child_process) + +## Running non-node scripts + +nodemon can also be used to execute and monitor other programs. nodemon will read the file extension of the script being run and monitor that extension instead of `.js` if there's no `nodemon.json`: + +```bash +nodemon --exec "python -v" ./app.py +``` + +Now nodemon will run `app.py` with python in verbose mode (note that if you're not passing args to the exec program, you don't need the quotes), and look for new or modified files with the `.py` extension. + +### Default executables + +Using the `nodemon.json` config file, you can define your own default executables using the `execMap` property. This is particularly useful if you're working with a language that isn't supported by default by nodemon. + +To add support for nodemon to know about the `.pl` extension (for Perl), the `nodemon.json` file would add: + +```json +{ + "execMap": { + "pl": "perl" + } +} +``` + +Now running the following, nodemon will know to use `perl` as the executable: + +```bash +nodemon script.pl +``` + +It's generally recommended to use the global `nodemon.json` to add your own `execMap` options. However, if there's a common default that's missing, this can be merged into the project so that nodemon supports it by default, by changing [default.js](https://github.com/remy/nodemon/blob/master/lib/config/defaults.js) and sending a pull request. + +## Monitoring multiple directories + +By default nodemon monitors the current working directory.
If you want to take control of that option, use the `--watch` option to add specific paths: + +```bash +nodemon --watch app --watch libs app/server.js +``` + +Now nodemon will only restart if there are changes in the `./app` or `./libs` directory. By default nodemon will traverse sub-directories, so there's no need to explicitly include sub-directories. + +Nodemon also supports unix globbing, e.g. `--watch './lib/*'`. The globbing pattern must be quoted. For advanced globbing, [see `picomatch` documentation](https://github.com/micromatch/picomatch#advanced-globbing), the library that nodemon uses through `chokidar` (which in turn uses it through `anymatch`). + +## Specifying extension watch list + +By default, nodemon looks for files with the `.js`, `.mjs`, `.coffee`, `.litcoffee`, and `.json` extensions. If you use the `--exec` option and monitor `app.py`, nodemon will monitor files with the extension of `.py`. However, you can specify your own list with the `-e` (or `--ext`) switch like so: + +```bash +nodemon -e js,pug +``` + +Now nodemon will restart on any changes to files in the directory (or subdirectories) with the extensions `.js` and `.pug`. + +## Ignoring files + +By default, nodemon will only restart when a `.js` JavaScript file changes. In some cases you will want to ignore some specific files, directories or file patterns, to prevent nodemon from prematurely restarting your application. + +This can be done via the command line: + +```bash +nodemon --ignore lib/ --ignore tests/ +``` + +Or specific files can be ignored: + +```bash +nodemon --ignore lib/app.js +``` + +Patterns can also be ignored (but be sure to quote the arguments): + +```bash +nodemon --ignore 'lib/*.js' +``` + +**Important:** the ignore rules are patterns matched to the full absolute path, and this determines how many files are monitored. If using a wild card glob pattern, it needs to be used as `**` or omitted entirely. For example, `nodemon --ignore '**/test/**'` will work, whereas `--ignore '*/test/*'` will not. + +Note that by default, nodemon will ignore the `.git`, `node_modules`, `bower_components`, `.nyc_output`, `coverage` and `.sass-cache` directories and *add* your ignored patterns to the list. If you do indeed want to watch a directory like `node_modules`, you need to [override the underlying default ignore rules](https://github.com/remy/nodemon/blob/master/faq.md#overriding-the-underlying-default-ignore-rules). + +## Application isn't restarting + +In some networked environments (such as a container running nodemon reading across a mounted drive), you will need to use the `legacyWatch: true` option, which enables Chokidar's polling. + +Via the CLI, use either `--legacy-watch` or `-L` for short: + +```bash +nodemon -L +``` + +Though this should be a last resort, as it will poll every file it can find. + +## Delaying restarting + +In some situations, you may want to wait until a number of files have changed. The timeout before checking for new file changes is 1 second. If you're uploading a number of files and it's taking some number of seconds, this could cause your app to restart multiple times unnecessarily. + +To add an extra throttle, or delay restarting, use the `--delay` option: + +```bash +nodemon --delay 10 server.js +``` + +For more precision, milliseconds can be specified.
Either as a float: + +```bash +nodemon --delay 2.5 server.js +``` + +Or using the time specifier (ms): + +```bash +nodemon --delay 2500ms server.js +``` + +The delay figure is number of seconds (or milliseconds, if specified) to delay before restarting. So nodemon will only restart your app the given number of seconds after the *last* file change. + +If you are setting this value in `nodemon.json`, the value will always be interpreted in milliseconds. E.g., the following are equivalent: + +```bash +nodemon --delay 2.5 + +{ + "delay": 2500 +} +``` + +## Gracefully reloading down your script + +It is possible to have nodemon send any signal that you specify to your application. + +```bash +nodemon --signal SIGHUP server.js +``` + +Your application can handle the signal as follows. + +```js +process.once("SIGHUP", function () { + reloadSomeConfiguration(); +}) +``` + +Please note that nodemon will send this signal to every process in the process tree. + +If you are using `cluster`, then each workers (as well as the master) will receive the signal. If you wish to terminate all workers on receiving a `SIGHUP`, a common pattern is to catch the `SIGHUP` in the master, and forward `SIGTERM` to all workers, while ensuring that all workers ignore `SIGHUP`. + +```js +if (cluster.isMaster) { + process.on("SIGHUP", function () { + for (const worker of Object.values(cluster.workers)) { + worker.process.kill("SIGTERM"); + } + }); +} else { + process.on("SIGHUP", function() {}) +} +``` + +## Controlling shutdown of your script + +nodemon sends a kill signal to your application when it sees a file update. If you need to clean up on shutdown inside your script you can capture the kill signal and handle it yourself. + +The following example will listen once for the `SIGUSR2` signal (used by nodemon to restart), run the clean up process and then kill itself for nodemon to continue control: + +```js +// important to use `on` and not `once` as nodemon can re-send the kill signal +process.on('SIGUSR2', function () { + gracefulShutdown(function () { + process.kill(process.pid, 'SIGTERM'); + }); +}); +``` + +Note that the `process.kill` is *only* called once your shutdown jobs are complete. Hat tip to [Benjie Gillam](http://www.benjiegillam.com/2011/08/node-js-clean-restart-and-faster-development-with-nodemon/) for writing this technique up. + +## Triggering events when nodemon state changes + +If you want growl like notifications when nodemon restarts or to trigger an action when an event happens, then you can either `require` nodemon or add event actions to your `nodemon.json` file. + +For example, to trigger a notification on a Mac when nodemon restarts, `nodemon.json` looks like this: + +```json +{ + "events": { + "restart": "osascript -e 'display notification \"app restarted\" with title \"nodemon\"'" + } +} +``` + +A full list of available events is listed on the [event states wiki](https://github.com/remy/nodemon/wiki/Events#states). Note that you can bind to both states and messages. 
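+
+As a rough sketch of the `require` route (see doc/requireable.md above; the script name `server.js` below is only a placeholder), the module form of nodemon accepts the same options as `nodemon.json` and emits the documented state events:
+
+```js
+const nodemon = require('nodemon');
+
+// same options as nodemon.json / the CLI
+nodemon({ script: 'server.js', ext: 'js json' });
+
+nodemon
+  .on('start', function () { console.log('app started'); })
+  .on('restart', function (files) { console.log('restarted due to', files); })
+  .on('crash', function () { console.log('script crashed'); })
+  .on('quit', function () { process.exit(); });
+```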
+ +## Pipe output to somewhere else + +```js +nodemon({ + script: ..., + stdout: false // important: this tells nodemon not to output to console +}).on('readable', function() { // the `readable` event indicates that data is ready to pick up + this.stdout.pipe(fs.createWriteStream('output.txt')); + this.stderr.pipe(fs.createWriteStream('err.txt')); +}); +``` + +## Using nodemon in your gulp workflow + +Check out the [gulp-nodemon](https://github.com/JacksonGariety/gulp-nodemon) plugin to integrate nodemon with the rest of your project's gulp workflow. + +## Using nodemon in your Grunt workflow + +Check out the [grunt-nodemon](https://github.com/ChrisWren/grunt-nodemon) plugin to integrate nodemon with the rest of your project's grunt workflow. + +## Pronunciation + +> nodemon, is it pronounced: node-mon, no-demon or node-e-mon (like pokémon)? + +Well...I've been asked this many times before. I like that I've been asked this before. There's been bets as to which one it actually is. + +The answer is simple, but possibly frustrating. I'm not saying (how I pronounce it). It's up to you to call it as you like. All answers are correct :) + +## Design principles + +- Fewer flags is better +- Works across all platforms +- Fewer features +- Let individuals build on top of nodemon +- Offer all CLI functionality as an API +- Contributions must have and pass tests + +Nodemon is not perfect, and CLI arguments has sprawled beyond where I'm completely happy, but perhaps it can be reduced a little one day. + +## FAQ + +See the [FAQ](https://github.com/remy/nodemon/blob/master/faq.md) and please add your own questions if you think they would help others. + +## Backers + +Thank you to all [our backers](https://opencollective.com/nodemon#backer)! 🙏 + +[![nodemon backers](https://opencollective.com/nodemon/backers.svg?width=890)](https://opencollective.com/nodemon#backers) + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [Sponsor this project today ❤️](https://opencollective.com/nodemon#sponsor) + +
[sponsor logos and links]
+ +Please note that links to the sponsors above are not direct endorsements nor affiliated with any of contributors of the nodemon project. + +# License + +MIT [http://rem.mit-license.org](http://rem.mit-license.org) diff --git a/node_modules/nodemon/bin/nodemon.js b/node_modules/nodemon/bin/nodemon.js new file mode 100755 index 0000000..3d490f1 --- /dev/null +++ b/node_modules/nodemon/bin/nodemon.js @@ -0,0 +1,16 @@ +#!/usr/bin/env node + +const cli = require('../lib/cli'); +const nodemon = require('../lib/'); +const options = cli.parse(process.argv); + +nodemon(options); + +const fs = require('fs'); + +// checks for available update and returns an instance +const pkg = JSON.parse(fs.readFileSync(__dirname + '/../package.json')); + +if (pkg.version.indexOf('0.0.0') !== 0 && options.noUpdateNotifier !== true) { + require('simple-update-notifier')({ pkg }); +} diff --git a/node_modules/nodemon/bin/windows-kill.exe b/node_modules/nodemon/bin/windows-kill.exe new file mode 100644 index 0000000000000000000000000000000000000000..98d7d7f7ed95fd8662f82dda3d16203f0d76e580 GIT binary patch literal 80384 zcmeHw3qaIW_WvDVU=(pia}>*ROk62bQ7rMvjDUX8K~eEhsemvDiGah5;#)&Yn#SC= zw%Q(2xn>^&6@w`-0$~0d}ka`(zb2?T)lJdJ@?#m z&OP`2m?7pPPfe(>pR^t@(W5$CHCT3 z_Ut*Poa~~aVuxv_&182LnF@+b$>S!P<`n1J`b0&wj*(eU*z>|y9XEckMe$wtr*4Tx zy!Q`BHn->S@Ma^2pKNZ+;m4aV1MIl*$d*}vdnUEp(hhLt4+l11!C{Xrop^d0hbwe!VmGNdhIBY`;~VN1+_1|BwR2v9hLv2c!R zfUFX!WJI`7mbF63BOc#qxV0Mg0>X7gjFl)j z6|662EEe%Bz`Imo53UfET`q@`#RRr}?77*FY{mv$3v3qr zb?~F%Rn82R`$%N0UWbHAc;dAjUggY;dHR${guIL>D>vv$6%m4H(vz#)zw0GYD5dWr7ksOmQxW0287Tm`7 zGAJqPt8~W1G&EJgwF}s1fE9=PqAMxd*B33IsL2=o60OW@tOJMgpES-`WWQZvdN(nx zM_Z7XZsl~lfIKz`$nu{;^sC5*7KLSouTYp6evbf3e|#J`jPr4L9az!qi>^Vm zwk@b%oXDxS5_LC9b{oHvtl%|%3*@nxsG;(oLiMYtssz^z6f1PF_A!#yMgLL~igiDQ z4G5!HYZ2D3r0xVp6^IC~Jz_*6W2-!l`8?-xRZgcW=S>t2i}%(}iubzqA@_|y=({rg zOJR|MciZ6P)?TMhC_klheqGxi$%RNpF}0+4K38p*;5r~qmxFF&cMu#Kk2doLP|UKq zHCHMlMi5+M4PN72$mlvGxRQYwGE(QzE>B>No?uM*!1$@LV60adPc|G22P@a&7{N6S z{ve9=9O@qQmr3-CiT)GFa2-U`@ER#UQorW*-6g(7DF=AUol?pkIps;DtVOSISp+jW^w}Rw#cn?nWO!a7$2eYu7}s1j-~-SYvdPr&uy9lP%OW%?+6= z9*9|v!dP9W1=qLnN6=Ht5-xRWQ{qq5wofg8*Q@WE%vh>-=ZJ(Z{hcSFjilbiSZsaKywy`F<+pbtqRjb3yqPp0zC-C7)?iby|gQ_jBH#@E-+1=zk`j;~LP31DZ#v@iD z@H)l|-hH(ntUBTxFB7G?cBWD#Lo=`OK~SX*+3D;>Z2AUabC<-%sV(0VR@<6*{gZ1# z){)6!!G1K=Qjc^wVoFf?WmZ=%m7bSG2z@st-s3vpHO>OFwO(U102L|KwbyGLLby~{ zSPa=^w9DE}WiM2=Yp39n>IL;~wq#mnSf*vp@Kyg_-kwZU8Lq?6wdOSvy2opLeGGcv zOV#MUUgO&c^FSRB>;NN^uK}&jh%xXsF(pWY;A6Fe3M#dSe5>me5u;?N{B&xYtySut zvwDlU0zs-*zw%kwPjFFND)(xZ(LfRkWBaPwLL)V=@HkZEe5$RUL2B#E(Y(?v`s9yM zLHSf&umKnN++exNxBJt89dEzggRr}&ft0h7DmV)j3`;C-s9>c5oK@lTzyKBA6F{o) zeO}{e!lk+*9O^O} zZ>I94fJ=MNt%ytdmk>T(FB1rWY-e&z0tS9sSz{!AnGd_xm;|v^1Pn+jCkW+>W6}+R zK8eEasCsH%Xr89{x20B!FfUdXSmkR3cLu5q#mMp+FHa?=+Q%@9P`OPIFkv>KkX!r? 
z!ncEW!Fy$Y51`_z5K!>+J2!M!H4C2d68?GkadMc};WS_LgIAmd0m9L}5z)iI{GEfs zQ=NYu(&8KO$~qx1{|?a4gID;S3z9zSGM$UQIq5^dTWv?my9b`|Tj2wFB>|__1yOEW zHQlR(gnJTR!4ogaOYRZ)f5TIn9-j7<3LU{WWc(%>F9NFT7g1mRsq-}!Ci@3EY7^*A z>@E1Ca+u=euZAaHWEYAz=BSN$;O`G0p!tu={uF#-K$<%GJt8lw5dqwC5GZ_h8wlw_=P~F8RPrIurhOxduiFg^ytDGteG|b;(m+8eLlPOd1&W4lTvw z;^IM;fuNR|&6H+hu_G7@m${hOh8}bfEO9{$ z`E@cE6U#?FN&3*lvN?sOc{Y1#L2=Qrp8fj7^)%Uva*A^cie?S#IVodg?}VPFQb%@C zZgydDk!@Jd1-856vzuwauAXxWEJkMWw@fI_*V+OLOvVbFxc&&nd{U7nc_2 zIeO<5&l#LuI;YRPemzZdvWp7xY^9FLYH46)G7WXuou!V{qP$|6+m+|w7Jntrhn%Ih z9H+g&u|SRkW4Fz9LW(Un-Ci)SpwKqUR_aSsC#RGls{_Th+UD5`O@;J1tY>yo=_5w0`q?&-z{KtJfb|U$?$~J^Ops zhWrgB8`f^PcSGd{-A2R4=#8e0u^ZzyCTtWorf;0OF>7P~#*&R?8<%ffy>acvdpA~Y z+^})e#?2c&8+UE2-gszZ%|>x!-NyQjo1dwE=Fl_s&p>#Iy!ffqRT?T0hc^M3E3+zB fS8lEpD-91PJe>9L>W4iK*FPLfb4-i7NEY~iij|%i literal 0 HcmV?d00001 diff --git a/node_modules/nodemon/doc/cli/authors.txt b/node_modules/nodemon/doc/cli/authors.txt new file mode 100644 index 0000000..6c77a12 --- /dev/null +++ b/node_modules/nodemon/doc/cli/authors.txt @@ -0,0 +1,8 @@ + + Remy Sharp - author and maintainer + https://github.com/remy + https://twitter.com/rem + + Contributors: https://github.com/remy/nodemon/graphs/contributors ❤︎ + + Please help make nodemon better: https://github.com/remy/nodemon/ diff --git a/node_modules/nodemon/doc/cli/config.txt b/node_modules/nodemon/doc/cli/config.txt new file mode 100644 index 0000000..5de9bba --- /dev/null +++ b/node_modules/nodemon/doc/cli/config.txt @@ -0,0 +1,44 @@ + + Typically the options to control nodemon are passed in via the CLI and are + listed under: nodemon --help options + + nodemon can also be configured via a local and global config file: + + * $HOME/nodemon.json + * $PWD/nodemon.json OR --config + * nodemonConfig in package.json + + All config options in the .json file map 1-to-1 with the CLI options, so a + config could read as: + + { + "ext": "*.pde", + "verbose": true, + "exec": "processing --sketch=game --run" + } + + There are a limited number of variables available in the config (since you + could use backticks on the CLI to use a variable, backticks won't work in + the .json config). + + * {{pwd}} - the current directory + * {{filename}} - the filename you pass to nodemon + + For example: + + { + "ext": "*.pde", + "verbose": true, + "exec": "processing --sketch={{pwd}} --run" + } + + The global config file is useful for setting up default executables + instead of repeating the same option in each of your local configs: + + { + "verbose": true, + "execMap": { + "rb": "ruby", + "pde": "processing --sketch={{pwd}} --run" + } + } diff --git a/node_modules/nodemon/doc/cli/help.txt b/node_modules/nodemon/doc/cli/help.txt new file mode 100644 index 0000000..7ba4ff2 --- /dev/null +++ b/node_modules/nodemon/doc/cli/help.txt @@ -0,0 +1,29 @@ + Usage: nodemon [options] [script.js] [args] + + Options: + + --config file ............ alternate nodemon.json config file to use + -e, --ext ................ extensions to look for, ie. js,pug,hbs. + -x, --exec app ........... execute script with "app", ie. -x "python -v". + -w, --watch path ......... watch directory "path" or files. use once for + each directory or file to watch. + -i, --ignore ............. ignore specific files or directories. + -V, --verbose ............ show detail on what is causing restarts. + -- ........... to tell nodemon stop slurping arguments. 
+ + Note: if the script is omitted, nodemon will try to read "main" from + package.json and without a nodemon.json, nodemon will monitor .js, .mjs, .coffee, + .litcoffee, and .json by default. + + For advanced nodemon configuration use nodemon.json: nodemon --help config + See also the sample: https://github.com/remy/nodemon/wiki/Sample-nodemon.json + + Examples: + + $ nodemon server.js + $ nodemon -w ../foo server.js apparg1 apparg2 + $ nodemon --exec python app.py + $ nodemon --exec "make build" -e "styl hbs" + $ nodemon app.js -- --config # pass config to app.js + + \x1B[1mAll options are documented under: \x1B[4mnodemon --help options\x1B[0m diff --git a/node_modules/nodemon/doc/cli/logo.txt b/node_modules/nodemon/doc/cli/logo.txt new file mode 100644 index 0000000..150f97f --- /dev/null +++ b/node_modules/nodemon/doc/cli/logo.txt @@ -0,0 +1,20 @@ + ; ; + kO. x0 + KMX, .:x0kc. 'KMN + 0MMM0: 'oKMMMMMMMXd, ;OMMMX + oMMMMMWKOONMMMMMMMMMMMMMWOOKWMMMMMx + OMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMK. + .oWMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMd. + KMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN + KMMMMMMMMMMMMMMW0k0WMMMMMMMMMMMMMMW + KMMMMMMMMMMMNk:. :xNMMMMMMMMMMMW + KMMMMMMMMMMK OMMMMMMMMMMW + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMNc ;NMMMMMMMMMN + KMMMMMW0o' .lOWMMMMMN + KMMKd; ,oKMMN + kX: ,K0 \ No newline at end of file diff --git a/node_modules/nodemon/doc/cli/options.txt b/node_modules/nodemon/doc/cli/options.txt new file mode 100644 index 0000000..598ae63 --- /dev/null +++ b/node_modules/nodemon/doc/cli/options.txt @@ -0,0 +1,36 @@ + +Configuration + --config .......... alternate nodemon.json config file to use + --exitcrash .............. exit on crash, allows nodemon to work with other watchers + -i, --ignore ............. ignore specific files or directories + --no-colors .............. disable color output + --signal ........ use specified kill signal instead of default (ex. SIGTERM) + -w, --watch path ......... watch directory "dir" or files. use once for each + directory or file to watch + --no-update-notifier ..... opt-out of update version check + +Execution + -C, --on-change-only ..... execute script on change only, not startup + --cwd .............. change into before running the script + -e, --ext ................ extensions to look for, ie. "js,pug,hbs" + -I, --no-stdin ........... nodemon passes stdin directly to child process + --spawn .................. force nodemon to use spawn (over fork) [node only] + -x, --exec app ........... execute script with "app", ie. -x "python -v" + -- ........... to tell nodemon stop slurping arguments + +Watching + -d, --delay n ............ debounce restart for "n" seconds + -L, --legacy-watch ....... use polling to watch for changes (typically needed + when watching over a network/Docker) + -P, --polling-interval ... combined with -L, milliseconds to poll for (default 100) + +Information + --dump ................... print full debug configuration + -h, --help ............... default help + --help ........... help on a specific feature. Try "--help topics" + -q, --quiet .............. minimise nodemon messages to start/stop only + -v, --version ............ current nodemon version + -V, --verbose ............ show detail on what is causing restarts + + +> Note that any unrecognised arguments are passed to the executing command. 
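 As an illustration (the paths and values here are hypothetical, and the JSON
 key names are inferred from the option parser included later in this patch),
 the watching flags above can be combined on one command line, and because
 nodemon.json options map 1-to-1 with the CLI options (see config.txt above),
 the same setup can be kept in a nodemon.json instead, with the script itself
 still given on the command line:

   $ nodemon -L -P 250 -w src -e js,json server.js

   {
     "legacyWatch": true,
     "pollingInterval": 250,
     "watch": ["src"],
     "ext": "js,json"
   }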
diff --git a/node_modules/nodemon/doc/cli/topics.txt b/node_modules/nodemon/doc/cli/topics.txt new file mode 100644 index 0000000..9fe3e2b --- /dev/null +++ b/node_modules/nodemon/doc/cli/topics.txt @@ -0,0 +1,8 @@ + + options .................. show all available nodemon options + config ................... default config options using nodemon.json + authors .................. contributors to this project + logo ..................... <3 + whoami ................... I, AM, NODEMON \o/ + + Please support https://github.com/remy/nodemon/ diff --git a/node_modules/nodemon/doc/cli/usage.txt b/node_modules/nodemon/doc/cli/usage.txt new file mode 100644 index 0000000..bca98b5 --- /dev/null +++ b/node_modules/nodemon/doc/cli/usage.txt @@ -0,0 +1,3 @@ + Usage: nodemon [nodemon options] [script.js] [args] + + See "nodemon --help" for more. diff --git a/node_modules/nodemon/doc/cli/whoami.txt b/node_modules/nodemon/doc/cli/whoami.txt new file mode 100644 index 0000000..efc3382 --- /dev/null +++ b/node_modules/nodemon/doc/cli/whoami.txt @@ -0,0 +1,9 @@ +__/\\\\\_____/\\\_______/\\\\\_______/\\\\\\\\\\\\_____/\\\\\\\\\\\\\\\__/\\\\____________/\\\\_______/\\\\\_______/\\\\\_____/\\\_ + _\/\\\\\\___\/\\\_____/\\\///\\\____\/\\\////////\\\__\/\\\///////////__\/\\\\\\________/\\\\\\_____/\\\///\\\____\/\\\\\\___\/\\\_ + _\/\\\/\\\__\/\\\___/\\\/__\///\\\__\/\\\______\//\\\_\/\\\_____________\/\\\//\\\____/\\\//\\\___/\\\/__\///\\\__\/\\\/\\\__\/\\\_ + _\/\\\//\\\_\/\\\__/\\\______\//\\\_\/\\\_______\/\\\_\/\\\\\\\\\\\_____\/\\\\///\\\/\\\/_\/\\\__/\\\______\//\\\_\/\\\//\\\_\/\\\_ + _\/\\\\//\\\\/\\\_\/\\\_______\/\\\_\/\\\_______\/\\\_\/\\\///////______\/\\\__\///\\\/___\/\\\_\/\\\_______\/\\\_\/\\\\//\\\\/\\\_ + _\/\\\_\//\\\/\\\_\//\\\______/\\\__\/\\\_______\/\\\_\/\\\_____________\/\\\____\///_____\/\\\_\//\\\______/\\\__\/\\\_\//\\\/\\\_ + _\/\\\__\//\\\\\\__\///\\\__/\\\____\/\\\_______/\\\__\/\\\_____________\/\\\_____________\/\\\__\///\\\__/\\\____\/\\\__\//\\\\\\_ + _\/\\\___\//\\\\\____\///\\\\\/_____\/\\\\\\\\\\\\/___\/\\\\\\\\\\\\\\\_\/\\\_____________\/\\\____\///\\\\\/_____\/\\\___\//\\\\\_ + _\///_____\/////_______\/////_______\////////////_____\///////////////__\///______________\///_______\/////_______\///_____\/////__ \ No newline at end of file diff --git a/node_modules/nodemon/index.d.ts b/node_modules/nodemon/index.d.ts new file mode 100644 index 0000000..d56c053 --- /dev/null +++ b/node_modules/nodemon/index.d.ts @@ -0,0 +1,141 @@ +export type NodemonEventHandler = + | 'start' + | 'crash' + | 'exit' + | 'quit' + | 'restart' + | 'config:update' + | 'log' + | 'readable' + | 'stdout' + | 'stderr'; + +export type NodemonEventListener = { + on(event: 'start' | 'crash' | 'readable', listener: () => void): Nodemon; + on(event: 'log', listener: (e: NodemonEventLog) => void): Nodemon; + on(event: 'stdout' | 'stderr', listener: (e: string) => void): Nodemon; + on(event: 'restart', listener: (e?: NodemonEventRestart) => void): Nodemon; + on(event: 'quit', listener: (e?: NodemonEventQuit) => void): Nodemon; + on(event: 'exit', listener: (e?: NodemonEventExit) => void): Nodemon; + on( + event: 'config:update', + listener: (e?: NodemonEventConfig) => void + ): Nodemon; +}; + +export type Nodemon = { + (options?: NodemonSettings): Nodemon; + on(event: 'start' | 'crash', listener: () => void): Nodemon; + on(event: 'log', listener: (e: NodemonEventLog) => void): Nodemon; + on(event: 'restart', listener: (e?: NodemonEventRestart) => void): Nodemon; + on(event: 'quit', listener: (e?: 
NodemonEventQuit) => void): Nodemon; + on(event: 'exit', listener: (e?: NodemonEventExit) => void): Nodemon; + on( + event: 'config:update', + listener: (e?: NodemonEventConfig) => void + ): Nodemon; + + // this is repeated because VS Code doesn't autocomplete otherwise + addEventListener(event: 'start' | 'crash', listener: () => void): Nodemon; + addEventListener( + event: 'log', + listener: (e: NodemonEventLog) => void + ): Nodemon; + addEventListener( + event: 'restart', + listener: (e?: NodemonEventRestart) => void + ): Nodemon; + addEventListener( + event: 'quit', + listener: (e?: NodemonEventQuit) => void + ): Nodemon; + addEventListener( + event: 'exit', + listener: (e?: NodemonEventExit) => void + ): Nodemon; + addEventListener( + event: 'config:update', + listener: (e?: NodemonEventConfig) => void + ): Nodemon; + + once(event: 'start' | 'crash', listener: () => void): Nodemon; + once(event: 'log', listener: (e: NodemonEventLog) => void): Nodemon; + once(event: 'restart', listener: (e?: NodemonEventRestart) => void): Nodemon; + once(event: 'quit', listener: (e?: NodemonEventQuit) => void): Nodemon; + once(event: 'exit', listener: (e?: NodemonEventExit) => void): Nodemon; + once( + event: 'config:update', + listener: (e?: NodemonEventConfig) => void + ): Nodemon; + + removeAllListeners(event: NodemonEventHandler): Nodemon; + emit(type: NodemonEventHandler, event?: any): Nodemon; + reset(callback: Function): Nodemon; + restart(): Nodemon; + config: NodemonSettings; +}; + +export type NodemonEventLog = { + /** + detail*: what you get with nodemon --verbose. + status: subprocess starting, restarting. + fail: is the subprocess crashing. + error: is a nodemon system error. + */ + type: 'detail' | 'log' | 'status' | 'error' | 'fail'; + /** the plain text message */ + message: String; + /** contains the terminal escape codes to add colour, plus the "[nodemon]" prefix */ + colour: String; +}; + +export interface NodemonEventRestart { + matched?: { + result: string[]; + total: number; + }; +} + +export type NodemonEventQuit = 143 | 130; +export type NodemonEventExit = number; + +// TODO: Define the type of NodemonEventConfig +export type NodemonEventConfig = any; + +export interface NodemonConfig { + /* restartable defaults to "rs" as a string the user enters */ + restartable?: false | String; + colours?: Boolean; + execMap?: { [key: string]: string }; + ignoreRoot?: string[]; + watch?: string[]; + stdin?: boolean; + runOnChangeOnly?: boolean; + verbose?: boolean; + signal?: string; + stdout?: boolean; + watchOptions?: WatchOptions; +} + +export interface NodemonSettings extends NodemonConfig { + script: string; + ext?: string; // "js,mjs" etc (should really support an array of strings, but I don't think it does right now) + events?: { [key: string]: string }; + env?: { [key: string]: string }; + exec?: string; // node, python, etc + execArgs?: string[]; // args passed to node, etc, + nodeArgs?: string[]; // args passed to node, etc, + delay?: number; +} + +export interface WatchOptions { + ignorePermissionErrors: boolean; + ignored: string; + persistent: boolean; + usePolling: boolean; + interval: number; +} + +const nodemon: Nodemon = (settings: NodemonSettings): Nodemon => {}; + +export default nodemon; diff --git a/node_modules/nodemon/jsconfig.json b/node_modules/nodemon/jsconfig.json new file mode 100644 index 0000000..d77141c --- /dev/null +++ b/node_modules/nodemon/jsconfig.json @@ -0,0 +1,7 @@ +{ + "compilerOptions": { + "typeRoots": ["./index.d.ts", "./node_modules/@types"], + 
"checkJs": true + }, + "exclude": ["node_modules"] +} diff --git a/node_modules/nodemon/lib/cli/index.js b/node_modules/nodemon/lib/cli/index.js new file mode 100644 index 0000000..bf9e809 --- /dev/null +++ b/node_modules/nodemon/lib/cli/index.js @@ -0,0 +1,49 @@ +var parse = require('./parse'); + +/** + * Converts a string to command line args, in particular + * groups together quoted values. + * This is a utility function to allow calling nodemon as a required + * library, but with the CLI args passed in (instead of an object). + * + * @param {String} string + * @return {Array} + */ +function stringToArgs(string) { + var args = []; + + var parts = string.split(' '); + var length = parts.length; + var i = 0; + var open = false; + var grouped = ''; + var lead = ''; + + for (; i < length; i++) { + lead = parts[i].substring(0, 1); + if (lead === '"' || lead === '\'') { + open = lead; + grouped = parts[i].substring(1); + } else if (open && parts[i].slice(-1) === open) { + open = false; + grouped += ' ' + parts[i].slice(0, -1); + args.push(grouped); + } else if (open) { + grouped += ' ' + parts[i]; + } else { + args.push(parts[i]); + } + } + + return args; +} + +module.exports = { + parse: function (argv) { + if (typeof argv === 'string') { + argv = stringToArgs(argv); + } + + return parse(argv); + }, +}; \ No newline at end of file diff --git a/node_modules/nodemon/lib/cli/parse.js b/node_modules/nodemon/lib/cli/parse.js new file mode 100644 index 0000000..ad74003 --- /dev/null +++ b/node_modules/nodemon/lib/cli/parse.js @@ -0,0 +1,230 @@ +/* + +nodemon is a utility for node, and replaces the use of the executable +node. So the user calls `nodemon foo.js` instead. + +nodemon can be run in a number of ways: + +`nodemon` - tries to use package.json#main property to run +`nodemon` - if no package, looks for index.js +`nodemon app.js` - runs app.js +`nodemon --arg app.js --apparg` - eats arg1, and runs app.js with apparg +`nodemon --apparg` - as above, but passes apparg to package.json#main (or + index.js) +`nodemon --debug app.js + +*/ + +var fs = require('fs'); +var path = require('path'); +var existsSync = fs.existsSync || path.existsSync; + +module.exports = parse; + +/** + * Parses the command line arguments `process.argv` and returns the + * nodemon options, the user script and the executable script. + * + * @param {Array} full process arguments, including `node` leading arg + * @return {Object} { options, script, args } + */ +function parse(argv) { + if (typeof argv === 'string') { + argv = argv.split(' '); + } + + var eat = function (i, args) { + if (i <= args.length) { + return args.splice(i + 1, 1).pop(); + } + }; + + var args = argv.slice(2); + var script = null; + var nodemonOptions = { scriptPosition: null }; + + var nodemonOpt = nodemonOption.bind(null, nodemonOptions); + var lookForArgs = true; + + // move forward through the arguments + for (var i = 0; i < args.length; i++) { + // if the argument looks like a file, then stop eating + if (!script) { + if (args[i] === '.' || existsSync(args[i])) { + script = args.splice(i, 1).pop(); + + // we capture the position of the script because we'll reinsert it in + // the right place in run.js:command (though I'm not sure we should even + // take it out of the array in the first place, but this solves passing + // arguments to the exec process for now). 
+ nodemonOptions.scriptPosition = i; + i--; + continue; + } + } + + if (lookForArgs) { + // respect the standard way of saying: hereafter belongs to my script + if (args[i] === '--') { + args.splice(i, 1); + nodemonOptions.scriptPosition = i; + // cycle back one argument, as we just ate this one up + i--; + + // ignore all further nodemon arguments + lookForArgs = false; + + // move to the next iteration + continue; + } + + if (nodemonOpt(args[i], eat.bind(null, i, args)) !== false) { + args.splice(i, 1); + // cycle back one argument, as we just ate this one up + i--; + } + } + } + + nodemonOptions.script = script; + nodemonOptions.args = args; + + return nodemonOptions; +} + + +/** + * Given an argument (ie. from process.argv), sets nodemon + * options and can eat up the argument value + * + * @param {Object} options object that will be updated + * @param {Sting} current argument from argv + * @param {Function} the callback to eat up the next argument in argv + * @return {Boolean} false if argument was not a nodemon arg + */ +function nodemonOption(options, arg, eatNext) { + // line separation on purpose to help legibility + if (arg === '--help' || arg === '-h' || arg === '-?') { + var help = eatNext(); + options.help = help ? help : true; + } else + + if (arg === '--version' || arg === '-v') { + options.version = true; + } else + + if (arg === '--no-update-notifier') { + options.noUpdateNotifier = true; + } else + + if (arg === '--spawn') { + options.spawn = true; + } else + + if (arg === '--dump') { + options.dump = true; + } else + + if (arg === '--verbose' || arg === '-V') { + options.verbose = true; + } else + + if (arg === '--legacy-watch' || arg === '-L') { + options.legacyWatch = true; + } else + + if (arg === '--polling-interval' || arg === '-P') { + options.pollingInterval = parseInt(eatNext(), 10); + } else + + // Depricated as this is "on" by default + if (arg === '--js') { + options.js = true; + } else + + if (arg === '--quiet' || arg === '-q') { + options.quiet = true; + } else + + if (arg === '--config') { + options.configFile = eatNext(); + } else + + if (arg === '--watch' || arg === '-w') { + if (!options.watch) { options.watch = []; } + options.watch.push(eatNext()); + } else + + if (arg === '--ignore' || arg === '-i') { + if (!options.ignore) { options.ignore = []; } + options.ignore.push(eatNext()); + } else + + if (arg === '--exitcrash') { + options.exitcrash = true; + } else + + if (arg === '--delay' || arg === '-d') { + options.delay = parseDelay(eatNext()); + } else + + if (arg === '--exec' || arg === '-x') { + options.exec = eatNext(); + } else + + if (arg === '--no-stdin' || arg === '-I') { + options.stdin = false; + } else + + if (arg === '--on-change-only' || arg === '-C') { + options.runOnChangeOnly = true; + } else + + if (arg === '--ext' || arg === '-e') { + options.ext = eatNext(); + } else + + if (arg === '--no-colours' || arg === '--no-colors') { + options.colours = false; + } else + + if (arg === '--signal' || arg === '-s') { + options.signal = eatNext(); + } else + + if (arg === '--cwd') { + options.cwd = eatNext(); + + // go ahead and change directory. This is primarily for nodemon tools like + // grunt-nodemon - we're doing this early because it will affect where the + // user script is searched for. + process.chdir(path.resolve(options.cwd)); + } else { + + // this means we didn't match + return false; + } +} + +/** + * Given an argument (ie. 
from nodemonOption()), will parse and return the + * equivalent millisecond value or 0 if the argument cannot be parsed + * + * @param {String} argument value given to the --delay option + * @return {Number} millisecond equivalent of the argument + */ +function parseDelay(value) { + var millisPerSecond = 1000; + var millis = 0; + + if (value.match(/^\d*ms$/)) { + // Explicitly parse for milliseconds when using ms time specifier + millis = parseInt(value, 10); + } else { + // Otherwise, parse for seconds, with or without time specifier then convert + millis = parseFloat(value) * millisPerSecond; + } + + return isNaN(millis) ? 0 : millis; +} + diff --git a/node_modules/nodemon/lib/config/command.js b/node_modules/nodemon/lib/config/command.js new file mode 100644 index 0000000..9839b5c --- /dev/null +++ b/node_modules/nodemon/lib/config/command.js @@ -0,0 +1,43 @@ +module.exports = command; + +/** + * command constructs the executable command to run in a shell including the + * user script, the command arguments. + * + * @param {Object} settings Object as: + * { execOptions: { + * exec: String, + * [script: String], + * [scriptPosition: Number], + * [execArgs: Array] + * } + * } + * @return {Object} an object with the node executable and the + * arguments to the command + */ +function command(settings) { + var options = settings.execOptions; + var executable = options.exec; + var args = []; + + // after "executable" go the exec args (like --debug, etc) + if (options.execArgs) { + [].push.apply(args, options.execArgs); + } + + // then goes the user's script arguments + if (options.args) { + [].push.apply(args, options.args); + } + + // after the "executable" goes the user's script + if (options.script) { + args.splice((options.scriptPosition || 0) + + options.execArgs.length, 0, options.script); + } + + return { + executable: executable, + args: args, + }; +} diff --git a/node_modules/nodemon/lib/config/defaults.js b/node_modules/nodemon/lib/config/defaults.js new file mode 100644 index 0000000..dc95d34 --- /dev/null +++ b/node_modules/nodemon/lib/config/defaults.js @@ -0,0 +1,34 @@ +var ignoreRoot = require('ignore-by-default').directories(); + +// default options for config.options +const defaults = { + restartable: 'rs', + colours: true, + execMap: { + py: 'python', + rb: 'ruby', + ts: 'ts-node', + // more can be added here such as ls: lsc - but please ensure it's cross + // compatible with linux, mac and windows, or make the default.js + // dynamically append the `.cmd` for node based utilities + }, + ignoreRoot: ignoreRoot.map((_) => `**/${_}/**`), + watch: ['*.*'], + stdin: true, + runOnChangeOnly: false, + verbose: false, + signal: 'SIGUSR2', + // 'stdout' refers to the default behaviour of a required nodemon's child, + // but also includes stderr. If this is false, data is still dispatched via + // nodemon.on('stdout/stderr') + stdout: true, + watchOptions: {}, +}; + +const nodeOptions = process.env.NODE_OPTIONS || ''; // ? 
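+// if NODE_OPTIONS already registers a module loader (--loader or --import,
+// e.g. ts-node/esm or tsx), drop the default ts -> ts-node mapping below
+// rather than override the loader the user has configured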
+ +if (/--(loader|import)\b/.test(nodeOptions)) { + delete defaults.execMap.ts; +} + +module.exports = defaults; diff --git a/node_modules/nodemon/lib/config/exec.js b/node_modules/nodemon/lib/config/exec.js new file mode 100644 index 0000000..6d17eab --- /dev/null +++ b/node_modules/nodemon/lib/config/exec.js @@ -0,0 +1,234 @@ +const path = require('path'); +const fs = require('fs'); +const existsSync = fs.existsSync; +const utils = require('../utils'); + +module.exports = exec; +module.exports.expandScript = expandScript; + +/** + * Reads the cwd/package.json file and looks to see if it can load a script + * and possibly an exec first from package.main, then package.start. + * + * @return {Object} exec & script if found + */ +function execFromPackage() { + // doing a try/catch because we can't use the path.exist callback pattern + // or we could, but the code would get messy, so this will do exactly + // what we're after - if the file doesn't exist, it'll throw. + try { + // note: this isn't nodemon's package, it's the user's cwd package + var pkg = require(path.join(process.cwd(), 'package.json')); + if (pkg.main !== undefined) { + // no app found to run - so give them a tip and get the feck out + return { exec: null, script: pkg.main }; + } + + if (pkg.scripts && pkg.scripts.start) { + return { exec: pkg.scripts.start }; + } + } catch (e) {} + + return null; +} + +function replace(map, str) { + var re = new RegExp('{{(' + Object.keys(map).join('|') + ')}}', 'g'); + return str.replace(re, function (all, m) { + return map[m] || all || ''; + }); +} + +function expandScript(script, ext) { + if (!ext) { + ext = '.js'; + } + if (script.indexOf(ext) !== -1) { + return script; + } + + if (existsSync(path.resolve(script))) { + return script; + } + + if (existsSync(path.resolve(script + ext))) { + return script + ext; + } + + return script; +} + +/** + * Discovers all the options required to run the script + * and if a custom exec has been passed in, then it will + * also try to work out what extensions to monitor and + * whether there's a special way of running that script. + * + * @param {Object} nodemonOptions + * @param {Object} execMap + * @return {Object} new and updated version of nodemonOptions + */ +function exec(nodemonOptions, execMap) { + if (!execMap) { + execMap = {}; + } + + var options = utils.clone(nodemonOptions || {}); + var script; + + // if there's no script passed, try to get it from the first argument + if (!options.script && (options.args || []).length) { + script = expandScript( + options.args[0], + options.ext && '.' 
+ (options.ext || 'js').split(',')[0] + ); + + // if the script was found, shift it off our args + if (script !== options.args[0]) { + options.script = script; + options.args.shift(); + } + } + + // if there's no exec found yet, then try to read it from the local + // package.json this logic used to sit in the cli/parse, but actually the cli + // should be parsed first, then the user options (via nodemon.json) then + // finally default down to pot shots at the directory via package.json + if (!options.exec && !options.script) { + var found = execFromPackage(); + if (found !== null) { + if (found.exec) { + options.exec = found.exec; + } + if (!options.script) { + options.script = found.script; + } + if (Array.isArray(options.args) && options.scriptPosition === null) { + options.scriptPosition = options.args.length; + } + } + } + + // var options = utils.clone(nodemonOptions || {}); + script = path.basename(options.script || ''); + + var scriptExt = path.extname(script).slice(1); + + var extension = options.ext; + if (extension === undefined) { + var isJS = scriptExt === 'js' || scriptExt === 'mjs' || scriptExt === 'cjs'; + extension = isJS || !scriptExt ? 'js,mjs,cjs' : scriptExt; + extension += ',json'; // Always watch JSON files + } + + var execDefined = !!options.exec; + + // allows the user to simplify cli usage: + // https://github.com/remy/nodemon/issues/195 + // but always give preference to the user defined argument + if (!options.exec && execMap[scriptExt] !== undefined) { + options.exec = execMap[scriptExt]; + execDefined = true; + } + + options.execArgs = nodemonOptions.execArgs || []; + + if (Array.isArray(options.exec)) { + options.execArgs = options.exec; + options.exec = options.execArgs.shift(); + } + + if (options.exec === undefined) { + options.exec = 'node'; + } else { + // allow variable substitution for {{filename}} and {{pwd}} + var substitution = replace.bind(null, { + filename: options.script, + pwd: process.cwd(), + }); + + var newExec = substitution(options.exec); + if ( + newExec !== options.exec && + options.exec.indexOf('{{filename}}') !== -1 + ) { + options.script = null; + } + options.exec = newExec; + + var newExecArgs = options.execArgs.map(substitution); + if (newExecArgs.join('') !== options.execArgs.join('')) { + options.execArgs = newExecArgs; + delete options.script; + } + } + + if (options.exec === 'node' && options.nodeArgs && options.nodeArgs.length) { + options.execArgs = options.execArgs.concat(options.nodeArgs); + } + + // note: indexOf('coffee') handles both .coffee and .litcoffee + if ( + !execDefined && + options.exec === 'node' && + scriptExt.indexOf('coffee') !== -1 + ) { + options.exec = 'coffee'; + + // we need to get execArgs set before the script + // for example, in `nodemon --debug my-script.coffee --my-flag`, debug is an + // execArg, while my-flag is a script arg + var leadingArgs = (options.args || []).splice(0, options.scriptPosition); + options.execArgs = options.execArgs.concat(leadingArgs); + options.scriptPosition = 0; + + if (options.execArgs.length > 0) { + // because this is the coffee executable, we need to combine the exec args + // into a single argument after the nodejs flag + options.execArgs = ['--nodejs', options.execArgs.join(' ')]; + } + } + + if (options.exec === 'coffee') { + // don't override user specified extension tracking + if (options.ext === undefined) { + if (extension) { + extension += ','; + } + extension += 'coffee,litcoffee'; + } + + // because windows can't find 'coffee', it needs the real file 
'coffee.cmd' + if (utils.isWindows) { + options.exec += '.cmd'; + } + } + + // allow users to make a mistake on the extension to monitor + // converts .js, pug => js,pug + // BIG NOTE: user can't do this: nodemon -e *.js + // because the terminal will automatically expand the glob against + // the file system :( + extension = (extension.match(/[^,*\s]+/g) || []) + .map((ext) => ext.replace(/^\./, '')) + .join(','); + + options.ext = extension; + + if (options.script) { + options.script = expandScript( + options.script, + extension && '.' + extension.split(',')[0] + ); + } + + options.env = {}; + // make sure it's an object (and since we don't have ) + if ({}.toString.apply(nodemonOptions.env) === '[object Object]') { + options.env = utils.clone(nodemonOptions.env); + } else if (nodemonOptions.env !== undefined) { + throw new Error('nodemon env values must be an object: { PORT: 8000 }'); + } + + return options; +} diff --git a/node_modules/nodemon/lib/config/index.js b/node_modules/nodemon/lib/config/index.js new file mode 100644 index 0000000..c78c435 --- /dev/null +++ b/node_modules/nodemon/lib/config/index.js @@ -0,0 +1,93 @@ +/** + * Manages the internal config of nodemon, checking for the state of support + * with fs.watch, how nodemon can watch files (using find or fs methods). + * + * This is *not* the user's config. + */ +var debug = require('debug')('nodemon'); +var load = require('./load'); +var rules = require('../rules'); +var utils = require('../utils'); +var pinVersion = require('../version').pin; +var command = require('./command'); +var rulesToMonitor = require('../monitor/match').rulesToMonitor; +var bus = utils.bus; + +function reset() { + rules.reset(); + + config.dirs = []; + config.options = { ignore: [], watch: [], monitor: [] }; + config.lastStarted = 0; + config.loaded = []; +} + +var config = { + run: false, + system: { + cwd: process.cwd(), + }, + required: false, + dirs: [], + timeout: 1000, + options: {}, +}; + +/** + * Take user defined settings, then detect the local machine capability, then + * look for local and global nodemon.json files and merge together the final + * settings with the config for nodemon. 
+ * + * @param {Object} settings user defined settings for nodemon (typically on + * the cli) + * @param {Function} ready callback fired once the config is loaded + */ +config.load = function (settings, ready) { + reset(); + var config = this; + load(settings, config.options, config, function (options) { + config.options = options; + + if (options.watch.length === 0) { + // this is to catch when the watch is left blank + options.watch.push('*.*'); + } + + if (options['watch_interval']) { // jshint ignore:line + options.watchInterval = options['watch_interval']; // jshint ignore:line + } + + config.watchInterval = options.watchInterval || null; + if (options.signal) { + config.signal = options.signal; + } + + var cmd = command(config.options); + config.command = { + raw: cmd, + string: utils.stringify(cmd.executable, cmd.args), + }; + + // now run automatic checks on system adding to the config object + options.monitor = rulesToMonitor(options.watch, options.ignore, config); + + var cwd = process.cwd(); + debug('config: dirs', config.dirs); + if (config.dirs.length === 0) { + config.dirs.unshift(cwd); + } + + bus.emit('config:update', config); + pinVersion().then(function () { + ready(config); + }).catch(e => { + // this doesn't help testing, but does give exposure on syntax errors + console.error(e.stack); + setTimeout(() => { throw e; }, 0); + }); + }); +}; + +config.reset = reset; + +module.exports = config; diff --git a/node_modules/nodemon/lib/config/load.js b/node_modules/nodemon/lib/config/load.js new file mode 100644 index 0000000..75d8443 --- /dev/null +++ b/node_modules/nodemon/lib/config/load.js @@ -0,0 +1,223 @@ +var debug = require('debug')('nodemon'); +var fs = require('fs'); +var path = require('path'); +var exists = fs.exists || path.exists; +var utils = require('../utils'); +var rules = require('../rules'); +var parse = require('../rules/parse'); +var exec = require('./exec'); +var defaults = require('./defaults'); + +module.exports = load; +module.exports.mutateExecOptions = mutateExecOptions; + +var existsSync = fs.existsSync || path.existsSync; + +function findAppScript() { + // nodemon has been run alone, so try to read the package file + // or try to read the index.js file + + var pkg = + existsSync(path.join(process.cwd(), 'package.json')) && + require(path.join(process.cwd(), 'package.json')); + if ((!pkg || pkg.main == undefined) && existsSync('./index.js')) { + return 'index.js'; + } +} + +/** + * Load the nodemon config, first reading the global root/nodemon.json, then + * the local nodemon.json to the exec and then overwriting using any user + * specified settings (i.e. from the cli) + * + * @param {Object} settings user defined settings + * @param {Function} ready callback that receives complete config + */ +function load(settings, options, config, callback) { + config.loaded = []; + // first load the root nodemon.json + loadFile(options, config, utils.home, function (options) { + // then load the user's local configuration file + if (settings.configFile) { + options.configFile = path.resolve(settings.configFile); + } + loadFile(options, config, process.cwd(), function (options) { + // Then merge over with the user settings (parsed from the cli). 
+ // Note that merge protects and favours existing values over new values, + // and thus command line arguments get priority + options = utils.merge(settings, options); + + // legacy support + if (!Array.isArray(options.ignore)) { + options.ignore = [options.ignore]; + } + + if (!options.ignoreRoot) { + options.ignoreRoot = defaults.ignoreRoot; + } + + // blend the user ignore and the default ignore together + if (options.ignoreRoot && options.ignore) { + if (!Array.isArray(options.ignoreRoot)) { + options.ignoreRoot = [options.ignoreRoot]; + } + options.ignore = options.ignoreRoot.concat(options.ignore); + } else { + options.ignore = defaults.ignore.concat(options.ignore); + } + + // add in any missing defaults + options = utils.merge(options, defaults); + + if (!options.script && !options.exec) { + var found = findAppScript(); + if (found) { + if (!options.args) { + options.args = []; + } + // if the script is found as a result of not being on the command + // line, then we move any of the pre double-dash args in execArgs + const n = + options.scriptPosition === null + ? options.args.length + : options.scriptPosition; + + options.execArgs = (options.execArgs || []).concat( + options.args.splice(0, n) + ); + options.scriptPosition = null; + + options.script = found; + } + } + + mutateExecOptions(options); + + if (options.quiet) { + utils.quiet(); + } + + if (options.verbose) { + utils.debug = true; + } + + // simplify the ready callback to be called after the rules are normalised + // from strings to regexp through the rules lib. Note that this gets + // created *after* options is overwritten twice in the lines above. + var ready = function (options) { + normaliseRules(options, callback); + }; + + ready(options); + }); + }); +} + +function normaliseRules(options, ready) { + // convert ignore and watch options to rules/regexp + rules.watch.add(options.watch); + rules.ignore.add(options.ignore); + + // normalise the watch and ignore arrays + options.watch = options.watch === false ? false : rules.rules.watch; + options.ignore = rules.rules.ignore; + + ready(options); +} + +/** + * Looks for a config in the current working directory, and a config in the + * user's home directory, merging the two together, giving priority to local + * config. 
This can then be overwritten later by command line arguments + * + * @param {Function} ready callback to pass loaded settings to + */ +function loadFile(options, config, dir, ready) { + if (!ready) { + ready = function () {}; + } + + var callback = function (settings) { + // prefer the local nodemon.json and fill in missing items using + // the global options + ready(utils.merge(settings, options)); + }; + + if (!dir) { + return callback({}); + } + + var filename = options.configFile || path.join(dir, 'nodemon.json'); + + if (config.loaded.indexOf(filename) !== -1) { + // don't bother re-parsing the same config file + return callback({}); + } + + fs.readFile(filename, 'utf8', function (err, data) { + if (err) { + if (err.code === 'ENOENT') { + if (!options.configFile && dir !== utils.home) { + // if no specified local config file and local nodemon.json + // doesn't exist, try the package.json + return loadPackageJSON(config, callback); + } + } + return callback({}); + } + + var settings = {}; + + try { + settings = JSON.parse(data.toString('utf8').replace(/^\uFEFF/, '')); + if (!filename.endsWith('package.json') || settings.nodemonConfig) { + config.loaded.push(filename); + } + } catch (e) { + utils.log.fail('Failed to parse config ' + filename); + console.error(e); + process.exit(1); + } + + // options values will overwrite settings + callback(settings); + }); +} + +function loadPackageJSON(config, ready) { + if (!ready) { + ready = () => {}; + } + + const dir = process.cwd(); + const filename = path.join(dir, 'package.json'); + const packageLoadOptions = { configFile: filename }; + return loadFile(packageLoadOptions, config, dir, (settings) => { + ready(settings.nodemonConfig || {}); + }); +} + +function mutateExecOptions(options) { + // work out the execOptions based on the final config we have + options.execOptions = exec( + { + script: options.script, + exec: options.exec, + args: options.args, + scriptPosition: options.scriptPosition, + nodeArgs: options.nodeArgs, + execArgs: options.execArgs, + ext: options.ext, + env: options.env, + }, + options.execMap + ); + + // clean up values that we don't need at the top level + delete options.scriptPosition; + delete options.script; + delete options.args; + delete options.ext; + + return options; +} diff --git a/node_modules/nodemon/lib/help/index.js b/node_modules/nodemon/lib/help/index.js new file mode 100644 index 0000000..1054b60 --- /dev/null +++ b/node_modules/nodemon/lib/help/index.js @@ -0,0 +1,27 @@ +var fs = require('fs'); +var path = require('path'); +const supportsColor = require('supports-color'); + +module.exports = help; + +const highlight = supportsColor.stdout ? 
'\x1B\[$1m' : ''; + +function help(item) { + if (!item) { + item = 'help'; + } else if (item === true) { // if used with -h or --help and no args + item = 'help'; + } + + // cleanse the filename to only contain letters + // aka: /\W/g but figured this was eaiser to read + item = item.replace(/[^a-z]/gi, ''); + + try { + var dir = path.join(__dirname, '..', '..', 'doc', 'cli', item + '.txt'); + var body = fs.readFileSync(dir, 'utf8'); + return body.replace(/\\x1B\[(.)m/g, highlight); + } catch (e) { + return '"' + item + '" help can\'t be found'; + } +} diff --git a/node_modules/nodemon/lib/index.js b/node_modules/nodemon/lib/index.js new file mode 100644 index 0000000..0eca5c4 --- /dev/null +++ b/node_modules/nodemon/lib/index.js @@ -0,0 +1 @@ +module.exports = require('./nodemon'); \ No newline at end of file diff --git a/node_modules/nodemon/lib/monitor/index.js b/node_modules/nodemon/lib/monitor/index.js new file mode 100644 index 0000000..89db029 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/index.js @@ -0,0 +1,4 @@ +module.exports = { + run: require('./run'), + watch: require('./watch').watch, +}; diff --git a/node_modules/nodemon/lib/monitor/match.js b/node_modules/nodemon/lib/monitor/match.js new file mode 100644 index 0000000..2ac3b29 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/match.js @@ -0,0 +1,276 @@ +const minimatch = require('minimatch'); +const path = require('path'); +const fs = require('fs'); +const debug = require('debug')('nodemon:match'); +const utils = require('../utils'); + +module.exports = match; +module.exports.rulesToMonitor = rulesToMonitor; + +function rulesToMonitor(watch, ignore, config) { + var monitor = []; + + if (!Array.isArray(ignore)) { + if (ignore) { + ignore = [ignore]; + } else { + ignore = []; + } + } + + if (!Array.isArray(watch)) { + if (watch) { + watch = [watch]; + } else { + watch = []; + } + } + + if (watch && watch.length) { + monitor = utils.clone(watch); + } + + if (ignore) { + [].push.apply(monitor, (ignore || []).map(function (rule) { + return '!' + rule; + })); + } + + var cwd = process.cwd(); + + // next check if the monitored paths are actual directories + // or just patterns - and expand the rule to include *.* + monitor = monitor.map(function (rule) { + var not = rule.slice(0, 1) === '!'; + + if (not) { + rule = rule.slice(1); + } + + if (rule === '.' || rule === '.*') { + rule = '*.*'; + } + + var dir = path.resolve(cwd, rule); + + try { + var stat = fs.statSync(dir); + if (stat.isDirectory()) { + rule = dir; + if (rule.slice(-1) !== '/') { + rule += '/'; + } + rule += '**/*'; + + // `!not` ... sorry. + if (!not) { + config.dirs.push(dir); + } + } else { + // ensures we end up in the check that tries to get a base directory + // and then adds it to the watch list + throw new Error(); + } + } catch (e) { + var base = tryBaseDir(dir); + if (!not && base) { + if (config.dirs.indexOf(base) === -1) { + config.dirs.push(base); + } + } + } + + if (rule.slice(-1) === '/') { + // just slap on a * anyway + rule += '*'; + } + + // if the url ends with * but not **/* and not *.* + // then convert to **/* - somehow it was missed :-\ + if (rule.slice(-4) !== '**/*' && + rule.slice(-1) === '*' && + rule.indexOf('*.') === -1) { + + if (rule.slice(-2) !== '**') { + rule += '*/*'; + } + } + + + return (not ? '!' 
: '') + rule; + }); + + return monitor; +} + +function tryBaseDir(dir) { + var stat; + if (/[?*\{\[]+/.test(dir)) { // if this is pattern, then try to find the base + try { + var base = path.dirname(dir.replace(/([?*\{\[]+.*$)/, 'foo')); + stat = fs.statSync(base); + if (stat.isDirectory()) { + return base; + } + } catch (error) { + // console.log(error); + } + } else { + try { + stat = fs.statSync(dir); + // if this path is actually a single file that exists, then just monitor + // that, *specifically*. + if (stat.isFile() || stat.isDirectory()) { + return dir; + } + } catch (e) { } + } + + return false; +} + +function match(files, monitor, ext) { + // sort the rules by highest specificity (based on number of slashes) + // ignore rules (!) get sorted highest as they take precedent + const cwd = process.cwd(); + var rules = monitor.sort(function (a, b) { + var r = b.split(path.sep).length - a.split(path.sep).length; + var aIsIgnore = a.slice(0, 1) === '!'; + var bIsIgnore = b.slice(0, 1) === '!'; + + if (aIsIgnore || bIsIgnore) { + if (aIsIgnore) { + return -1; + } + + return 1; + } + + if (r === 0) { + return b.length - a.length; + } + return r; + }).map(function (s) { + var prefix = s.slice(0, 1); + + if (prefix === '!') { + if (s.indexOf('!' + cwd) === 0) { + return s; + } + + // if it starts with a period, then let's get the relative path + if (s.indexOf('!.') === 0) { + return '!' + path.resolve(cwd, s.substring(1)); + } + + return '!**' + (prefix !== path.sep ? path.sep : '') + s.slice(1); + } + + // if it starts with a period, then let's get the relative path + if (s.indexOf('.') === 0) { + return path.resolve(cwd, s); + } + + if (s.indexOf(cwd) === 0) { + return s; + } + + return '**' + (prefix !== path.sep ? path.sep : '') + s; + }); + + debug('rules', rules); + + var good = []; + var whitelist = []; // files that we won't check against the extension + var ignored = 0; + var watched = 0; + var usedRules = []; + var minimatchOpts = { + dot: true, + }; + + // enable case-insensitivity on Windows + if (utils.isWindows) { + minimatchOpts.nocase = true; + } + + files.forEach(function (file) { + file = path.resolve(cwd, file); + + var matched = false; + for (var i = 0; i < rules.length; i++) { + if (rules[i].slice(0, 1) === '!') { + if (!minimatch(file, rules[i], minimatchOpts)) { + debug('ignored', file, 'rule:', rules[i]); + ignored++; + matched = true; + break; + } + } else { + debug('matched', file, 'rule:', rules[i]); + if (minimatch(file, rules[i], minimatchOpts)) { + watched++; + + // don't repeat the output if a rule is matched + if (usedRules.indexOf(rules[i]) === -1) { + usedRules.push(rules[i]); + utils.log.detail('matched rule: ' + rules[i]); + } + + // if the rule doesn't match the WATCH EVERYTHING + // but *does* match a rule that ends with *.*, then + // white list it - in that we don't run it through + // the extension check too. + if (rules[i] !== '**' + path.sep + '*.*' && + rules[i].slice(-3) === '*.*') { + whitelist.push(file); + } else if (path.basename(file) === path.basename(rules[i])) { + // if the file matches the actual rule, then it's put on whitelist + whitelist.push(file); + } else { + good.push(file); + } + matched = true; + break; + } else { + // utils.log.detail('no match: ' + rules[i], file); + } + } + } + if (!matched) { + ignored++; + } + }); + + debug('good', good) + + // finally check the good files against the extensions that we're monitoring + if (ext) { + if (ext.indexOf(',') === -1) { + ext = '**/*.' 
+ ext; + } else { + ext = '**/*.{' + ext + '}'; + } + + good = good.filter(function (file) { + // only compare the filename to the extension test + return minimatch(path.basename(file), ext, minimatchOpts); + }); + } // else assume *.* + + var result = good.concat(whitelist); + + if (utils.isWindows) { + // fix for windows testing - I *think* this is okay to do + result = result.map(function (file) { + return file.slice(0, 1).toLowerCase() + file.slice(1); + }); + } + + return { + result: result, + ignored: ignored, + watched: watched, + total: files.length, + }; +} diff --git a/node_modules/nodemon/lib/monitor/run.js b/node_modules/nodemon/lib/monitor/run.js new file mode 100644 index 0000000..5244203 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/run.js @@ -0,0 +1,555 @@ +var debug = require('debug')('nodemon:run'); +const statSync = require('fs').statSync; +var utils = require('../utils'); +var bus = utils.bus; +var childProcess = require('child_process'); +var spawn = childProcess.spawn; +var exec = childProcess.exec; +var execSync = childProcess.execSync; +var fork = childProcess.fork; +var watch = require('./watch').watch; +var config = require('../config'); +var child = null; // the actual child process we spawn +var killedAfterChange = false; +var noop = () => {}; +var restart = null; +var psTree = require('pstree.remy'); +var path = require('path'); +var signals = require('./signals'); +const undefsafe = require('undefsafe'); +const osRelease = parseInt(require('os').release().split('.')[0], 10); + +function run(options) { + var cmd = config.command.raw; + // moved up + // we need restart function below in the global scope for run.kill + /*jshint validthis:true*/ + restart = run.bind(this, options); + run.restart = restart; + + // binding options with instance of run + // so that we can use it in run.kill + run.options = options; + + var runCmd = !options.runOnChangeOnly || config.lastStarted !== 0; + if (runCmd) { + utils.log.status('starting `' + config.command.string + '`'); + } else { + // should just watch file if command is not to be run + // had another alternate approach + // to stop process being forked/spawned in the below code + // but this approach does early exit and makes code cleaner + debug('start watch on: %s', config.options.watch); + if (config.options.watch !== false) { + watch(); + return; + } + } + + config.lastStarted = Date.now(); + + var stdio = ['pipe', 'pipe', 'pipe']; + + if (config.options.stdout) { + stdio = ['pipe', process.stdout, process.stderr]; + } + + if (config.options.stdin === false) { + stdio = [process.stdin, process.stdout, process.stderr]; + } + + var sh = 'sh'; + var shFlag = '-c'; + + const binPath = process.cwd() + '/node_modules/.bin'; + + const spawnOptions = { + env: Object.assign({}, options.execOptions.env, process.env, { + PATH: + binPath + + path.delimiter + + (undefsafe(options, '.execOptions.env.PATH') || process.env.PATH), + }), + stdio: stdio, + }; + + var executable = cmd.executable; + + if (utils.isWindows) { + // if the exec includes a forward slash, reverse it for windows compat + // but *only* apply to the first command, and none of the arguments. 
+ // ref #1251 and #1236 + if (executable.indexOf('/') !== -1) { + executable = executable + .split(' ') + .map((e, i) => { + if (i === 0) { + return path.normalize(e); + } + return e; + }) + .join(' '); + } + // taken from npm's cli: https://git.io/vNFD4 + sh = process.env.comspec || 'cmd'; + shFlag = '/d /s /c'; + spawnOptions.windowsVerbatimArguments = true; + spawnOptions.windowsHide = true; + } + + var args = runCmd ? utils.stringify(executable, cmd.args) : ':'; + var spawnArgs = [sh, [shFlag, args], spawnOptions]; + + const firstArg = cmd.args[0] || ''; + + var inBinPath = false; + try { + inBinPath = statSync(`${binPath}/${executable}`).isFile(); + } catch (e) {} + + // hasStdio allows us to correctly handle stdin piping + // see: https://git.io/vNtX3 + const hasStdio = utils.satisfies('>= 6.4.0 || < 5'); + + // forking helps with sub-process handling and tends to clean up better + // than spawning, but it should only be used under specific conditions + const shouldFork = + !config.options.spawn && + !inBinPath && + !(firstArg.indexOf('-') === 0) && // don't fork if there's a node exec arg + firstArg !== 'inspect' && // don't fork it's `inspect` debugger + executable === 'node' && // only fork if node + utils.version.major > 4; // only fork if node version > 4 + + if (shouldFork) { + // this assumes the first argument is the script and slices it out, since + // we're forking + var forkArgs = cmd.args.slice(1); + var env = utils.merge(options.execOptions.env, process.env); + stdio.push('ipc'); + const forkOptions = { + env: env, + stdio: stdio, + silent: !hasStdio, + }; + if (utils.isWindows) { + forkOptions.windowsHide = true; + } + child = fork(options.execOptions.script, forkArgs, forkOptions); + utils.log.detail('forking'); + debug('fork', sh, shFlag, args); + } else { + utils.log.detail('spawning'); + child = spawn.apply(null, spawnArgs); + debug('spawn', sh, shFlag, args); + } + + if (config.required) { + var emit = { + stdout: function (data) { + bus.emit('stdout', data); + }, + stderr: function (data) { + bus.emit('stderr', data); + }, + }; + + // now work out what to bind to... 
+ if (config.options.stdout) { + child.on('stdout', emit.stdout).on('stderr', emit.stderr); + } else { + child.stdout.on('data', emit.stdout); + child.stderr.on('data', emit.stderr); + + bus.stdout = child.stdout; + bus.stderr = child.stderr; + } + + if (shouldFork) { + child.on('message', function (message, sendHandle) { + bus.emit('message', message, sendHandle); + }); + } + } + + bus.emit('start'); + + utils.log.detail('child pid: ' + child.pid); + + child.on('error', function (error) { + bus.emit('error', error); + if (error.code === 'ENOENT') { + utils.log.error('unable to run executable: "' + cmd.executable + '"'); + process.exit(1); + } else { + utils.log.error('failed to start child process: ' + error.code); + throw error; + } + }); + + child.on('exit', function (code, signal) { + if (child && child.stdin) { + process.stdin.unpipe(child.stdin); + } + + if (code === 127) { + utils.log.error( + 'failed to start process, "' + cmd.executable + '" exec not found' + ); + bus.emit('error', code); + process.exit(); + } + + // If the command failed with code 2, it may or may not be a syntax error + // See: http://git.io/fNOAR + // We will only assume a parse error, if the child failed quickly + if (code === 2 && Date.now() < config.lastStarted + 500) { + utils.log.error('process failed, unhandled exit code (2)'); + utils.log.error(''); + utils.log.error('Either the command has a syntax error,'); + utils.log.error('or it is exiting with reserved code 2.'); + utils.log.error(''); + utils.log.error('To keep nodemon running even after a code 2,'); + utils.log.error('add this to the end of your command: || exit 1'); + utils.log.error(''); + utils.log.error('Read more here: https://git.io/fNOAG'); + utils.log.error(''); + utils.log.error('nodemon will stop now so that you can fix the command.'); + utils.log.error(''); + bus.emit('error', code); + process.exit(); + } + + // In case we killed the app ourselves, set the signal thusly + if (killedAfterChange) { + killedAfterChange = false; + signal = config.signal; + } + // this is nasty, but it gives it windows support + if (utils.isWindows && signal === 'SIGTERM') { + signal = config.signal; + } + + if (signal === config.signal || code === 0) { + // this was a clean exit, so emit exit, rather than crash + debug('bus.emit(exit) via ' + config.signal); + bus.emit('exit', signal); + + // exit the monitor, but do it gracefully + if (signal === config.signal) { + return restart(); + } + + if (code === 0) { + // clean exit - wait until file change to restart + if (runCmd) { + utils.log.status('clean exit - waiting for changes before restart'); + } + child = null; + } + } else { + bus.emit('crash'); + if (options.exitcrash) { + utils.log.fail('app crashed'); + if (!config.required) { + process.exit(1); + } + } else { + utils.log.fail( + 'app crashed - waiting for file changes before' + ' starting...' 
+ ); + child = null; + } + } + + if (config.options.restartable) { + // stdin needs to kick in again to be able to listen to the + // restart command + process.stdin.resume(); + } + }); + + // moved the run.kill outside to handle both the cases + // intial start + // no start + + // connect stdin to the child process (options.stdin is on by default) + if (options.stdin) { + process.stdin.resume(); + // FIXME decide whether or not we need to decide the encoding + // process.stdin.setEncoding('utf8'); + + // swallow the stdin error if it happens + // ref: https://github.com/remy/nodemon/issues/1195 + if (hasStdio) { + child.stdin.on('error', () => {}); + process.stdin.pipe(child.stdin); + } else { + if (child.stdout) { + child.stdout.pipe(process.stdout); + } else { + utils.log.error( + 'running an unsupported version of node ' + process.version + ); + utils.log.error( + 'nodemon may not work as expected - ' + + 'please consider upgrading to LTS' + ); + } + } + + bus.once('exit', function () { + if (child && process.stdin.unpipe) { + // node > 0.8 + process.stdin.unpipe(child.stdin); + } + }); + } + + debug('start watch on: %s', config.options.watch); + if (config.options.watch !== false) { + watch(); + } +} + +function waitForSubProcesses(pid, callback) { + debug('checking ps tree for pids of ' + pid); + psTree(pid, (err, pids) => { + if (!pids.length) { + return callback(); + } + + utils.log.status( + `still waiting for ${pids.length} sub-process${ + pids.length > 2 ? 'es' : '' + } to finish...` + ); + setTimeout(() => waitForSubProcesses(pid, callback), 1000); + }); +} + +function kill(child, signal, callback) { + if (!callback) { + callback = noop; + } + + if (utils.isWindows) { + const taskKill = () => { + try { + exec('taskkill /pid ' + child.pid + ' /T /F'); + } catch (e) { + utils.log.error('Could not shutdown sub process cleanly'); + } + }; + + // We are handling a 'SIGKILL' , 'SIGUSR2' and 'SIGUSR1' POSIX signal under Windows the + // same way it is handled on a UNIX system: We are performing + // a hard shutdown without waiting for the process to clean-up. + if ( + signal === 'SIGKILL' || + osRelease < 10 || + signal === 'SIGUSR2' || + signal === 'SIGUSR1' + ) { + debug('terminating process group by force: %s', child.pid); + + // We are using the taskkill utility to terminate the whole + // process group ('/t') of the child ('/pid') by force ('/f'). + // We need to end all sub processes, because the 'child' + // process in this context is actually a cmd.exe wrapper. + taskKill(); + callback(); + return; + } + + try { + // We are using the Windows Management Instrumentation Command-line + // (wmic.exe) to resolve the sub-child process identifier, because the + // 'child' process in this context is actually a cmd.exe wrapper. + // We want to send the termination signal directly to the node process. + // The '2> nul' silences the no process found error message. + const resultBuffer = execSync( + `wmic process where (ParentProcessId=${child.pid}) get ProcessId 2> nul` + ); + const result = resultBuffer.toString().match(/^[0-9]+/m); + + // If there is no sub-child process we fall back to the child process. + const processId = Array.isArray(result) ? result[0] : child.pid; + + debug('sending kill signal SIGINT to process: %s', processId); + + // We are using the standalone 'windows-kill' executable to send the + // standard POSIX signal 'SIGINT' to the node process. This fixes #1720. 
+ const windowsKill = path.normalize( + `${__dirname}/../../bin/windows-kill.exe` + ); + + // We have to detach the 'windows-kill' execution completely from this + // process group to avoid terminating the nodemon process itself. + // See: https://github.com/alirdn/windows-kill#how-it-works--limitations + // + // Therefore we are using 'start' to create a new cmd.exe context. + // The '/min' option hides the new terminal window and the '/wait' + // option lets the process wait for the command to finish. + + execSync( + `start "windows-kill" /min /wait "${windowsKill}" -SIGINT ${processId}` + ); + } catch (e) { + taskKill(); + } + callback(); + } else { + // we use psTree to kill the full subtree of nodemon, because when + // spawning processes like `coffee` under the `--debug` flag, it'll spawn + // it's own child, and that can't be killed by nodemon, so psTree gives us + // an array of PIDs that have spawned under nodemon, and we send each the + // configured signal (default: SIGUSR2) signal, which fixes #335 + // note that psTree also works if `ps` is missing by looking in /proc + let sig = signal.replace('SIG', ''); + + psTree(child.pid, function (err, pids) { + // if ps isn't native to the OS, then we need to send the numeric value + // for the signal during the kill, `signals` is a lookup table for that. + if (!psTree.hasPS) { + sig = signals[signal]; + } + + // the sub processes need to be killed from smallest to largest + debug('sending kill signal to ' + pids.join(', ')); + + child.kill(signal); + + pids.sort().forEach((pid) => exec(`kill -${sig} ${pid}`, noop)); + + waitForSubProcesses(child.pid, () => { + // finally kill the main user process + exec(`kill -${sig} ${child.pid}`, callback); + }); + }); + } +} + +run.kill = function (noRestart, callback) { + // I hate code like this :( - Remy (author of said code) + if (typeof noRestart === 'function') { + callback = noRestart; + noRestart = false; + } + + if (!callback) { + callback = noop; + } + + if (child !== null) { + // if the stdin piping is on, we need to unpipe, but also close stdin on + // the child, otherwise linux can throw EPIPE or ECONNRESET errors. + if (run.options.stdin) { + process.stdin.unpipe(child.stdin); + } + + // For the on('exit', ...) handler above the following looks like a + // crash, so we set the killedAfterChange flag if a restart is planned + if (!noRestart) { + killedAfterChange = true; + } + + /* Now kill the entire subtree of processes belonging to nodemon */ + var oldPid = child.pid; + if (child) { + kill(child, config.signal, function () { + // this seems to fix the 0.11.x issue with the "rs" restart command, + // though I'm unsure why. it seems like more data is streamed in to + // stdin after we close. + if (child && run.options.stdin && child.stdin && oldPid === child.pid) { + child.stdin.end(); + } + callback(); + }); + } + } else if (!noRestart) { + // if there's no child, then we need to manually start the process + // this is because as there was no child, the child.on('exit') event + // handler doesn't exist which would normally trigger the restart. + bus.once('start', callback); + run.restart(); + } else { + callback(); + } +}; + +run.restart = noop; + +bus.on('quit', function onQuit(code) { + if (code === undefined) { + code = 0; + } + + // remove event listener + var exitTimer = null; + var exit = function () { + clearTimeout(exitTimer); + exit = noop; // null out in case of race condition + child = null; + if (!config.required) { + // Execute all other quit listeners. 
+ bus.listeners('quit').forEach(function (listener) { + if (listener !== onQuit) { + listener(); + } + }); + process.exit(code); + } else { + bus.emit('exit'); + } + }; + + // if we're not running already, don't bother with trying to kill + if (config.run === false) { + return exit(); + } + + // immediately try to stop any polling + config.run = false; + + if (child) { + // give up waiting for the kids after 10 seconds + exitTimer = setTimeout(exit, 10 * 1000); + child.removeAllListeners('exit'); + child.once('exit', exit); + + kill(child, 'SIGINT'); + } else { + exit(); + } +}); + +bus.on('restart', function () { + // run.kill will send a SIGINT to the child process, which will cause it + // to terminate, which in turn uses the 'exit' event handler to restart + run.kill(); +}); + +// remove the child file on exit +process.on('exit', function () { + utils.log.detail('exiting'); + if (child) { + child.kill(); + } +}); + +// because windows borks when listening for the SIG* events +if (!utils.isWindows) { + bus.once('boot', () => { + // usual suspect: ctrl+c exit + process.once('SIGINT', () => bus.emit('quit', 130)); + process.once('SIGTERM', () => { + bus.emit('quit', 143); + if (child) { + child.kill('SIGTERM'); + } + }); + }); +} + +module.exports = run; diff --git a/node_modules/nodemon/lib/monitor/signals.js b/node_modules/nodemon/lib/monitor/signals.js new file mode 100644 index 0000000..daff6e0 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/signals.js @@ -0,0 +1,34 @@ +module.exports = { + SIGHUP: 1, + SIGINT: 2, + SIGQUIT: 3, + SIGILL: 4, + SIGTRAP: 5, + SIGABRT: 6, + SIGBUS: 7, + SIGFPE: 8, + SIGKILL: 9, + SIGUSR1: 10, + SIGSEGV: 11, + SIGUSR2: 12, + SIGPIPE: 13, + SIGALRM: 14, + SIGTERM: 15, + SIGSTKFLT: 16, + SIGCHLD: 17, + SIGCONT: 18, + SIGSTOP: 19, + SIGTSTP: 20, + SIGTTIN: 21, + SIGTTOU: 22, + SIGURG: 23, + SIGXCPU: 24, + SIGXFSZ: 25, + SIGVTALRM: 26, + SIGPROF: 27, + SIGWINCH: 28, + SIGIO: 29, + SIGPWR: 30, + SIGSYS: 31, + SIGRTMIN: 35, +} diff --git a/node_modules/nodemon/lib/monitor/watch.js b/node_modules/nodemon/lib/monitor/watch.js new file mode 100644 index 0000000..d0ac7fe --- /dev/null +++ b/node_modules/nodemon/lib/monitor/watch.js @@ -0,0 +1,244 @@ +module.exports.watch = watch; +module.exports.resetWatchers = resetWatchers; + +var debug = require('debug')('nodemon:watch'); +var debugRoot = require('debug')('nodemon'); +var chokidar = require('chokidar'); +var undefsafe = require('undefsafe'); +var config = require('../config'); +var path = require('path'); +var utils = require('../utils'); +var bus = utils.bus; +var match = require('./match'); +var watchers = []; +var debouncedBus; + +bus.on('reset', resetWatchers); + +function resetWatchers() { + debugRoot('resetting watchers'); + watchers.forEach(function (watcher) { + watcher.close(); + }); + watchers = []; +} + +function watch() { + if (watchers.length) { + debug('early exit on watch, still watching (%s)', watchers.length); + return; + } + + var dirs = [].slice.call(config.dirs); + + debugRoot('start watch on: %s', dirs.join(', ')); + const rootIgnored = config.options.ignore; + debugRoot('ignored', rootIgnored); + + var watchedFiles = []; + + const promise = new Promise(function (resolve) { + const dotFilePattern = /[/\\]\./; + var ignored = match.rulesToMonitor( + [], // not needed + Array.from(rootIgnored), + config + ).map(pattern => pattern.slice(1)); + + const addDotFile = dirs.filter(dir => dir.match(dotFilePattern)); + + // don't ignore dotfiles if explicitly watched. 
+ if (addDotFile.length === 0) { + ignored.push(dotFilePattern); + } + + var watchOptions = { + ignorePermissionErrors: true, + ignored: ignored, + persistent: true, + usePolling: config.options.legacyWatch || false, + interval: config.options.pollingInterval, + // note to future developer: I've gone back and forth on adding `cwd` + // to the props and in some cases it fixes bugs but typically it causes + // bugs elsewhere (since nodemon is used is so many ways). the final + // decision is to *not* use it at all and work around it + // cwd: ... + }; + + if (utils.isWindows) { + watchOptions.disableGlobbing = true; + } + + if (utils.isIBMi) { + watchOptions.usePolling = true; + } + + if (process.env.TEST) { + watchOptions.useFsEvents = false; + } + + var watcher = chokidar.watch( + dirs, + Object.assign({}, watchOptions, config.options.watchOptions || {}) + ); + + watcher.ready = false; + + var total = 0; + + watcher.on('change', filterAndRestart); + watcher.on('unlink', filterAndRestart); + watcher.on('add', function (file) { + if (watcher.ready) { + return filterAndRestart(file); + } + + watchedFiles.push(file); + bus.emit('watching', file); + debug('chokidar watching: %s', file); + }); + watcher.on('ready', function () { + watchedFiles = Array.from(new Set(watchedFiles)); // ensure no dupes + total = watchedFiles.length; + watcher.ready = true; + resolve(total); + debugRoot('watch is complete'); + }); + + watcher.on('error', function (error) { + if (error.code === 'EINVAL') { + utils.log.error( + 'Internal watch failed. Likely cause: too many ' + + 'files being watched (perhaps from the root of a drive?\n' + + 'See https://github.com/paulmillr/chokidar/issues/229 for details' + ); + } else { + utils.log.error('Internal watch failed: ' + error.message); + process.exit(1); + } + }); + + watchers.push(watcher); + }); + + return promise.catch(e => { + // this is a core error and it should break nodemon - so I have to break + // out of a promise using the setTimeout + setTimeout(() => { + throw e; + }); + }).then(function () { + utils.log.detail(`watching ${watchedFiles.length} file${ + watchedFiles.length === 1 ? 
'' : 's'}`); + return watchedFiles; + }); +} + +function filterAndRestart(files) { + if (!Array.isArray(files)) { + files = [files]; + } + + if (files.length) { + var cwd = process.cwd(); + if (this.options && this.options.cwd) { + cwd = this.options.cwd; + } + + utils.log.detail( + 'files triggering change check: ' + + files + .map(file => { + const res = path.relative(cwd, file); + return res; + }) + .join(', ') + ); + + // make sure the path is right and drop an empty + // filenames (sometimes on windows) + files = files.filter(Boolean).map(file => { + return path.relative(process.cwd(), path.relative(cwd, file)); + }); + + if (utils.isWindows) { + // ensure the drive letter is in uppercase (c:\foo -> C:\foo) + files = files.map(f => { + if (f.indexOf(':') === -1) { return f; } + return f[0].toUpperCase() + f.slice(1); + }); + } + + + debug('filterAndRestart on', files); + + var matched = match( + files, + config.options.monitor, + undefsafe(config, 'options.execOptions.ext') + ); + + debug('matched?', JSON.stringify(matched)); + + // if there's no matches, then test to see if the changed file is the + // running script, if so, let's allow a restart + if (config.options.execOptions && config.options.execOptions.script) { + const script = path.resolve(config.options.execOptions.script); + if (matched.result.length === 0 && script) { + const length = script.length; + files.find(file => { + if (file.substr(-length, length) === script) { + matched = { + result: [file], + total: 1, + }; + return true; + } + }); + } + } + + utils.log.detail( + 'changes after filters (before/after): ' + + [files.length, matched.result.length].join('/') + ); + + // reset the last check so we're only looking at recently modified files + config.lastStarted = Date.now(); + + if (matched.result.length) { + if (config.options.delay > 0) { + utils.log.detail('delaying restart for ' + config.options.delay + 'ms'); + if (debouncedBus === undefined) { + debouncedBus = debounce(restartBus, config.options.delay); + } + debouncedBus(matched); + } else { + return restartBus(matched); + } + } + } +} + +function restartBus(matched) { + utils.log.status('restarting due to changes...'); + matched.result.map(file => { + utils.log.detail(path.relative(process.cwd(), file)); + }); + + if (config.options.verbose) { + utils.log._log(''); + } + + bus.emit('restart', matched.result); +} + +function debounce(fn, delay) { + var timer = null; + return function () { + const context = this; + const args = arguments; + clearTimeout(timer); + timer = setTimeout(() =>fn.apply(context, args), delay); + }; +} diff --git a/node_modules/nodemon/lib/nodemon.js b/node_modules/nodemon/lib/nodemon.js new file mode 100644 index 0000000..278ea65 --- /dev/null +++ b/node_modules/nodemon/lib/nodemon.js @@ -0,0 +1,315 @@ +var debug = require('debug')('nodemon'); +var path = require('path'); +var monitor = require('./monitor'); +var cli = require('./cli'); +var version = require('./version'); +var util = require('util'); +var utils = require('./utils'); +var bus = utils.bus; +var help = require('./help'); +var config = require('./config'); +var spawn = require('./spawn'); +const defaults = require('./config/defaults') +var eventHandlers = {}; + +// this is fairly dirty, but theoretically sound since it's part of the +// stable module API +config.required = utils.isRequired; + +/** + * @param {NodemonSettings} settings + * @returns {Nodemon} + */ +function nodemon(settings) { + bus.emit('boot'); + nodemon.reset(); + + // allow the cli string as the argument 
to nodemon, and allow for + // `node nodemon -V app.js` or just `-V app.js` + if (typeof settings === 'string') { + settings = settings.trim(); + if (settings.indexOf('node') !== 0) { + if (settings.indexOf('nodemon') !== 0) { + settings = 'nodemon ' + settings; + } + settings = 'node ' + settings; + } + settings = cli.parse(settings); + } + + // set the debug flag as early as possible to get all the detailed logging + if (settings.verbose) { + utils.debug = true; + } + + if (settings.help) { + if (process.stdout.isTTY) { + process.stdout._handle.setBlocking(true); // nodejs/node#6456 + } + console.log(help(settings.help)); + if (!config.required) { + process.exit(0); + } + } + + if (settings.version) { + version().then(function (v) { + console.log(v); + if (!config.required) { + process.exit(0); + } + }); + return; + } + + // nodemon tools like grunt-nodemon. This affects where + // the script is being run from, and will affect where + // nodemon looks for the nodemon.json files + if (settings.cwd) { + // this is protection to make sure we haven't dont the chdir already... + // say like in cli/parse.js (which is where we do this once already!) + if (process.cwd() !== path.resolve(config.system.cwd, settings.cwd)) { + process.chdir(settings.cwd); + } + } + + const cwd = process.cwd(); + + config.load(settings, function (config) { + if (!config.options.dump && !config.options.execOptions.script && + config.options.execOptions.exec === 'node') { + if (!config.required) { + console.log(help('usage')); + process.exit(); + } + return; + } + + // before we print anything, update the colour setting on logging + utils.colours = config.options.colours; + + // always echo out the current version + utils.log.info(version.pinned); + + const cwd = process.cwd(); + + if (config.options.cwd) { + utils.log.detail('process root: ' + cwd); + } + + config.loaded.map(file => file.replace(cwd, '.')).forEach(file => { + utils.log.detail('reading config ' + file); + }); + + if (config.options.stdin && config.options.restartable) { + // allow nodemon to restart when the use types 'rs\n' + process.stdin.resume(); + process.stdin.setEncoding('utf8'); + process.stdin.on('data', data => { + const str = data.toString().trim().toLowerCase(); + + // if the keys entered match the restartable value, then restart! + if (str === config.options.restartable) { + bus.emit('restart'); + } else if (data.charCodeAt(0) === 12) { // ctrl+l + console.clear(); + } + }); + } else if (config.options.stdin) { + // so let's make sure we don't eat the key presses + // but also, since we're wrapping, watch out for + // special keys, like ctrl+c x 2 or '.exit' or ctrl+d or ctrl+l + var ctrlC = false; + var buffer = ''; + + process.stdin.on('data', function (data) { + data = data.toString(); + buffer += data; + const chr = data.charCodeAt(0); + + // if restartable, echo back + if (chr === 3) { + if (ctrlC) { + process.exit(0); + } + + ctrlC = true; + return; + } else if (buffer === '.exit' || chr === 4) { // ctrl+d + process.exit(); + } else if (chr === 13 || chr === 10) { // enter / carriage return + buffer = ''; + } else if (chr === 12) { // ctrl+l + console.clear(); + buffer = ''; + } + ctrlC = false; + }); + if (process.stdin.setRawMode) { + process.stdin.setRawMode(true); + } + } + + if (config.options.restartable) { + utils.log.info('to restart at any time, enter `' + + config.options.restartable + '`'); + } + + if (!config.required) { + const restartSignal = config.options.signal === 'SIGUSR2' ? 
'SIGHUP' : 'SIGUSR2'; + process.on(restartSignal, nodemon.restart); + utils.bus.on('error', () => { + utils.log.fail((new Error().stack)); + }); + utils.log.detail((config.options.restartable ? 'or ' : '') + 'send ' + + restartSignal + ' to ' + process.pid + ' to restart'); + } + + const ignoring = config.options.monitor.map(function (rule) { + if (rule.slice(0, 1) !== '!') { + return false; + } + + rule = rule.slice(1); + + // don't notify of default ignores + if (defaults.ignoreRoot.indexOf(rule) !== -1) { + return false; + return rule.slice(3).slice(0, -3); + } + + if (rule.startsWith(cwd)) { + return rule.replace(cwd, '.'); + } + + return rule; + }).filter(Boolean).join(' '); + if (ignoring) utils.log.detail('ignoring: ' + ignoring); + + utils.log.info('watching path(s): ' + config.options.monitor.map(function (rule) { + if (rule.slice(0, 1) !== '!') { + try { + rule = path.relative(process.cwd(), rule); + } catch (e) {} + + return rule; + } + + return false; + }).filter(Boolean).join(' ')); + + utils.log.info('watching extensions: ' + (config.options.execOptions.ext || '(all)')); + + if (config.options.dump) { + utils.log._log('log', '--------------'); + utils.log._log('log', 'node: ' + process.version); + utils.log._log('log', 'nodemon: ' + version.pinned); + utils.log._log('log', 'command: ' + process.argv.join(' ')); + utils.log._log('log', 'cwd: ' + cwd); + utils.log._log('log', ['OS:', process.platform, process.arch].join(' ')); + utils.log._log('log', '--------------'); + utils.log._log('log', util.inspect(config, { depth: null })); + utils.log._log('log', '--------------'); + if (!config.required) { + process.exit(); + } + + return; + } + + config.run = true; + + if (config.options.stdout === false) { + nodemon.on('start', function () { + nodemon.stdout = bus.stdout; + nodemon.stderr = bus.stderr; + + bus.emit('readable'); + }); + } + + if (config.options.events && Object.keys(config.options.events).length) { + Object.keys(config.options.events).forEach(function (key) { + utils.log.detail('bind ' + key + ' -> `' + + config.options.events[key] + '`'); + nodemon.on(key, function () { + if (config.options && config.options.events) { + spawn(config.options.events[key], config, + [].slice.apply(arguments)); + } + }); + }); + } + + monitor.run(config.options); + + }); + + return nodemon; +} + +nodemon.restart = function () { + utils.log.status('restarting child process'); + bus.emit('restart'); + return nodemon; +}; + +nodemon.addListener = nodemon.on = function (event, handler) { + if (!eventHandlers[event]) { eventHandlers[event] = []; } + eventHandlers[event].push(handler); + bus.on(event, handler); + return nodemon; +}; + +nodemon.once = function (event, handler) { + if (!eventHandlers[event]) { eventHandlers[event] = []; } + eventHandlers[event].push(handler); + bus.once(event, function () { + debug('bus.once(%s)', event); + eventHandlers[event].splice(eventHandlers[event].indexOf(handler), 1); + handler.apply(this, arguments); + }); + return nodemon; +}; + +nodemon.emit = function () { + bus.emit.apply(bus, [].slice.call(arguments)); + return nodemon; +}; + +nodemon.removeAllListeners = function (event) { + // unbind only the `nodemon.on` event handlers + Object.keys(eventHandlers).filter(function (e) { + return event ? 
e === event : true; + }).forEach(function (event) { + eventHandlers[event].forEach(function (handler) { + bus.removeListener(event, handler); + eventHandlers[event].splice(eventHandlers[event].indexOf(handler), 1); + }); + }); + + return nodemon; +}; + +nodemon.reset = function (done) { + bus.emit('reset', done); +}; + +bus.on('reset', function (done) { + debug('reset'); + nodemon.removeAllListeners(); + monitor.run.kill(true, function () { + utils.reset(); + config.reset(); + config.run = false; + if (done) { + done(); + } + }); +}); + +// expose the full config +nodemon.config = config; + +module.exports = nodemon; + diff --git a/node_modules/nodemon/lib/rules/add.js b/node_modules/nodemon/lib/rules/add.js new file mode 100644 index 0000000..de85bb7 --- /dev/null +++ b/node_modules/nodemon/lib/rules/add.js @@ -0,0 +1,89 @@ +'use strict'; + +var utils = require('../utils'); + +// internal +var reEscComments = /\\#/g; +// note that '^^' is used in place of escaped comments +var reUnescapeComments = /\^\^/g; +var reComments = /#.*$/; +var reEscapeChars = /[.|\-[\]()\\]/g; +var reAsterisk = /\*/g; + +module.exports = add; + +/** + * Converts file patterns or regular expressions to nodemon + * compatible RegExp matching rules. Note: the `rules` argument + * object is modified to include the new rule and new RegExp + * + * ### Example: + * + * var rules = { watch: [], ignore: [] }; + * add(rules, 'watch', '*.js'); + * add(rules, 'ignore', '/public/'); + * add(rules, 'watch', ':(\d)*\.js'); // note: string based regexp + * add(rules, 'watch', /\d*\.js/); + * + * @param {Object} rules containing `watch` and `ignore`. Also updated during + * execution + * @param {String} which must be either "watch" or "ignore" + * @param {String|RegExp} the actual rule. + */ +function add(rules, which, rule) { + if (!{ ignore: 1, watch: 1}[which]) { + throw new Error('rules/index.js#add requires "ignore" or "watch" as the ' + + 'first argument'); + } + + if (Array.isArray(rule)) { + rule.forEach(function (rule) { + add(rules, which, rule); + }); + return; + } + + // support the rule being a RegExp, but reformat it to + // the custom : format that we're working with. 
+ if (rule instanceof RegExp) { + // rule = ':' + rule.toString().replace(/^\/(.*?)\/$/g, '$1'); + utils.log.error('RegExp format no longer supported, but globs are.'); + return; + } + + // remove comments and trim lines + // this mess of replace methods is escaping "\#" to allow for emacs temp files + + // first up strip comments and remove blank head or tails + rule = (rule || '').replace(reEscComments, '^^') + .replace(reComments, '') + .replace(reUnescapeComments, '#').trim(); + + var regexp = false; + + if (typeof rule === 'string' && rule.substring(0, 1) === ':') { + rule = rule.substring(1); + utils.log.error('RegExp no longer supported: ' + rule); + regexp = true; + } else if (rule.length === 0) { + // blank line (or it was a comment) + return; + } + + if (regexp) { + // rules[which].push(rule); + } else { + // rule = rule.replace(reEscapeChars, '\\$&') + // .replace(reAsterisk, '.*'); + + rules[which].push(rule); + // compile a regexp of all the rules for this ignore or watch + var re = rules[which].map(function (rule) { + return rule.replace(reEscapeChars, '\\$&') + .replace(reAsterisk, '.*'); + }).join('|'); + + // used for the directory matching + rules[which].re = new RegExp(re); + } +} diff --git a/node_modules/nodemon/lib/rules/index.js b/node_modules/nodemon/lib/rules/index.js new file mode 100644 index 0000000..04aa92f --- /dev/null +++ b/node_modules/nodemon/lib/rules/index.js @@ -0,0 +1,53 @@ +'use strict'; +var utils = require('../utils'); +var add = require('./add'); +var parse = require('./parse'); + +// exported +var rules = { ignore: [], watch: [] }; + +/** + * Loads a nodemon config file and populates the ignore + * and watch rules with it's contents, and calls callback + * with the new rules + * + * @param {String} filename + * @param {Function} callback + */ +function load(filename, callback) { + parse(filename, function (err, result) { + if (err) { + // we should have bombed already, but + utils.log.error(err); + callback(err); + } + + if (result.raw) { + result.raw.forEach(add.bind(null, rules, 'ignore')); + } else { + result.ignore.forEach(add.bind(null, rules, 'ignore')); + result.watch.forEach(add.bind(null, rules, 'watch')); + } + + callback(null, rules); + }); +} + +module.exports = { + reset: function () { // just used for testing + rules.ignore.length = rules.watch.length = 0; + delete rules.ignore.re; + delete rules.watch.re; + }, + load: load, + ignore: { + test: add.bind(null, rules, 'ignore'), + add: add.bind(null, rules, 'ignore'), + }, + watch: { + test: add.bind(null, rules, 'watch'), + add: add.bind(null, rules, 'watch'), + }, + add: add.bind(null, rules), + rules: rules, +}; \ No newline at end of file diff --git a/node_modules/nodemon/lib/rules/parse.js b/node_modules/nodemon/lib/rules/parse.js new file mode 100644 index 0000000..6e1cace --- /dev/null +++ b/node_modules/nodemon/lib/rules/parse.js @@ -0,0 +1,43 @@ +'use strict'; +var fs = require('fs'); + +/** + * Parse the nodemon config file, supporting both old style + * plain text config file, and JSON version of the config + * + * @param {String} filename + * @param {Function} callback + */ +function parse(filename, callback) { + var rules = { + ignore: [], + watch: [], + }; + + fs.readFile(filename, 'utf8', function (err, content) { + + if (err) { + return callback(err); + } + + var json = null; + try { + json = JSON.parse(content); + } catch (e) {} + + if (json !== null) { + rules = { + ignore: json.ignore || [], + watch: json.watch || [], + }; + + return callback(null, rules); + } + + // 
otherwise return the raw file + return callback(null, { raw: content.split(/\n/) }); + }); +} + +module.exports = parse; + diff --git a/node_modules/nodemon/lib/spawn.js b/node_modules/nodemon/lib/spawn.js new file mode 100644 index 0000000..256734a --- /dev/null +++ b/node_modules/nodemon/lib/spawn.js @@ -0,0 +1,74 @@ +const path = require('path'); +const utils = require('./utils'); +const merge = utils.merge; +const bus = utils.bus; +const spawn = require('child_process').spawn; + +module.exports = function spawnCommand(command, config, eventArgs) { + var stdio = ['pipe', 'pipe', 'pipe']; + + if (config.options.stdout) { + stdio = ['pipe', process.stdout, process.stderr]; + } + + const env = merge(process.env, { FILENAME: eventArgs[0] }); + + var sh = 'sh'; + var shFlag = '-c'; + var spawnOptions = { + env: merge(config.options.execOptions.env, env), + stdio: stdio, + }; + + if (!Array.isArray(command)) { + command = [command]; + } + + if (utils.isWindows) { + // if the exec includes a forward slash, reverse it for windows compat + // but *only* apply to the first command, and none of the arguments. + // ref #1251 and #1236 + command = command.map(executable => { + if (executable.indexOf('/') === -1) { + return executable; + } + + return executable.split(' ').map((e, i) => { + if (i === 0) { + return path.normalize(e); + } + return e; + }).join(' '); + }); + // taken from npm's cli: https://git.io/vNFD4 + sh = process.env.comspec || 'cmd'; + shFlag = '/d /s /c'; + spawnOptions.windowsVerbatimArguments = true; + spawnOptions.windowsHide = true; + } + + const args = command.join(' '); + const child = spawn(sh, [shFlag, args], spawnOptions); + + if (config.required) { + var emit = { + stdout: function (data) { + bus.emit('stdout', data); + }, + stderr: function (data) { + bus.emit('stderr', data); + }, + }; + + // now work out what to bind to... 
+ if (config.options.stdout) { + child.on('stdout', emit.stdout).on('stderr', emit.stderr); + } else { + child.stdout.on('data', emit.stdout); + child.stderr.on('data', emit.stderr); + + bus.stdout = child.stdout; + bus.stderr = child.stderr; + } + } +}; diff --git a/node_modules/nodemon/lib/utils/bus.js b/node_modules/nodemon/lib/utils/bus.js new file mode 100644 index 0000000..4e120c5 --- /dev/null +++ b/node_modules/nodemon/lib/utils/bus.js @@ -0,0 +1,44 @@ +var events = require('events'); +var debug = require('debug')('nodemon'); +var util = require('util'); + +var Bus = function () { + events.EventEmitter.call(this); +}; + +util.inherits(Bus, events.EventEmitter); + +var bus = new Bus(); + +// /* +var collected = {}; +bus.on('newListener', function (event) { + debug('bus new listener: %s (%s)', event, bus.listeners(event).length); + if (!collected[event]) { + collected[event] = true; + bus.on(event, function () { + debug('bus emit: %s', event); + }); + } +}); + +// */ + +// proxy process messages (if forked) to the bus +process.on('message', function (event) { + debug('process.message(%s)', event); + bus.emit(event); +}); + +var emit = bus.emit; + +// if nodemon was spawned via a fork, allow upstream communication +// via process.send +if (process.send) { + bus.emit = function (event, data) { + process.send({ type: event, data: data }); + emit.apply(bus, arguments); + }; +} + +module.exports = bus; diff --git a/node_modules/nodemon/lib/utils/clone.js b/node_modules/nodemon/lib/utils/clone.js new file mode 100644 index 0000000..6ba6330 --- /dev/null +++ b/node_modules/nodemon/lib/utils/clone.js @@ -0,0 +1,40 @@ +module.exports = clone; + +// via http://stackoverflow.com/a/728694/22617 +function clone(obj) { + // Handle the 3 simple types, and null or undefined + if (null === obj || 'object' !== typeof obj) { + return obj; + } + + var copy; + + // Handle Date + if (obj instanceof Date) { + copy = new Date(); + copy.setTime(obj.getTime()); + return copy; + } + + // Handle Array + if (obj instanceof Array) { + copy = []; + for (var i = 0, len = obj.length; i < len; i++) { + copy[i] = clone(obj[i]); + } + return copy; + } + + // Handle Object + if (obj instanceof Object) { + copy = {}; + for (var attr in obj) { + if (obj.hasOwnProperty && obj.hasOwnProperty(attr)) { + copy[attr] = clone(obj[attr]); + } + } + return copy; + } + + throw new Error('Unable to copy obj! 
Its type isn\'t supported.'); +} \ No newline at end of file diff --git a/node_modules/nodemon/lib/utils/colour.js b/node_modules/nodemon/lib/utils/colour.js new file mode 100644 index 0000000..8c1b590 --- /dev/null +++ b/node_modules/nodemon/lib/utils/colour.js @@ -0,0 +1,26 @@ +/** + * Encodes a string in a colour: red, yellow or green + * @param {String} c colour to highlight in + * @param {String} str the string to encode + * @return {String} coloured string for terminal printing + */ +function colour(c, str) { + return (colour[c] || colour.black) + str + colour.black; +} + +function strip(str) { + re.lastIndex = 0; // reset position + return str.replace(re, ''); +} + +colour.red = '\x1B[31m'; +colour.yellow = '\x1B[33m'; +colour.green = '\x1B[32m'; +colour.black = '\x1B[39m'; + +var reStr = Object.keys(colour).map(key => colour[key]).join('|'); +var re = new RegExp(('(' + reStr + ')').replace(/\[/g, '\\['), 'g'); + +colour.strip = strip; + +module.exports = colour; diff --git a/node_modules/nodemon/lib/utils/index.js b/node_modules/nodemon/lib/utils/index.js new file mode 100644 index 0000000..9265121 --- /dev/null +++ b/node_modules/nodemon/lib/utils/index.js @@ -0,0 +1,103 @@ +var noop = function () { }; +var path = require('path'); +const semver = require('semver'); +var version = process.versions.node.split('.') || [null, null, null]; + +var utils = (module.exports = { + semver: semver, + satisfies: test => semver.satisfies(process.versions.node, test), + version: { + major: parseInt(version[0] || 0, 10), + minor: parseInt(version[1] || 0, 10), + patch: parseInt(version[2] || 0, 10), + }, + clone: require('./clone'), + merge: require('./merge'), + bus: require('./bus'), + isWindows: process.platform === 'win32', + isMac: process.platform === 'darwin', + isLinux: process.platform === 'linux', + isIBMi: require('os').type() === 'OS400', + isRequired: (function () { + var p = module.parent; + while (p) { + // in electron.js engine it happens + if (!p.filename) { + return true; + } + if (p.filename.indexOf('bin' + path.sep + 'nodemon.js') !== -1) { + return false; + } + p = p.parent; + } + + return true; + })(), + home: process.env.HOME || process.env.HOMEPATH, + quiet: function () { + // nukes the logging + if (!this.debug) { + for (var method in utils.log) { + if (typeof utils.log[method] === 'function') { + utils.log[method] = noop; + } + } + } + }, + reset: function () { + if (!this.debug) { + for (var method in utils.log) { + if (typeof utils.log[method] === 'function') { + delete utils.log[method]; + } + } + } + this.debug = false; + }, + regexpToText: function (t) { + return t + .replace(/\.\*\\./g, '*.') + .replace(/\\{2}/g, '^^') + .replace(/\\/g, '') + .replace(/\^\^/g, '\\'); + }, + stringify: function (exec, args) { + // serializes an executable string and array of arguments into a string + args = args || []; + + return [exec] + .concat( + args.map(function (arg) { + // if an argument contains a space, we want to show it with quotes + // around it to indicate that it is a single argument + if (arg.length > 0 && arg.indexOf(' ') === -1) { + return arg; + } + // this should correctly escape nested quotes + return JSON.stringify(arg); + }) + ) + .join(' ') + .trim(); + }, +}); + +utils.log = require('./log')(utils.isRequired); + +Object.defineProperty(utils, 'debug', { + set: function (value) { + this.log.debug = value; + }, + get: function () { + return this.log.debug; + }, +}); + +Object.defineProperty(utils, 'colours', { + set: function (value) { + this.log.useColours = 
value; + }, + get: function () { + return this.log.useColours; + }, +}); diff --git a/node_modules/nodemon/lib/utils/log.js b/node_modules/nodemon/lib/utils/log.js new file mode 100644 index 0000000..6580087 --- /dev/null +++ b/node_modules/nodemon/lib/utils/log.js @@ -0,0 +1,82 @@ +var colour = require('./colour'); +var bus = require('./bus'); +var required = false; +var useColours = true; + +var coding = { + log: 'black', + info: 'yellow', + status: 'green', + detail: 'yellow', + fail: 'red', + error: 'red', +}; + +function log(type, text) { + var msg = '[nodemon] ' + (text || ''); + + if (useColours) { + msg = colour(coding[type], msg); + } + + // always push the message through our bus, using nextTick + // to help testing and get _out of_ promises. + process.nextTick(() => { + bus.emit('log', { type: type, message: text, colour: msg }); + }); + + // but if we're running on the command line, also echo out + // question: should we actually just consume our own events? + if (!required) { + if (type === 'error') { + console.error(msg); + } else { + console.log(msg || ''); + } + } +} + +var Logger = function (r) { + if (!(this instanceof Logger)) { + return new Logger(r); + } + this.required(r); + return this; +}; + +Object.keys(coding).forEach(function (type) { + Logger.prototype[type] = log.bind(null, type); +}); + +// detail is for messages that are turned on during debug +Logger.prototype.detail = function (msg) { + if (this.debug) { + log('detail', msg); + } +}; + +Logger.prototype.required = function (val) { + required = val; +}; + +Logger.prototype.debug = false; +Logger.prototype._log = function (type, msg) { + if (required) { + bus.emit('log', { type: type, message: msg || '', colour: msg || '' }); + } else if (type === 'error') { + console.error(msg); + } else { + console.log(msg || ''); + } +}; + +Object.defineProperty(Logger.prototype, 'useColours', { + set: function (val) { + useColours = val; + }, + get: function () { + return useColours; + }, +}); + +module.exports = Logger; diff --git a/node_modules/nodemon/lib/utils/merge.js b/node_modules/nodemon/lib/utils/merge.js new file mode 100644 index 0000000..1f3440b --- /dev/null +++ b/node_modules/nodemon/lib/utils/merge.js @@ -0,0 +1,47 @@ +var clone = require('./clone'); + +module.exports = merge; + +function typesMatch(a, b) { + return (typeof a === typeof b) && (Array.isArray(a) === Array.isArray(b)); +} + +/** + * A deep merge of the source based on the target. 
+ * @param {Object} source [description] + * @param {Object} target [description] + * @return {Object} [description] + */ +function merge(source, target, result) { + if (result === undefined) { + result = clone(source); + } + + // merge missing values from the target to the source + Object.getOwnPropertyNames(target).forEach(function (key) { + if (source[key] === undefined) { + result[key] = target[key]; + } + }); + + Object.getOwnPropertyNames(source).forEach(function (key) { + var value = source[key]; + + if (target[key] && typesMatch(value, target[key])) { + // merge empty values + if (value === '') { + result[key] = target[key]; + } + + if (Array.isArray(value)) { + if (value.length === 0 && target[key].length) { + result[key] = target[key].slice(0); + } + } else if (typeof value === 'object') { + result[key] = merge(value, target[key]); + } + } + }); + + return result; +} \ No newline at end of file diff --git a/node_modules/nodemon/lib/version.js b/node_modules/nodemon/lib/version.js new file mode 100644 index 0000000..d0f5104 --- /dev/null +++ b/node_modules/nodemon/lib/version.js @@ -0,0 +1,100 @@ +module.exports = version; +module.exports.pin = pin; + +var fs = require('fs'); +var path = require('path'); +var exec = require('child_process').exec; +var root = null; + +function pin() { + return version().then(function (v) { + version.pinned = v; + }); +} + +function version(callback) { + // first find the package.json as this will be our root + var promise = findPackage(path.dirname(module.parent.filename)) + .then(function (dir) { + // now try to load the package + var v = require(path.resolve(dir, 'package.json')).version; + + if (v && v !== '0.0.0-development') { + return v; + } + + root = dir; + + // else we're in development, give the commit out + // get the last commit and whether the working dir is dirty + var promises = [ + branch().catch(function () { return 'master'; }), + commit().catch(function () { return ''; }), + dirty().catch(function () { return 0; }), + ]; + + // use the cached result as the export + return Promise.all(promises).then(function (res) { + var branch = res[0]; + var commit = res[1]; + var dirtyCount = parseInt(res[2], 10); + var curr = branch + ': ' + commit; + if (dirtyCount !== 0) { + curr += ' (' + dirtyCount + ' dirty files)'; + } + + return curr; + }); + }).catch(function (error) { + console.log(error.stack); + throw error; + }); + + if (callback) { + promise.then(function (res) { + callback(null, res); + }, callback); + } + + return promise; +} + +function findPackage(dir) { + if (dir === '/') { + return Promise.reject(new Error('package not found')); + } + return new Promise(function (resolve) { + fs.stat(path.resolve(dir, 'package.json'), function (error, exists) { + if (error || !exists) { + return resolve(findPackage(path.resolve(dir, '..'))); + } + + resolve(dir); + }); + }); +} + +function command(cmd) { + return new Promise(function (resolve, reject) { + exec(cmd, { cwd: root }, function (err, stdout, stderr) { + var error = stderr.trim(); + if (error) { + return reject(new Error(error)); + } + resolve(stdout.split('\n').join('')); + }); + }); +} + +function commit() { + return command('git rev-parse HEAD'); +} + +function branch() { + return command('git rev-parse --abbrev-ref HEAD'); +} + +function dirty() { + return command('expr $(git status --porcelain 2>/dev/null| ' + + 'egrep "^(M| M)" | wc -l)'); +} diff --git a/node_modules/nodemon/package.json b/node_modules/nodemon/package.json new file mode 100644 index 0000000..45e0987 --- 
/dev/null +++ b/node_modules/nodemon/package.json @@ -0,0 +1,75 @@ +{ + "name": "nodemon", + "homepage": "https://nodemon.io", + "author": { + "name": "Remy Sharp", + "url": "https://github.com/remy" + }, + "bin": { + "nodemon": "./bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "repository": { + "type": "git", + "url": "https://github.com/remy/nodemon.git" + }, + "description": "Simple monitor script for use during development of a Node.js app.", + "keywords": [ + "cli", + "monitor", + "monitor", + "development", + "restart", + "autoload", + "reload", + "terminal" + ], + "license": "MIT", + "types": "./index.d.ts", + "main": "./lib/nodemon", + "scripts": { + "commitmsg": "commitlint -e", + "coverage": "istanbul cover _mocha -- --timeout 30000 --ui bdd --reporter list test/**/*.test.js", + "lint": "eslint lib/**/*.js", + "test": "npm run lint && npm run spec", + "spec": "for FILE in test/**/*.test.js; do echo $FILE; TEST=1 mocha --exit --timeout 30000 $FILE; if [ $? -ne 0 ]; then exit 1; fi; sleep 1; done", + "postspec": "npm run clean", + "clean": "rm -rf test/fixtures/test*.js test/fixtures/test*.md", + "web": "node web", + "semantic-release": "semantic-release", + "prepush": "npm run lint", + "killall": "ps auxww | grep node | grep -v grep | awk '{ print $2 }' | xargs kill -9" + }, + "devDependencies": { + "@commitlint/cli": "^11.0.0", + "@commitlint/config-conventional": "^11.0.0", + "async": "1.4.2", + "coffee-script": "~1.7.1", + "eslint": "^7.32.0", + "husky": "^7.0.4", + "mocha": "^2.5.3", + "nyc": "^15.1.0", + "proxyquire": "^1.8.0", + "semantic-release": "^18.0.0", + "should": "~4.0.0" + }, + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^4", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "version": "3.1.4", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } +} diff --git a/node_modules/normalize-path/LICENSE b/node_modules/normalize-path/LICENSE new file mode 100644 index 0000000..d32ab44 --- /dev/null +++ b/node_modules/normalize-path/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/normalize-path/README.md b/node_modules/normalize-path/README.md new file mode 100644 index 0000000..726d4d6 --- /dev/null +++ b/node_modules/normalize-path/README.md @@ -0,0 +1,127 @@ +# normalize-path [![NPM version](https://img.shields.io/npm/v/normalize-path.svg?style=flat)](https://www.npmjs.com/package/normalize-path) [![NPM monthly downloads](https://img.shields.io/npm/dm/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![NPM total downloads](https://img.shields.io/npm/dt/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/normalize-path.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/normalize-path) + +> Normalize slashes in a file path to be posix/unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save normalize-path +``` + +## Usage + +```js +const normalize = require('normalize-path'); + +console.log(normalize('\\foo\\bar\\baz\\')); +//=> '/foo/bar/baz' +``` + +**win32 namespaces** + +```js +console.log(normalize('\\\\?\\UNC\\Server01\\user\\docs\\Letter.txt')); +//=> '//?/UNC/Server01/user/docs/Letter.txt' + +console.log(normalize('\\\\.\\CdRomX')); +//=> '//./CdRomX' +``` + +**Consecutive slashes** + +Condenses multiple consecutive forward slashes (except for leading slashes in win32 namespaces) to a single slash. + +```js +console.log(normalize('.//foo//bar///////baz/')); +//=> './foo/bar/baz' +``` + +### Trailing slashes + +By default trailing slashes are removed. Pass `false` as the last argument to disable this behavior and _**keep** trailing slashes_: + +```js +console.log(normalize('foo\\bar\\baz\\', false)); //=> 'foo/bar/baz/' +console.log(normalize('./foo/bar/baz/', false)); //=> './foo/bar/baz/' +``` + +## Release history + +### v3.0 + +No breaking changes in this release. + +* a check was added to ensure that [win32 namespaces](https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces) are handled properly by win32 `path.parse()` after a path has been normalized by this library. +* a minor optimization was made to simplify how the trailing separator was handled + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +Other useful path-related libraries: + +* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path "Return true if a file path contains the given path.") +* [is-absolute](https://www.npmjs.com/package/is-absolute): Returns true if a file path is absolute. Does not rely on the path module… [more](https://github.com/jonschlinkert/is-absolute) | [homepage](https://github.com/jonschlinkert/is-absolute "Returns true if a file path is absolute. Does not rely on the path module and can be used as a polyfill for node.js native `path.isAbolute`.") +* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.") +* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Pollyfill for node.js `path.parse`, parses a filepath into an object. | [homepage](https://github.com/jonschlinkert/parse-filepath "Pollyfill for node.js `path.parse`, parses a filepath into an object.") +* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with "Return `true` if a file path ends with the given string/suffix.") +* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify "Convert Windows file paths to unix paths.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 35 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [phated](https://github.com/phated) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on April 19, 2018._ \ No newline at end of file diff --git a/node_modules/normalize-path/index.js b/node_modules/normalize-path/index.js new file mode 100644 index 0000000..6fac553 --- /dev/null +++ b/node_modules/normalize-path/index.js @@ -0,0 +1,35 @@ +/*! + * normalize-path + * + * Copyright (c) 2014-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +module.exports = function(path, stripTrailing) { + if (typeof path !== 'string') { + throw new TypeError('expected path to be a string'); + } + + if (path === '\\' || path === '/') return '/'; + + var len = path.length; + if (len <= 1) return path; + + // ensure that win32 namespaces has two leading slashes, so that the path is + // handled properly by the win32 version of path.parse() after being normalized + // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces + var prefix = ''; + if (len > 4 && path[3] === '\\') { + var ch = path[2]; + if ((ch === '?' 
|| ch === '.') && path.slice(0, 2) === '\\\\') { + path = path.slice(2); + prefix = '//'; + } + } + + var segs = path.split(/[/\\]+/); + if (stripTrailing !== false && segs[segs.length - 1] === '') { + segs.pop(); + } + return prefix + segs.join('/'); +}; diff --git a/node_modules/normalize-path/package.json b/node_modules/normalize-path/package.json new file mode 100644 index 0000000..ad61098 --- /dev/null +++ b/node_modules/normalize-path/package.json @@ -0,0 +1,77 @@ +{ + "name": "normalize-path", + "description": "Normalize slashes in a file path to be posix/unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled.", + "version": "3.0.0", + "homepage": "https://github.com/jonschlinkert/normalize-path", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Blaine Bublitz (https://twitter.com/BlaineBublitz)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/normalize-path", + "bugs": { + "url": "https://github.com/jonschlinkert/normalize-path/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "minimist": "^1.2.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "absolute", + "backslash", + "delimiter", + "file", + "file-path", + "filepath", + "fix", + "forward", + "fp", + "fs", + "normalize", + "path", + "relative", + "separator", + "slash", + "slashes", + "trailing", + "unix", + "urix" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "description": "Other useful path-related libraries:", + "list": [ + "contains-path", + "is-absolute", + "is-relative", + "parse-filepath", + "path-ends-with", + "path-ends-with", + "unixify" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/pg-cloudflare/LICENSE b/node_modules/pg-cloudflare/LICENSE new file mode 100644 index 0000000..5c14056 --- /dev/null +++ b/node_modules/pg-cloudflare/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2021 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
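As a quick reference for the normalize-path implementation included above, here is a small usage sketch; the input strings are illustrative, and the expected results simply follow the function as written:

```js
// Hypothetical inputs run through the vendored normalize-path module above.
var normalize = require('normalize-path');

normalize('foo\\bar\\baz\\');        // => 'foo/bar/baz'    (backslashes converted, trailing slash stripped)
normalize('./foo//bar/baz/', false); // => './foo/bar/baz/' (pass `false` to keep the trailing slash)
normalize('\\\\?\\C:\\foo\\bar');    // => '//?/C:/foo/bar' (win32 namespace prefix preserved as `//`)
```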
diff --git a/node_modules/pg-cloudflare/README.md b/node_modules/pg-cloudflare/README.md new file mode 100644 index 0000000..8496faa --- /dev/null +++ b/node_modules/pg-cloudflare/README.md @@ -0,0 +1,33 @@ +# pg-cloudflare + +A socket implementation that can run on Cloudflare Workers using native TCP connections. + +## install + +``` +npm i --save-dev pg-cloudflare +``` + +### license + +The MIT License (MIT) + +Copyright (c) 2023 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/pg-cloudflare/dist/empty.d.ts b/node_modules/pg-cloudflare/dist/empty.d.ts new file mode 100644 index 0000000..d87c485 --- /dev/null +++ b/node_modules/pg-cloudflare/dist/empty.d.ts @@ -0,0 +1,2 @@ +declare const _default: {}; +export default _default; diff --git a/node_modules/pg-cloudflare/dist/empty.js b/node_modules/pg-cloudflare/dist/empty.js new file mode 100644 index 0000000..307f5af --- /dev/null +++ b/node_modules/pg-cloudflare/dist/empty.js @@ -0,0 +1,4 @@ +// This is an empty module that is served up when outside of a workerd environment +// See the `exports` field in package.json +export default {}; +//# sourceMappingURL=empty.js.map \ No newline at end of file diff --git a/node_modules/pg-cloudflare/dist/empty.js.map b/node_modules/pg-cloudflare/dist/empty.js.map new file mode 100644 index 0000000..2dcdda3 --- /dev/null +++ b/node_modules/pg-cloudflare/dist/empty.js.map @@ -0,0 +1 @@ +{"version":3,"file":"empty.js","sourceRoot":"","sources":["../src/empty.ts"],"names":[],"mappings":"AAAA,kFAAkF;AAClF,0CAA0C;AAC1C,eAAe,EAAE,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-cloudflare/dist/index.d.ts b/node_modules/pg-cloudflare/dist/index.d.ts new file mode 100644 index 0000000..a779d14 --- /dev/null +++ b/node_modules/pg-cloudflare/dist/index.d.ts @@ -0,0 +1,31 @@ +/// +/// +/// +import { TlsOptions } from 'cloudflare:sockets'; +import { EventEmitter } from 'events'; +/** + * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`. 
+ */ +export declare class CloudflareSocket extends EventEmitter { + readonly ssl: boolean; + writable: boolean; + destroyed: boolean; + private _upgrading; + private _upgraded; + private _cfSocket; + private _cfWriter; + private _cfReader; + constructor(ssl: boolean); + setNoDelay(): this; + setKeepAlive(): this; + ref(): this; + unref(): this; + connect(port: number, host: string, connectListener?: (...args: unknown[]) => void): Promise; + _listen(): Promise; + _listenOnce(): Promise; + write(data: Uint8Array | string, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): true | void; + end(data?: Buffer, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): this; + destroy(reason: string): this; + startTls(options: TlsOptions): void; + _addClosedHandler(): void; +} diff --git a/node_modules/pg-cloudflare/dist/index.js b/node_modules/pg-cloudflare/dist/index.js new file mode 100644 index 0000000..e2db57e --- /dev/null +++ b/node_modules/pg-cloudflare/dist/index.js @@ -0,0 +1,146 @@ +import { EventEmitter } from 'events'; +/** + * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`. + */ +export class CloudflareSocket extends EventEmitter { + constructor(ssl) { + super(); + this.ssl = ssl; + this.writable = false; + this.destroyed = false; + this._upgrading = false; + this._upgraded = false; + this._cfSocket = null; + this._cfWriter = null; + this._cfReader = null; + } + setNoDelay() { + return this; + } + setKeepAlive() { + return this; + } + ref() { + return this; + } + unref() { + return this; + } + async connect(port, host, connectListener) { + try { + log('connecting'); + if (connectListener) + this.once('connect', connectListener); + const options = this.ssl ? { secureTransport: 'starttls' } : {}; + const { connect } = await import('cloudflare:sockets'); + this._cfSocket = connect(`${host}:${port}`, options); + this._cfWriter = this._cfSocket.writable.getWriter(); + this._addClosedHandler(); + this._cfReader = this._cfSocket.readable.getReader(); + if (this.ssl) { + this._listenOnce().catch((e) => this.emit('error', e)); + } + else { + this._listen().catch((e) => this.emit('error', e)); + } + await this._cfWriter.ready; + log('socket ready'); + this.writable = true; + this.emit('connect'); + return this; + } + catch (e) { + this.emit('error', e); + } + } + async _listen() { + while (true) { + log('awaiting receive from CF socket'); + const { done, value } = await this._cfReader.read(); + log('CF socket received:', done, value); + if (done) { + log('done'); + break; + } + this.emit('data', Buffer.from(value)); + } + } + async _listenOnce() { + log('awaiting first receive from CF socket'); + const { done, value } = await this._cfReader.read(); + log('First CF socket received:', done, value); + this.emit('data', Buffer.from(value)); + } + write(data, encoding = 'utf8', callback = () => { }) { + if (data.length === 0) + return callback(); + if (typeof data === 'string') + data = Buffer.from(data, encoding); + log('sending data direct:', data); + this._cfWriter.write(data).then(() => { + log('data sent'); + callback(); + }, (err) => { + log('send error', err); + callback(err); + }); + return true; + } + end(data = Buffer.alloc(0), encoding = 'utf8', callback = () => { }) { + log('ending CF socket'); + this.write(data, encoding, (err) => { + this._cfSocket.close(); + if (callback) + callback(err); + }); + return this; + } + destroy(reason) { + log('destroying CF socket', reason); + this.destroyed = true; + return this.end(); + } 
+ startTls(options) { + if (this._upgraded) { + // Don't try to upgrade again. + this.emit('error', 'Cannot call `startTls()` more than once on a socket'); + return; + } + this._cfWriter.releaseLock(); + this._cfReader.releaseLock(); + this._upgrading = true; + this._cfSocket = this._cfSocket.startTls(options); + this._cfWriter = this._cfSocket.writable.getWriter(); + this._cfReader = this._cfSocket.readable.getReader(); + this._addClosedHandler(); + this._listen().catch((e) => this.emit('error', e)); + } + _addClosedHandler() { + this._cfSocket.closed.then(() => { + if (!this._upgrading) { + log('CF socket closed'); + this._cfSocket = null; + this.emit('close'); + } + else { + this._upgrading = false; + this._upgraded = true; + } + }).catch((e) => this.emit('error', e)); + } +} +const debug = false; +function dump(data) { + if (data instanceof Uint8Array || data instanceof ArrayBuffer) { + const hex = Buffer.from(data).toString('hex'); + const str = new TextDecoder().decode(data); + return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`; + } + else { + return data; + } +} +function log(...args) { + debug && console.log(...args.map(dump)); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pg-cloudflare/dist/index.js.map b/node_modules/pg-cloudflare/dist/index.js.map new file mode 100644 index 0000000..49bf37c --- /dev/null +++ b/node_modules/pg-cloudflare/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAA;AAErC;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,YAAY;IAUhD,YAAqB,GAAY;QAC/B,KAAK,EAAE,CAAA;QADY,QAAG,GAAH,GAAG,CAAS;QATjC,aAAQ,GAAG,KAAK,CAAA;QAChB,cAAS,GAAG,KAAK,CAAA;QAET,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QACjB,cAAS,GAAkB,IAAI,CAAA;QAC/B,cAAS,GAAuC,IAAI,CAAA;QACpD,cAAS,GAAuC,IAAI,CAAA;IAI5D,CAAC;IAED,UAAU;QACR,OAAO,IAAI,CAAA;IACb,CAAC;IACD,YAAY;QACV,OAAO,IAAI,CAAA;IACb,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IACD,KAAK;QACH,OAAO,IAAI,CAAA;IACb,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,IAAY,EAAE,IAAY,EAAE,eAA8C;QACtF,IAAI;YACF,GAAG,CAAC,YAAY,CAAC,CAAA;YACjB,IAAI,eAAe;gBAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,eAAe,CAAC,CAAA;YAE1D,MAAM,OAAO,GAAkB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;YAC9E,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAA;YACtD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,EAAE,OAAO,CAAC,CAAA;YACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;YAExB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,IAAI,CAAC,GAAG,EAAE;gBACZ,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACvD;iBAAM;gBACL,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACnD;YAED,MAAM,IAAI,CAAC,SAAU,CAAC,KAAK,CAAA;YAC3B,GAAG,CAAC,cAAc,CAAC,CAAA;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAEpB,OAAO,IAAI,CAAA;SACZ;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;SACtB;IACH,CAAC;IAED,KAAK,CAAC,OAAO;QACX,OAAO,IAAI,EAAE;YACX,GAAG,CAAC,iCAAiC,CAAC,CAAA;YACtC,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;YACpD,GAAG,CAAC,qBAAqB,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACvC,IAAI,IAAI,EAAE;gBACR,GAAG,CAAC,MAAM,CAAC,CAAA;gBACX,MAAK;aACN;YACD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;SACtC;IACH,CAAC
;IAED,KAAK,CAAC,WAAW;QACf,GAAG,CAAC,uCAAuC,CAAC,CAAA;QAC5C,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;QACpD,GAAG,CAAC,2BAA2B,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QAC7C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IACvC,CAAC;IAED,KAAK,CACH,IAAyB,EACzB,WAA2B,MAAM,EACjC,WAAyC,GAAG,EAAE,GAAE,CAAC;QAEjD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,QAAQ,EAAE,CAAA;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ;YAAE,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;QAEhE,GAAG,CAAC,sBAAsB,EAAE,IAAI,CAAC,CAAA;QACjC,IAAI,CAAC,SAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,GAAG,EAAE;YACH,GAAG,CAAC,WAAW,CAAC,CAAA;YAChB,QAAQ,EAAE,CAAA;QACZ,CAAC,EACD,CAAC,GAAG,EAAE,EAAE;YACN,GAAG,CAAC,YAAY,EAAE,GAAG,CAAC,CAAA;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;QACf,CAAC,CACF,CAAA;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,WAA2B,MAAM,EAAE,WAAyC,GAAG,EAAE,GAAE,CAAC;QAC9G,GAAG,CAAC,kBAAkB,CAAC,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE;YACjC,IAAI,CAAC,SAAU,CAAC,KAAK,EAAE,CAAA;YACvB,IAAI,QAAQ;gBAAE,QAAQ,CAAC,GAAG,CAAC,CAAA;QAC7B,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAA;IACb,CAAC;IAED,OAAO,CAAC,MAAc;QACpB,GAAG,CAAC,sBAAsB,EAAE,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;QACrB,OAAO,IAAI,CAAC,GAAG,EAAE,CAAA;IACnB,CAAC;IAED,QAAQ,CAAC,OAAmB;QAC1B,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,8BAA8B;YAC9B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,qDAAqD,CAAC,CAAA;YACzE,OAAM;SACP;QACD,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAA;QACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;QACxB,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACpD,CAAC;IAED,iBAAiB;QACf,IAAI,CAAC,SAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;gBACpB,GAAG,CAAC,kBAAkB,CAAC,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;gBACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;iBAAM;gBACL,IAAI,CAAC,UAAU,GAAG,KAAK,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;aACtB;QACH,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;CACF;AAED,MAAM,KAAK,GAAG,KAAK,CAAA;AAEnB,SAAS,IAAI,CAAC,IAAa;IACzB,IAAI,IAAI,YAAY,UAAU,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAC7C,MAAM,GAAG,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QAC1C,OAAO,eAAe,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;KACtE;SAAM;QACL,OAAO,IAAI,CAAA;KACZ;AACH,CAAC;AAED,SAAS,GAAG,CAAC,GAAG,IAAe;IAC7B,KAAK,IAAI,OAAO,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAA;AACzC,CAAC"} \ No newline at end of file diff --git a/node_modules/pg-cloudflare/package.json b/node_modules/pg-cloudflare/package.json new file mode 100644 index 0000000..d0ae9f4 --- /dev/null +++ b/node_modules/pg-cloudflare/package.json @@ -0,0 +1,32 @@ +{ + "name": "pg-cloudflare", + "version": "1.1.1", + "description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.", + "main": "dist/empty.js", + "types": "dist/index.d.ts", + "license": "MIT", + "devDependencies": { + "ts-node": "^8.5.4", + "typescript": "^4.0.3" + }, + "exports": { + "workerd": "./dist/index.js", + "default": "./dist/empty.js" + }, + "scripts": { + "build": "tsc", + "build:watch": "tsc 
--watch", + "prepublish": "yarn build", + "test": "echo e2e test in pg package" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-cloudflare" + }, + "files": [ + "/dist/*{js,ts,map}", + "/src" + ], + "gitHead": "eaafac36dc8f4a13f1fecc9e3420d35559fd8e2b" +} diff --git a/node_modules/pg-cloudflare/src/empty.ts b/node_modules/pg-cloudflare/src/empty.ts new file mode 100644 index 0000000..f1e6740 --- /dev/null +++ b/node_modules/pg-cloudflare/src/empty.ts @@ -0,0 +1,3 @@ +// This is an empty module that is served up when outside of a workerd environment +// See the `exports` field in package.json +export default {} diff --git a/node_modules/pg-cloudflare/src/index.ts b/node_modules/pg-cloudflare/src/index.ts new file mode 100644 index 0000000..98dfc35 --- /dev/null +++ b/node_modules/pg-cloudflare/src/index.ts @@ -0,0 +1,164 @@ +import { SocketOptions, Socket, TlsOptions } from 'cloudflare:sockets' +import { EventEmitter } from 'events' + +/** + * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`. + */ +export class CloudflareSocket extends EventEmitter { + writable = false + destroyed = false + + private _upgrading = false + private _upgraded = false + private _cfSocket: Socket | null = null + private _cfWriter: WritableStreamDefaultWriter | null = null + private _cfReader: ReadableStreamDefaultReader | null = null + + constructor(readonly ssl: boolean) { + super() + } + + setNoDelay() { + return this + } + setKeepAlive() { + return this + } + ref() { + return this + } + unref() { + return this + } + + async connect(port: number, host: string, connectListener?: (...args: unknown[]) => void) { + try { + log('connecting') + if (connectListener) this.once('connect', connectListener) + + const options: SocketOptions = this.ssl ? 
{ secureTransport: 'starttls' } : {} + const { connect } = await import('cloudflare:sockets') + this._cfSocket = connect(`${host}:${port}`, options) + this._cfWriter = this._cfSocket.writable.getWriter() + this._addClosedHandler() + + this._cfReader = this._cfSocket.readable.getReader() + if (this.ssl) { + this._listenOnce().catch((e) => this.emit('error', e)) + } else { + this._listen().catch((e) => this.emit('error', e)) + } + + await this._cfWriter!.ready + log('socket ready') + this.writable = true + this.emit('connect') + + return this + } catch (e) { + this.emit('error', e) + } + } + + async _listen() { + while (true) { + log('awaiting receive from CF socket') + const { done, value } = await this._cfReader!.read() + log('CF socket received:', done, value) + if (done) { + log('done') + break + } + this.emit('data', Buffer.from(value)) + } + } + + async _listenOnce() { + log('awaiting first receive from CF socket') + const { done, value } = await this._cfReader!.read() + log('First CF socket received:', done, value) + this.emit('data', Buffer.from(value)) + } + + write( + data: Uint8Array | string, + encoding: BufferEncoding = 'utf8', + callback: (...args: unknown[]) => void = () => {} + ) { + if (data.length === 0) return callback() + if (typeof data === 'string') data = Buffer.from(data, encoding) + + log('sending data direct:', data) + this._cfWriter!.write(data).then( + () => { + log('data sent') + callback() + }, + (err) => { + log('send error', err) + callback(err) + } + ) + return true + } + + end(data = Buffer.alloc(0), encoding: BufferEncoding = 'utf8', callback: (...args: unknown[]) => void = () => {}) { + log('ending CF socket') + this.write(data, encoding, (err) => { + this._cfSocket!.close() + if (callback) callback(err) + }) + return this + } + + destroy(reason: string) { + log('destroying CF socket', reason) + this.destroyed = true + return this.end() + } + + startTls(options: TlsOptions) { + if (this._upgraded) { + // Don't try to upgrade again. 
+ this.emit('error', 'Cannot call `startTls()` more than once on a socket') + return + } + this._cfWriter!.releaseLock() + this._cfReader!.releaseLock() + this._upgrading = true + this._cfSocket = this._cfSocket!.startTls(options) + this._cfWriter = this._cfSocket.writable.getWriter() + this._cfReader = this._cfSocket.readable.getReader() + this._addClosedHandler() + this._listen().catch((e) => this.emit('error', e)) + } + + _addClosedHandler() { + this._cfSocket!.closed.then(() => { + if (!this._upgrading) { + log('CF socket closed') + this._cfSocket = null + this.emit('close') + } else { + this._upgrading = false + this._upgraded = true + } + }).catch((e) => this.emit('error', e)) + } +} + +const debug = false + +function dump(data: unknown) { + if (data instanceof Uint8Array || data instanceof ArrayBuffer) { + const hex = Buffer.from(data).toString('hex') + const str = new TextDecoder().decode(data) + return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n` + } else { + return data + } +} + +function log(...args: unknown[]) { + debug && console.log(...args.map(dump)) +} diff --git a/node_modules/pg-cloudflare/src/types.d.ts b/node_modules/pg-cloudflare/src/types.d.ts new file mode 100644 index 0000000..f6f1c3f --- /dev/null +++ b/node_modules/pg-cloudflare/src/types.d.ts @@ -0,0 +1,25 @@ +declare module 'cloudflare:sockets' { + export class Socket { + public readonly readable: any + public readonly writable: any + public readonly closed: Promise + public close(): Promise + public startTls(options: TlsOptions): Socket + } + + export type TlsOptions = { + expectedServerHostname?: string + } + + export type SocketAddress = { + hostname: string + port: number + } + + export type SocketOptions = { + secureTransport?: 'off' | 'on' | 'starttls' + allowHalfOpen?: boolean + } + + export function connect(address: string | SocketAddress, options?: SocketOptions): Socket +} diff --git a/node_modules/pg-connection-string/LICENSE b/node_modules/pg-connection-string/LICENSE new file mode 100644 index 0000000..b068a6c --- /dev/null +++ b/node_modules/pg-connection-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Iced Development + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file
diff --git a/node_modules/pg-connection-string/README.md b/node_modules/pg-connection-string/README.md
new file mode 100644
index 0000000..360505e
--- /dev/null
+++ b/node_modules/pg-connection-string/README.md
@@ -0,0 +1,77 @@
+pg-connection-string
+====================
+
+[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/)
+
+[![Build Status](https://travis-ci.org/iceddev/pg-connection-string.svg?branch=master)](https://travis-ci.org/iceddev/pg-connection-string)
+[![Coverage Status](https://coveralls.io/repos/github/iceddev/pg-connection-string/badge.svg?branch=master)](https://coveralls.io/github/iceddev/pg-connection-string?branch=master)
+
+Functions for dealing with a PostgreSQL connection string
+
+`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
+Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
+MIT License
+
+## Usage
+
+```js
+var parse = require('pg-connection-string').parse;
+
+var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
+```
+
+The resulting config contains a subset of the following properties:
+
+* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
+* `port` - port on which to connect
+* `user` - User with which to authenticate to the server
+* `password` - Corresponding password
+* `database` - Database name within the server
+* `client_encoding` - string encoding the client will use
+* `ssl`, either a boolean or an object with properties
+  * `rejectUnauthorized`
+  * `cert`
+  * `key`
+  * `ca`
+* any other query parameters (for example, `application_name`) are preserved intact.
+
+## Connection Strings
+
+The short summary of acceptable URLs is:
+
+ * `socket:<path>?<query>` - UNIX domain socket
+ * `postgres://<user>:<password>@<host>:<port>/<database>?<query>` - TCP connection
+
+But see below for more details.
+
+### UNIX Domain Sockets
+
+When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
+This form can be shortened to just a path: `/var/run/pgsql`.
+
+When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.
+
+Query parameters follow a `?` character, including the following special query parameters:
+
+ * `db=<database>` - sets the database name (urlencoded)
+ * `encoding=<encoding>` - sets the `client_encoding` property
+
+### TCP Connections
+
+TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
+If username and password are included, they should be urlencoded.
+The database name, however, should *not* be urlencoded.
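To make the urlencoding rule concrete, here is a short hedged example (the host and credentials below are made up):

```js
var parse = require('pg-connection-string').parse;

// '%40' in the credentials decodes to '@'; the database name is taken as-is.
var config = parse('postgres://some%40user:p%40ss@db.example.com:5432/salesdata');
// config.user     => 'some@user'
// config.password => 'p@ss'
// config.host     => 'db.example.com'
// config.port     => '5432'
// config.database => 'salesdata'
```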
+ +Query parameters follow a `?` character, including the following special query parameters: + * `host=` - sets `host` property, overriding the URL's host + * `encoding=` - sets the `client_encoding` property + * `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly + * `sslmode=` + * `sslmode=disable` - sets `ssl` to false + * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }` + * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true + * `sslcert=` - reads data from the given file and includes the result as `ssl.cert` + * `sslkey=` - reads data from the given file and includes the result as `ssl.key` + * `sslrootcert=` - reads data from the given file and includes the result as `ssl.ca` + +A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty. diff --git a/node_modules/pg-connection-string/index.d.ts b/node_modules/pg-connection-string/index.d.ts new file mode 100644 index 0000000..3081270 --- /dev/null +++ b/node_modules/pg-connection-string/index.d.ts @@ -0,0 +1,15 @@ +export function parse(connectionString: string): ConnectionOptions + +export interface ConnectionOptions { + host: string | null + password?: string + user?: string + port?: string | null + database: string | null | undefined + client_encoding?: string + ssl?: boolean | string + + application_name?: string + fallback_application_name?: string + options?: string +} diff --git a/node_modules/pg-connection-string/index.js b/node_modules/pg-connection-string/index.js new file mode 100644 index 0000000..c7fc72a --- /dev/null +++ b/node_modules/pg-connection-string/index.js @@ -0,0 +1,112 @@ +'use strict' + +//Parse method copied from https://github.com/brianc/node-postgres +//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +//MIT License + +//parses a connection string +function parse(str) { + //unix socket + if (str.charAt(0) === '/') { + const config = str.split(' ') + return { host: config[0], database: config[1] } + } + + // Check for empty host in URL + + const config = {} + let result + let dummyHost = false + if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) { + // Ensure spaces are encoded as %20 + str = encodeURI(str).replace(/\%25(\d\d)/g, '%$1') + } + + try { + result = new URL(str, 'postgres://base') + } catch (e) { + // The URL is invalid so try again with a dummy host + result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base') + dummyHost = true + } + + // We'd like to use Object.fromEntries() here but Node.js 10 does not support it + for (const entry of result.searchParams.entries()) { + config[entry[0]] = entry[1] + } + + config.user = config.user || decodeURIComponent(result.username) + config.password = config.password || decodeURIComponent(result.password) + + if (result.protocol == 'socket:') { + config.host = decodeURI(result.pathname) + config.database = result.searchParams.get('db') + config.client_encoding = result.searchParams.get('encoding') + return config + } + const hostname = dummyHost ? '' : result.hostname + if (!config.host) { + // Only set the host if there is no equivalent query param. + config.host = decodeURIComponent(hostname) + } else if (hostname && /^%2f/i.test(hostname)) { + // Only prepend the hostname to the pathname if it is not a URL encoded Unix socket host. + result.pathname = hostname + result.pathname + } + if (!config.port) { + // Only set the port if there is no equivalent query param. 
+ config.port = result.port + } + + const pathname = result.pathname.slice(1) || null + config.database = pathname ? decodeURI(pathname) : null + + if (config.ssl === 'true' || config.ssl === '1') { + config.ssl = true + } + + if (config.ssl === '0') { + config.ssl = false + } + + if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) { + config.ssl = {} + } + + // Only try to load fs if we expect to read from the disk + const fs = config.sslcert || config.sslkey || config.sslrootcert ? require('fs') : null + + if (config.sslcert) { + config.ssl.cert = fs.readFileSync(config.sslcert).toString() + } + + if (config.sslkey) { + config.ssl.key = fs.readFileSync(config.sslkey).toString() + } + + if (config.sslrootcert) { + config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() + } + + switch (config.sslmode) { + case 'disable': { + config.ssl = false + break + } + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': { + break + } + case 'no-verify': { + config.ssl.rejectUnauthorized = false + break + } + } + + return config +} + +module.exports = parse + +parse.parse = parse diff --git a/node_modules/pg-connection-string/package.json b/node_modules/pg-connection-string/package.json new file mode 100644 index 0000000..3f012ff --- /dev/null +++ b/node_modules/pg-connection-string/package.json @@ -0,0 +1,40 @@ +{ + "name": "pg-connection-string", + "version": "2.6.4", + "description": "Functions for dealing with a PostgresSQL connection string", + "main": "./index.js", + "types": "./index.d.ts", + "scripts": { + "test": "istanbul cover _mocha && npm run check-coverage", + "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", + "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-connection-string" + }, + "keywords": [ + "pg", + "connection", + "string", + "parse" + ], + "author": "Blaine Bublitz (http://iceddev.com/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/brianc/node-postgres/issues" + }, + "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string", + "devDependencies": { + "chai": "^4.1.1", + "coveralls": "^3.0.4", + "istanbul": "^0.4.5", + "mocha": "^7.1.2" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "gitHead": "a37a93bf7990220517a40cf16b8e72d4c3e6cef5" +} diff --git a/node_modules/pg-int8/LICENSE b/node_modules/pg-int8/LICENSE new file mode 100644 index 0000000..c56c973 --- /dev/null +++ b/node_modules/pg-int8/LICENSE @@ -0,0 +1,13 @@ +Copyright © 2017, Charmander <~@charmander.me> + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
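Tying the `sslmode` handling in the pg-connection-string parser above back to its README, here is a brief sketch of how the special SSL query parameters end up on the returned config (the connection strings and file path are illustrative, and `sslrootcert` actually reads the named file from disk):

```js
var parse = require('pg-connection-string').parse;

// sslmode=disable turns SSL off entirely.
parse('postgres://user:pass@host/db?sslmode=disable');
// => { ..., ssl: false }

// sslmode=no-verify yields an ssl object with certificate verification disabled.
parse('postgres://user:pass@host/db?sslmode=no-verify');
// => { ..., ssl: { rejectUnauthorized: false } }

// sslrootcert=<file> reads the file and exposes its contents as ssl.ca.
parse('postgres://user:pass@host/db?sslrootcert=/path/to/root.crt');
// => { ..., ssl: { ca: '<contents of /path/to/root.crt>' } }
```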
diff --git a/node_modules/pg-int8/README.md b/node_modules/pg-int8/README.md new file mode 100644 index 0000000..ef2e608 --- /dev/null +++ b/node_modules/pg-int8/README.md @@ -0,0 +1,16 @@ +[![Build status][ci image]][ci] + +64-bit big-endian signed integer-to-string conversion designed for [pg][]. + +```js +const readInt8 = require('pg-int8'); + +readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7])) +// '283686952306183' +``` + + + [pg]: https://github.com/brianc/node-postgres + + [ci]: https://travis-ci.org/charmander/pg-int8 + [ci image]: https://api.travis-ci.org/charmander/pg-int8.svg diff --git a/node_modules/pg-int8/index.js b/node_modules/pg-int8/index.js new file mode 100644 index 0000000..db77975 --- /dev/null +++ b/node_modules/pg-int8/index.js @@ -0,0 +1,100 @@ +'use strict'; + +// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer +var BASE = 1000000; + +function readInt8(buffer) { + var high = buffer.readInt32BE(0); + var low = buffer.readUInt32BE(4); + var sign = ''; + + if (high < 0) { + high = ~high + (low === 0); + low = (~low + 1) >>> 0; + sign = '-'; + } + + var result = ''; + var carry; + var t; + var digits; + var pad; + var l; + var i; + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + t = 0x100000000 * carry + low; + digits = '' + t % BASE; + + return sign + digits + result; + } +} + +module.exports = readInt8; diff --git a/node_modules/pg-int8/package.json b/node_modules/pg-int8/package.json new file mode 100644 index 0000000..4b937e1 --- /dev/null +++ b/node_modules/pg-int8/package.json @@ -0,0 +1,24 @@ +{ + "name": "pg-int8", + "version": "1.0.1", + "description": "64-bit big-endian signed integer-to-string conversion", + "bugs": "https://github.com/charmander/pg-int8/issues", + "license": "ISC", + "files": [ + "index.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/charmander/pg-int8" + }, + "scripts": { + "test": "tap test" + }, + "devDependencies": { + "@charmander/eslint-config-base": "1.0.2", + "tap": "10.7.3" + }, + "engines": { + "node": ">=4.0.0" + } +} diff --git a/node_modules/pg-pool/LICENSE b/node_modules/pg-pool/LICENSE new file mode 100644 index 0000000..4e90581 --- /dev/null +++ b/node_modules/pg-pool/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Brian M. 
Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pg-pool/README.md b/node_modules/pg-pool/README.md new file mode 100644 index 0000000..c78b89f --- /dev/null +++ b/node_modules/pg-pool/README.md @@ -0,0 +1,376 @@ +# pg-pool +[![Build Status](https://travis-ci.org/brianc/node-pg-pool.svg?branch=master)](https://travis-ci.org/brianc/node-pg-pool) + +A connection pool for node-postgres + +## install +```sh +npm i pg-pool pg +``` + +## use + +### create + +to use pg-pool you must first create an instance of a pool + +```js +var Pool = require('pg-pool') + +// by default the pool uses the same +// configuration as whatever `pg` version you have installed +var pool = new Pool() + +// you can pass properties to the pool +// these properties are passed unchanged to both the node-postgres Client constructor +// and the node-pool (https://github.com/coopernurse/node-pool) constructor +// allowing you to fully configure the behavior of both +var pool2 = new Pool({ + database: 'postgres', + user: 'brianc', + password: 'secret!', + port: 5432, + ssl: true, + max: 20, // set pool max size to 20 + idleTimeoutMillis: 1000, // close idle clients after 1 second + connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established + maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion) +}) + +//you can supply a custom client constructor +//if you want to use the native postgres client +var NativeClient = require('pg').native.Client +var nativePool = new Pool({ Client: NativeClient }) + +//you can even pool pg-native clients directly +var PgNativeClient = require('pg-native') +var pgNativePool = new Pool({ Client: PgNativeClient }) +``` + +##### Note: +The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL. 
+ +```js +const Pool = require('pg-pool'); +const url = require('url') + +const params = url.parse(process.env.DATABASE_URL); +const auth = params.auth.split(':'); + +const config = { + user: auth[0], + password: auth[1], + host: params.hostname, + port: params.port, + database: params.pathname.split('/')[1], + ssl: true +}; + +const pool = new Pool(config); + +/* + Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into + config = { + user: 'DBuser', + password: 'secret', + host: 'DBHost', + port: '#####', + database: 'myDB', + ssl: true + } +*/ +``` + +### acquire clients with a promise + +pg-pool supports a fully promise-based api for acquiring clients + +```js +var pool = new Pool() +pool.connect().then(client => { + client.query('select $1::text as name', ['pg-pool']).then(res => { + client.release() + console.log('hello from', res.rows[0].name) + }) + .catch(e => { + client.release() + console.error('query error', e.message, e.stack) + }) +}) +``` + +### plays nice with async/await + +this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await: + +```js +// with async/await +(async () => { + var pool = new Pool() + var client = await pool.connect() + try { + var result = await client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +})().catch(e => console.error(e.message, e.stack)) + +// with co +co(function * () { + var client = yield pool.connect() + try { + var result = yield client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +}).catch(e => console.error(e.message, e.stack)) +``` + +### your new favorite helper method + +because its so common to just run a query and return the client to the pool afterward pg-pool has this built-in: + +```js +var pool = new Pool() +var time = await pool.query('SELECT NOW()') +var name = await pool.query('select $1::text as name', ['brianc']) +console.log(name.rows[0].name, 'says hello at', time.rows[0].now) +``` + +you can also use a callback here if you'd like: + +```js +var pool = new Pool() +pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + console.log(res.rows[0].name) // brianc +}) +``` + +__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you +have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor) +you should almost always just use `pool.query`. Its easy, it does the right thing :tm:, and wont ever forget to return +clients back to the pool after the query is done. + +### drop-in backwards compatible + +pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years: + +```js +var pool = new Pool() +pool.connect((err, client, done) => { + if (err) return done(err) + + client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => { + done() + if (err) { + return console.error('query error', err.message, err.stack) + } + console.log('hello from', res.rows[0].name) + }) +}) +``` + +### shut it down + +When you are finished with the pool if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app +will shutdown gracefully. 
If you don't want to wait for the timeout you can end the pool as follows: + +```js +var pool = new Pool() +var client = await pool.connect() +console.log(await client.query('select now()')) +client.release() +await pool.end() +``` + +### a note on instances + +The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example: + +```js +// assume this is a file in your program at ./your-app/lib/db.js + +// correct usage: create the pool and let it live +// 'globally' here, controlling access to it through exported methods +var pool = new pg.Pool() + +// this is the right way to export the query method +module.exports.query = (text, values) => { + console.log('query:', text, values) + return pool.query(text, values) +} + +// this would be the WRONG way to export the connect method +module.exports.connect = () => { + // notice how we would be creating a pool instance here + // every time we called 'connect' to get a new client? + // that's a bad thing & results in creating an unbounded + // number of pools & therefore connections + var aPool = new pg.Pool() + return aPool.connect() +} +``` + +### events + +Every instance of a `Pool` is an event emitter. These instances emit the following events: + +#### error + +Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients. + +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +// attach an error handler to the pool for when a connected, idle client +// receives an error by being disconnected, etc +pool.on('error', function(error, client) { + // handle this in the same way you would treat process.on('uncaughtException') + // it is supplied the error as well as the idle client which received the error +}) +``` + +#### connect + +Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend. + +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +var count = 0 + +pool.on('connect', client => { + client.count = count++ +}) + +pool + .connect() + .then(client => { + return client + .query('SELECT $1::int AS "clientCount"', [client.count]) + .then(res => console.log(res.rows[0].clientCount)) // outputs 0 + .then(() => client) + }) + .then(client => client.release()) + +``` + +#### acquire + +Fired whenever the a client is acquired from the pool + +Example: + +This allows you to count the number of clients which have ever been acquired from the pool. + +```js +var Pool = require('pg-pool') +var pool = new Pool() + +var acquireCount = 0 +pool.on('acquire', function (client) { + acquireCount++ +}) + +var connectCount = 0 +pool.on('connect', function () { + connectCount++ +}) + +for (var i = 0; i < 200; i++) { + pool.query('SELECT NOW()') +} + +setTimeout(function () { + console.log('connect count:', connectCount) // output: connect count: 10 + console.log('acquire count:', acquireCount) // output: acquire count: 200 +}, 100) + +``` + +### environment variables + +pg-pool & node-postgres support some of the same environment variables as `psql` supports. 
The most common are: + +``` +PGDATABASE=my_db +PGUSER=username +PGPASSWORD="my awesome password" +PGPORT=5432 +PGSSLMODE=require +``` + +Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box. + +## bring your own promise + +In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this: + +```js +// first run `npm install promise-polyfill --save +if (typeof Promise == 'undefined') { + global.Promise = require('promise-polyfill') +} +``` + +You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level: + +```js +var bluebirdPool = new Pool({ + Promise: require('bluebird') +}) +``` + +__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own." + +## maxUses and read-replica autoscaling (e.g. AWS Aurora) + +The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections. + +The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing. + +Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections. + +If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas. + +This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance. 
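For the scenario described above, wiring this up is just another constructor option; the numbers below are illustrative, and a way to estimate the value for your own workload follows next:

```js
var Pool = require('pg-pool')

// Illustrative: retire each connection after 7500 acquire/release cycles so
// newly added read replicas are picked up without restarting the app servers.
var pool = new Pool({
  max: 20,
  maxUses: 7500,
})
```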
+ +You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize +``` + +In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize + 7200 = 1800 * 1000 / 10 / 25 +``` + +## tests + +To run tests clone the repo, `npm i` in the working dir, and then run `npm test` + +## contributions + +I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres releated stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there. + +## license + +The MIT License (MIT) +Copyright (c) 2016 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pg-pool/index.js b/node_modules/pg-pool/index.js new file mode 100644 index 0000000..94004e0 --- /dev/null +++ b/node_modules/pg-pool/index.js @@ -0,0 +1,467 @@ +'use strict' +const EventEmitter = require('events').EventEmitter + +const NOOP = function () {} + +const removeWhere = (list, predicate) => { + const i = list.findIndex(predicate) + + return i === -1 ? undefined : list.splice(i, 1)[0] +} + +class IdleItem { + constructor(client, idleListener, timeoutId) { + this.client = client + this.idleListener = idleListener + this.timeoutId = timeoutId + } +} + +class PendingItem { + constructor(callback) { + this.callback = callback + } +} + +function throwOnDoubleRelease() { + throw new Error('Release called on client which has already been released to the pool.') +} + +function promisify(Promise, callback) { + if (callback) { + return { callback: callback, result: undefined } + } + let rej + let res + const cb = function (err, client) { + err ? 
rej(err) : res(client) + } + const result = new Promise(function (resolve, reject) { + res = resolve + rej = reject + }).catch((err) => { + // replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the + // application that created the query + Error.captureStackTrace(err) + throw err + }) + return { callback: cb, result: result } +} + +function makeIdleListener(pool, client) { + return function idleListener(err) { + err.client = client + + client.removeListener('error', idleListener) + client.on('error', () => { + pool.log('additional client error after disconnection due to error', err) + }) + pool._remove(client) + // TODO - document that once the pool emits an error + // the client has already been closed & purged and is unusable + pool.emit('error', err, client) + } +} + +class Pool extends EventEmitter { + constructor(options, Client) { + super() + this.options = Object.assign({}, options) + + if (options != null && 'password' in options) { + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: options.password, + }) + } + if (options != null && options.ssl && options.ssl.key) { + // "hiding" the ssl->key so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options.ssl, 'key', { + enumerable: false, + }) + } + + this.options.max = this.options.max || this.options.poolSize || 10 + this.options.maxUses = this.options.maxUses || Infinity + this.options.allowExitOnIdle = this.options.allowExitOnIdle || false + this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0 + this.log = this.options.log || function () {} + this.Client = this.options.Client || Client || require('pg').Client + this.Promise = this.options.Promise || global.Promise + + if (typeof this.options.idleTimeoutMillis === 'undefined') { + this.options.idleTimeoutMillis = 10000 + } + + this._clients = [] + this._idle = [] + this._expired = new WeakSet() + this._pendingQueue = [] + this._endCallback = undefined + this.ending = false + this.ended = false + } + + _isFull() { + return this._clients.length >= this.options.max + } + + _pulseQueue() { + this.log('pulse queue') + if (this.ended) { + this.log('pulse queue ended') + return + } + if (this.ending) { + this.log('pulse queue on ending') + if (this._idle.length) { + this._idle.slice().map((item) => { + this._remove(item.client) + }) + } + if (!this._clients.length) { + this.ended = true + this._endCallback() + } + return + } + + // if we don't have any waiting, do nothing + if (!this._pendingQueue.length) { + this.log('no queued requests') + return + } + // if we don't have any idle clients and we have no more room do nothing + if (!this._idle.length && this._isFull()) { + return + } + const pendingItem = this._pendingQueue.shift() + if (this._idle.length) { + const idleItem = this._idle.pop() + clearTimeout(idleItem.timeoutId) + const client = idleItem.client + client.ref && client.ref() + const idleListener = idleItem.idleListener + + return this._acquireClient(client, pendingItem, idleListener, false) + } + if (!this._isFull()) { + return this.newClient(pendingItem) + } + throw new Error('unexpected condition') + } + + _remove(client) { + const removed = removeWhere(this._idle, (item) => item.client === client) + + if (removed !== undefined) { + clearTimeout(removed.timeoutId) + } + + this._clients = 
this._clients.filter((c) => c !== client) + client.end() + this.emit('remove', client) + } + + connect(cb) { + if (this.ending) { + const err = new Error('Cannot use a pool after calling end on the pool') + return cb ? cb(err) : this.Promise.reject(err) + } + + const response = promisify(this.Promise, cb) + const result = response.result + + // if we don't have to connect a new client, don't do so + if (this._isFull() || this._idle.length) { + // if we have idle clients schedule a pulse immediately + if (this._idle.length) { + process.nextTick(() => this._pulseQueue()) + } + + if (!this.options.connectionTimeoutMillis) { + this._pendingQueue.push(new PendingItem(response.callback)) + return result + } + + const queueCallback = (err, res, done) => { + clearTimeout(tid) + response.callback(err, res, done) + } + + const pendingItem = new PendingItem(queueCallback) + + // set connection timeout on checking out an existing client + const tid = setTimeout(() => { + // remove the callback from pending waiters because + // we're going to call it with a timeout error + removeWhere(this._pendingQueue, (i) => i.callback === queueCallback) + pendingItem.timedOut = true + response.callback(new Error('timeout exceeded when trying to connect')) + }, this.options.connectionTimeoutMillis) + + this._pendingQueue.push(pendingItem) + return result + } + + this.newClient(new PendingItem(response.callback)) + + return result + } + + newClient(pendingItem) { + const client = new this.Client(this.options) + this._clients.push(client) + const idleListener = makeIdleListener(this, client) + + this.log('checking client timeout') + + // connection timeout logic + let tid + let timeoutHit = false + if (this.options.connectionTimeoutMillis) { + tid = setTimeout(() => { + this.log('ending client due to timeout') + timeoutHit = true + // force kill the node driver, and let libpq do its teardown + client.connection ? 
client.connection.stream.destroy() : client.end() + }, this.options.connectionTimeoutMillis) + } + + this.log('connecting new client') + client.connect((err) => { + if (tid) { + clearTimeout(tid) + } + client.on('error', idleListener) + if (err) { + this.log('client failed to connect', err) + // remove the dead client from our list of clients + this._clients = this._clients.filter((c) => c !== client) + if (timeoutHit) { + err.message = 'Connection terminated due to connection timeout' + } + + // this client won’t be released, so move on immediately + this._pulseQueue() + + if (!pendingItem.timedOut) { + pendingItem.callback(err, undefined, NOOP) + } + } else { + this.log('new client connected') + + if (this.options.maxLifetimeSeconds !== 0) { + const maxLifetimeTimeout = setTimeout(() => { + this.log('ending client due to expired lifetime') + this._expired.add(client) + const idleIndex = this._idle.findIndex((idleItem) => idleItem.client === client) + if (idleIndex !== -1) { + this._acquireClient( + client, + new PendingItem((err, client, clientRelease) => clientRelease()), + idleListener, + false + ) + } + }, this.options.maxLifetimeSeconds * 1000) + + maxLifetimeTimeout.unref() + client.once('end', () => clearTimeout(maxLifetimeTimeout)) + } + + return this._acquireClient(client, pendingItem, idleListener, true) + } + }) + } + + // acquire a client for a pending work item + _acquireClient(client, pendingItem, idleListener, isNew) { + if (isNew) { + this.emit('connect', client) + } + + this.emit('acquire', client) + + client.release = this._releaseOnce(client, idleListener) + + client.removeListener('error', idleListener) + + if (!pendingItem.timedOut) { + if (isNew && this.options.verify) { + this.options.verify(client, (err) => { + if (err) { + client.release(err) + return pendingItem.callback(err, undefined, NOOP) + } + + pendingItem.callback(undefined, client, client.release) + }) + } else { + pendingItem.callback(undefined, client, client.release) + } + } else { + if (isNew && this.options.verify) { + this.options.verify(client, client.release) + } else { + client.release() + } + } + } + + // returns a function that wraps _release and throws if called more than once + _releaseOnce(client, idleListener) { + let released = false + + return (err) => { + if (released) { + throwOnDoubleRelease() + } + + released = true + this._release(client, idleListener, err) + } + } + + // release a client back to the poll, include an error + // to remove it from the pool + _release(client, idleListener, err) { + client.on('error', idleListener) + + client._poolUseCount = (client._poolUseCount || 0) + 1 + + this.emit('release', err, client) + + // TODO(bmc): expose a proper, public interface _queryable and _ending + if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) { + if (client._poolUseCount >= this.options.maxUses) { + this.log('remove expended client') + } + this._remove(client) + this._pulseQueue() + return + } + + const isExpired = this._expired.has(client) + if (isExpired) { + this.log('remove expired client') + this._expired.delete(client) + this._remove(client) + this._pulseQueue() + return + } + + // idle timeout + let tid + if (this.options.idleTimeoutMillis) { + tid = setTimeout(() => { + this.log('remove idle client') + this._remove(client) + }, this.options.idleTimeoutMillis) + + if (this.options.allowExitOnIdle) { + // allow Node to exit if this is all that's left + tid.unref() + } + } + + if (this.options.allowExitOnIdle) { 
+ client.unref() + } + + this._idle.push(new IdleItem(client, idleListener, tid)) + this._pulseQueue() + } + + query(text, values, cb) { + // guard clause against passing a function as the first parameter + if (typeof text === 'function') { + const response = promisify(this.Promise, text) + setImmediate(function () { + return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported')) + }) + return response.result + } + + // allow plain text query without values + if (typeof values === 'function') { + cb = values + values = undefined + } + const response = promisify(this.Promise, cb) + cb = response.callback + + this.connect((err, client) => { + if (err) { + return cb(err) + } + + let clientReleased = false + const onError = (err) => { + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + cb(err) + } + + client.once('error', onError) + this.log('dispatching query') + try { + client.query(text, values, (err, res) => { + this.log('query dispatched') + client.removeListener('error', onError) + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + if (err) { + return cb(err) + } + return cb(undefined, res) + }) + } catch (err) { + client.release(err) + return cb(err) + } + }) + return response.result + } + + end(cb) { + this.log('ending') + if (this.ending) { + const err = new Error('Called end on pool more than once') + return cb ? cb(err) : this.Promise.reject(err) + } + this.ending = true + const promised = promisify(this.Promise, cb) + this._endCallback = promised.callback + this._pulseQueue() + return promised.result + } + + get waitingCount() { + return this._pendingQueue.length + } + + get idleCount() { + return this._idle.length + } + + get expiredCount() { + return this._clients.reduce((acc, client) => acc + (this._expired.has(client) ? 1 : 0), 0) + } + + get totalCount() { + return this._clients.length + } +} +module.exports = Pool diff --git a/node_modules/pg-pool/package.json b/node_modules/pg-pool/package.json new file mode 100644 index 0000000..b587407 --- /dev/null +++ b/node_modules/pg-pool/package.json @@ -0,0 +1,41 @@ +{ + "name": "pg-pool", + "version": "3.6.2", + "description": "Connection pool for node-postgres", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": " node_modules/.bin/mocha" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-pool" + }, + "keywords": [ + "pg", + "postgres", + "pool", + "database" + ], + "author": "Brian M. 
Carlson", + "license": "MIT", + "bugs": { + "url": "https://github.com/brianc/node-pg-pool/issues" + }, + "homepage": "https://github.com/brianc/node-pg-pool#readme", + "devDependencies": { + "bluebird": "3.4.1", + "co": "4.6.0", + "expect.js": "0.3.1", + "lodash": "^4.17.11", + "mocha": "^7.1.2", + "pg-cursor": "^1.3.0" + }, + "peerDependencies": { + "pg": ">=8.0" + }, + "gitHead": "b03c071d2d15af259e1e008e9628191c865e58fa" +} diff --git a/node_modules/pg-pool/test/bring-your-own-promise.js b/node_modules/pg-pool/test/bring-your-own-promise.js new file mode 100644 index 0000000..e905ccc --- /dev/null +++ b/node_modules/pg-pool/test/bring-your-own-promise.js @@ -0,0 +1,42 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const BluebirdPromise = require('bluebird') + +const Pool = require('../') + +const checkType = (promise) => { + expect(promise).to.be.a(BluebirdPromise) + return promise.catch((e) => undefined) +} + +describe('Bring your own promise', function () { + it( + 'uses supplied promise for operations', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise }) + const client1 = yield checkType(pool.connect()) + client1.release() + yield checkType(pool.query('SELECT NOW()')) + const client2 = yield checkType(pool.connect()) + // TODO - make sure pg supports BYOP as well + client2.release() + yield checkType(pool.end()) + }) + ) + + it( + 'uses promises in errors', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise, port: 48484 }) + yield checkType(pool.connect()) + yield checkType(pool.end()) + yield checkType(pool.connect()) + yield checkType(pool.query()) + yield checkType(pool.end()) + }) + ) +}) diff --git a/node_modules/pg-pool/test/connection-strings.js b/node_modules/pg-pool/test/connection-strings.js new file mode 100644 index 0000000..de45830 --- /dev/null +++ b/node_modules/pg-pool/test/connection-strings.js @@ -0,0 +1,29 @@ +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('Connection strings', function () { + it('pool delegates connectionString property to client', function (done) { + const connectionString = 'postgres://foo:bar@baz:1234/xur' + + const pool = new Pool({ + // use a fake client so we can check we're passed the connectionString + Client: function (args) { + expect(args.connectionString).to.equal(connectionString) + return { + connect: function (cb) { + cb(new Error('testing')) + }, + on: function () {}, + } + }, + connectionString: connectionString, + }) + + pool.connect(function (err, client) { + expect(err).to.not.be(undefined) + done() + }) + }) +}) diff --git a/node_modules/pg-pool/test/connection-timeout.js b/node_modules/pg-pool/test/connection-timeout.js new file mode 100644 index 0000000..05e8931 --- /dev/null +++ b/node_modules/pg-pool/test/connection-timeout.js @@ -0,0 +1,229 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const before = require('mocha').before +const after = require('mocha').after + +const Pool = require('../') + +describe('connection timeout', () => { + const connectionFailure = new Error('Temporary connection failure') + + before((done) => { + this.server = net.createServer((socket) => { + socket.on('data', () => { + // discard any 
buffered data or the server wont terminate + }) + }) + + this.server.listen(() => { + this.port = this.server.address().port + done() + }) + }) + + after((done) => { + this.server.close(done) + }) + + it('should callback with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect((err, client, release) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(client).to.equal(undefined) + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it('should reject promise with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect().catch((err) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it( + 'should handle multiple timeouts', + co.wrap( + function* () { + const errors = [] + const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' }) + for (var i = 0; i < 15; i++) { + try { + yield pool.connect() + } catch (e) { + errors.push(e) + } + } + expect(errors).to.have.length(15) + }.bind(this) + ) + ) + + it('should timeout on checkout of used connection', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should not break further pending checkouts on a timeout', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 200, max: 1 }) + pool.connect((err, client, releaseOuter) => { + expect(err).to.be(undefined) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + releaseOuter() + }) + + setTimeout(() => { + pool.connect((err, client, releaseInner) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + releaseInner() + pool.end(done) + }) + }, 100) + }) + }) + + it('should timeout on query if all clients are busy', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should recover from timeout errors', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + }) + + it('continues processing after a connection failure', (done) => { + const Client = require('pg').Client + const orgConnect = Client.prototype.connect + let called = false + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (!called) { + called = true + + return setTimeout(() => { + cb(connectionFailure) + }, 100) + } + // And pass-through the second call + orgConnect.call(this, cb) + 
} + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('releases newly connected clients if the queued already timed out', (done) => { + const Client = require('pg').Client + + const orgConnect = Client.prototype.connect + + let connection = 0 + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (connection === 0) { + connection++ + + return setTimeout(() => { + cb(connectionFailure) + }, 300) + } + + // And second connect taking > connection timeout + if (connection === 1) { + connection++ + + return setTimeout(() => { + orgConnect.call(this, cb) + }, 1000) + } + + orgConnect.call(this, cb) + } + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + // Direct connect + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + }) + + // Queued + let called = 0 + pool.connect((err, client, release) => { + // Verify the callback is only called once + expect(called++).to.be(0) + expect(err).to.be.an(Error) + + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) +}) diff --git a/node_modules/pg-pool/test/ending.js b/node_modules/pg-pool/test/ending.js new file mode 100644 index 0000000..e1839b4 --- /dev/null +++ b/node_modules/pg-pool/test/ending.js @@ -0,0 +1,40 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool ending', () => { + it('ends without being used', (done) => { + const pool = new Pool() + pool.end(done) + }) + + it('ends with a promise', () => { + return new Pool().end() + }) + + it( + 'ends with clients', + co.wrap(function* () { + const pool = new Pool() + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + + it( + 'allows client to finish', + co.wrap(function* () { + const pool = new Pool() + const query = pool.query('SELECT $1::text as name', ['brianc']) + yield pool.end() + const res = yield query + expect(res.rows[0].name).to.equal('brianc') + }) + ) +}) diff --git a/node_modules/pg-pool/test/error-handling.js b/node_modules/pg-pool/test/error-handling.js new file mode 100644 index 0000000..7b15708 --- /dev/null +++ b/node_modules/pg-pool/test/error-handling.js @@ -0,0 +1,260 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool error handling', function () { + it('Should complete these queries without dying', function (done) { + const pool = new Pool() + let errors = 0 + let shouldGet = 0 + function runErrorQuery() { + shouldGet++ + return new Promise(function (resolve, reject) { + pool + .query("SELECT 'asd'+1 ") + .then(function (res) { + reject(res) // this should always error + }) + .catch(function (err) { + errors++ + resolve(err) + }) + }) + } + const ps = [] + for (let i = 0; i < 5; i++) { + ps.push(runErrorQuery()) + } + 
Promise.all(ps).then(function () { + expect(shouldGet).to.eql(errors) + pool.end(done) + }) + }) + + it('Catches errors in client.query', async function () { + let caught = false + const pool = new Pool() + try { + await pool.query(null) + } catch (e) { + caught = true + } + pool.end() + expect(caught).to.be(true) + }) + + describe('calling release more than once', () => { + it( + 'should throw each time', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + client.release() + expect(() => client.release()).to.throwError() + expect(() => client.release()).to.throwError() + return yield pool.end() + }) + ) + + it('should throw each time with callbacks', function (done) { + const pool = new Pool() + + pool.connect(function (err, client, clientDone) { + expect(err).not.to.be.an(Error) + clientDone() + + expect(() => clientDone()).to.throwError() + expect(() => clientDone()).to.throwError() + + pool.end(done) + }) + }) + }) + + describe('using an ended pool', () => { + it('rejects all additional promises', (done) => { + const pool = new Pool() + const promises = [] + pool.end().then(() => { + const squash = (promise) => promise.catch((e) => 'okay!') + promises.push(squash(pool.connect())) + promises.push(squash(pool.query('SELECT NOW()'))) + promises.push(squash(pool.end())) + Promise.all(promises).then((res) => { + expect(res).to.eql(['okay!', 'okay!', 'okay!']) + done() + }) + }) + }) + + it('returns an error on all additional callbacks', (done) => { + const pool = new Pool() + pool.end(() => { + pool.query('SELECT *', (err) => { + expect(err).to.be.an(Error) + pool.connect((err) => { + expect(err).to.be.an(Error) + pool.end((err) => { + expect(err).to.be.an(Error) + done() + }) + }) + }) + }) + }) + }) + + describe('error from idle client', () => { + it( + 'removes client from pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + client.release() + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) => { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError.message).to.equal('expected') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('error from in-use client', () => { + it( + 'keeps the client in the pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) => { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError).not.to.be.ok() + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + client.release() + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('passing a function to pool.query', () => { + it('calls back with error', (done) => { + const pool = new Pool() + console.log('passing fn to query') + 
pool.query((err) => { + expect(err).to.be.an(Error) + pool.end(done) + }) + }) + }) + + describe('pool with lots of errors', () => { + it( + 'continues to work and provide new clients', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + const errors = [] + for (var i = 0; i < 20; i++) { + try { + yield pool.query('invalid sql') + } catch (err) { + errors.push(err) + } + } + expect(errors).to.have.length(20) + expect(pool.idleCount).to.equal(0) + expect(pool.query).to.be.a(Function) + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + }) + + it('should continue with queued items after a connection failure', (done) => { + const closeServer = net + .createServer((socket) => { + socket.destroy() + }) + .unref() + + closeServer.listen(() => { + const pool = new Pool({ max: 1, port: closeServer.address().port, host: 'localhost' }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + expect(err.code).to.be('ECONNRESET') + } + }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + expect(err.code).to.be('ECONNRESET') + } + closeServer.close(() => { + pool.end(done) + }) + }) + }) + }) + + it('handles post-checkout client failures in pool.query', (done) => { + const pool = new Pool({ max: 1 }) + pool.on('error', () => { + // We double close the connection in this test, prevent exception caused by that + }) + pool.query('SELECT pg_sleep(5)', [], (err) => { + expect(err).to.be.an(Error) + done() + }) + + setTimeout(() => { + pool._clients[0].end() + }, 1000) + }) +}) diff --git a/node_modules/pg-pool/test/events.js b/node_modules/pg-pool/test/events.js new file mode 100644 index 0000000..809c215 --- /dev/null +++ b/node_modules/pg-pool/test/events.js @@ -0,0 +1,124 @@ +'use strict' + +const expect = require('expect.js') +const EventEmitter = require('events').EventEmitter +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('events', function () { + it('emits connect before callback', function (done) { + const pool = new Pool() + let emittedClient = false + pool.on('connect', function (client) { + emittedClient = client + }) + + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + pool.end() + expect(client).to.be(emittedClient) + done() + }) + }) + + it('emits "connect" only with a successful connection', function () { + const pool = new Pool({ + // This client will always fail to connect + Client: mockClient({ + connect: function (cb) { + process.nextTick(() => { + cb(new Error('bad news')) + }) + }, + }), + }) + pool.on('connect', function () { + throw new Error('should never get here') + }) + return pool.connect().catch((e) => expect(e.message).to.equal('bad news')) + }) + + it('emits acquire every time a client is acquired', function (done) { + const pool = new Pool() + let acquireCount = 0 + pool.on('acquire', function (client) { + expect(client).to.be.ok() + acquireCount++ + }) + for (let i = 0; i < 10; i++) { + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + }) + pool.query('SELECT now()') + } + setTimeout(function () { + expect(acquireCount).to.be(20) + pool.end(done) + }, 100) + }) + + it('emits release every time a client is released', function (done) { + const pool = new Pool() + let releaseCount = 0 + pool.on('release', function (err, client) { + expect(err 
instanceof Error).not.to.be(true) + expect(client).to.be.ok() + releaseCount++ + }) + const promises = [] + for (let i = 0; i < 10; i++) { + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + }) + promises.push(pool.query('SELECT now()')) + } + Promise.all(promises).then(() => { + pool.end(() => { + expect(releaseCount).to.be(20) + done() + }) + }) + }) + + it('emits release with an error if client is released due to an error', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + expect(err).to.equal(undefined) + const releaseError = new Error('problem') + pool.once('release', function (err, errClient) { + expect(err).to.equal(releaseError) + expect(errClient).to.equal(client) + pool.end(done) + }) + release(releaseError) + }) + }) + + it('emits error and client if an idle client in the pool hits an error', function (done) { + const pool = new Pool() + pool.connect(function (err, client) { + expect(err).to.equal(undefined) + client.release() + setImmediate(function () { + client.emit('error', new Error('problem')) + }) + pool.once('error', function (err, errClient) { + expect(err.message).to.equal('problem') + expect(errClient).to.equal(client) + done() + }) + }) + }) +}) + +function mockClient(methods) { + return function () { + const client = new EventEmitter() + Object.assign(client, methods) + return client + } +} diff --git a/node_modules/pg-pool/test/idle-timeout-exit.js b/node_modules/pg-pool/test/idle-timeout-exit.js new file mode 100644 index 0000000..dbfccf3 --- /dev/null +++ b/node_modules/pg-pool/test/idle-timeout-exit.js @@ -0,0 +1,20 @@ +// This test is meant to be spawned from idle-timeout.js +if (module === require.main) { + const allowExitOnIdle = process.env.ALLOW_EXIT_ON_IDLE === '1' + const Pool = require('../index') + + const pool = new Pool({ + maxLifetimeSeconds: 2, + idleTimeoutMillis: 200, + ...(allowExitOnIdle ? 
{ allowExitOnIdle: true } : {}), + }) + pool.query('SELECT NOW()', (err, res) => console.log('completed first')) + pool.on('remove', () => { + console.log('removed') + done() + }) + + setTimeout(() => { + pool.query('SELECT * from generate_series(0, 1000)', (err, res) => console.log('completed second')) + }, 50) +} diff --git a/node_modules/pg-pool/test/idle-timeout.js b/node_modules/pg-pool/test/idle-timeout.js new file mode 100644 index 0000000..0bb0975 --- /dev/null +++ b/node_modules/pg-pool/test/idle-timeout.js @@ -0,0 +1,118 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const { fork } = require('child_process') +const path = require('path') + +const Pool = require('../') + +const wait = (time) => new Promise((resolve) => setTimeout(resolve, time)) + +describe('idle timeout', () => { + it('should timeout and remove the client', (done) => { + const pool = new Pool({ idleTimeoutMillis: 10 }) + pool.query('SELECT NOW()') + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + done() + }) + }) + + it( + 'times out and removes clients when others are also removed', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 10 }) + const clientA = yield pool.connect() + const clientB = yield pool.connect() + clientA.release() + clientB.release(new Error()) + + const removal = new Promise((resolve) => { + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + resolve() + }) + }) + + const timeout = wait(100).then(() => Promise.reject(new Error('Idle timeout failed to occur'))) + + try { + yield Promise.race([removal, timeout]) + } finally { + pool.end() + } + }) + ) + + it( + 'can remove idle clients and recreate them', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) + const results = [] + for (var i = 0; i < 20; i++) { + let query = pool.query('SELECT NOW()') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + results.push(yield query) + yield wait(2) + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + } + expect(results).to.have.length(20) + }) + ) + + it( + 'does not time out clients which are used', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) + const results = [] + for (var i = 0; i < 20; i++) { + let client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.idleCount).to.equal(0) + yield wait(10) + results.push(yield client.query('SELECT NOW()')) + client.release() + expect(pool.idleCount).to.equal(1) + expect(pool.totalCount).to.equal(1) + } + expect(results).to.have.length(20) + return pool.end() + }) + ) + + it('unrefs the connections and timeouts so the program can exit when idle when the allowExitOnIdle option is set', function (done) { + const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], { + silent: true, + env: { ...process.env, ALLOW_EXIT_ON_IDLE: '1' }, + }) + let result = '' + child.stdout.setEncoding('utf8') + child.stdout.on('data', (chunk) => (result += chunk)) + child.on('error', (err) => done(err)) + child.on('close', () => { + expect(result).to.equal('completed first\ncompleted second\n') + done() + }) + }) + + it('keeps old behavior when allowExitOnIdle option is not set', function (done) { + const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], { + silent: true, + }) + let result = '' + 
child.stdout.setEncoding('utf8') + child.stdout.on('data', (chunk) => (result += chunk)) + child.on('error', (err) => done(err)) + child.on('close', () => { + expect(result).to.equal('completed first\ncompleted second\nremoved\n') + done() + }) + }) +}) diff --git a/node_modules/pg-pool/test/index.js b/node_modules/pg-pool/test/index.js new file mode 100644 index 0000000..57a68e0 --- /dev/null +++ b/node_modules/pg-pool/test/index.js @@ -0,0 +1,226 @@ +'use strict' +const expect = require('expect.js') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool', function () { + describe('with callbacks', function () { + it('works totally unconfigured', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + if (err) return done(err) + client.query('SELECT NOW()', function (err, res) { + release() + if (err) return done(err) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('passes props to clients', function (done) { + const pool = new Pool({ binary: true }) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(client.binary).to.eql(true) + pool.end(done) + }) + }) + + it('can run a query with a callback without parameters', function (done) { + const pool = new Pool() + pool.query('SELECT 1 as num', function (err, res) { + expect(res.rows[0]).to.eql({ num: 1 }) + pool.end(function () { + done(err) + }) + }) + }) + + it('can run a query with a callback', function (done) { + const pool = new Pool() + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res.rows[0]).to.eql({ name: 'brianc' }) + pool.end(function () { + done(err) + }) + }) + }) + + it('passes connection errors to callback', function (done) { + const pool = new Pool({ port: 53922 }) + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res).to.be(undefined) + expect(err).to.be.an(Error) + // a connection error should not polute the pool with a dead client + expect(pool.totalCount).to.equal(0) + pool.end(function (err) { + done(err) + }) + }) + }) + + it('does not pass client to error callback', function (done) { + const pool = new Pool({ port: 58242 }) + pool.connect(function (err, client, release) { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + expect(release).to.be.a(Function) + pool.end(done) + }) + }) + + it('removes client if it errors in background', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + client.testString = 'foo' + setTimeout(function () { + client.emit('error', new Error('on purpose')) + }, 10) + }) + pool.on('error', function (err) { + expect(err.message).to.be('on purpose') + expect(err.client).to.not.be(undefined) + expect(err.client.testString).to.be('foo') + err.client.connection.stream.on('end', function () { + pool.end(done) + }) + }) + }) + + it('should not change given options', function (done) { + const options = { max: 10 } + const pool = new Pool(options) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(options).to.eql({ max: 10 }) + pool.end(done) + }) + }) + + it('does not create promises when connecting', function (done) { + const pool = new Pool() + const returnValue = pool.connect(function (err, client, release) { + release() + if (err) return done(err) + pool.end(done) + }) + 
expect(returnValue).to.be(undefined) + }) + + it('does not create promises when querying', function (done) { + const pool = new Pool() + const returnValue = pool.query('SELECT 1 as num', function (err) { + pool.end(function () { + done(err) + }) + }) + expect(returnValue).to.be(undefined) + }) + + it('does not create promises when ending', function (done) { + const pool = new Pool() + const returnValue = pool.end(done) + expect(returnValue).to.be(undefined) + }) + + it('never calls callback syncronously', function (done) { + const pool = new Pool() + pool.connect((err, client) => { + if (err) throw err + client.release() + setImmediate(() => { + let called = false + pool.connect((err, client) => { + if (err) throw err + called = true + client.release() + setImmediate(() => { + pool.end(done) + }) + }) + expect(called).to.equal(false) + }) + }) + }) + }) + + describe('with promises', function () { + it('connects, queries, and disconnects', function () { + const pool = new Pool() + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + client.release() + return pool.end() + }) + }) + }) + + it('executes a query directly', () => { + const pool = new Pool() + return pool.query('SELECT $1::text as name', ['hi']).then((res) => { + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('hi') + return pool.end() + }) + }) + + it('properly pools clients', function () { + const pool = new Pool({ poolSize: 9 }) + const promises = _.times(30, function () { + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + client.release() + return res + }) + }) + }) + return Promise.all(promises).then(function (res) { + expect(res).to.have.length(30) + expect(pool.totalCount).to.be(9) + return pool.end() + }) + }) + + it('supports just running queries', function () { + const pool = new Pool({ poolSize: 9 }) + const text = 'select $1::text as name' + const values = ['hi'] + const query = { text: text, values: values } + const promises = _.times(30, () => pool.query(query)) + return Promise.all(promises).then(function (queries) { + expect(queries).to.have.length(30) + return pool.end() + }) + }) + + it('recovers from query errors', function () { + const pool = new Pool() + + const errors = [] + const promises = _.times(30, () => { + return pool.query('SELECT asldkfjasldkf').catch(function (e) { + errors.push(e) + }) + }) + return Promise.all(promises).then(() => { + expect(errors).to.have.length(30) + expect(pool.totalCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + return pool.query('SELECT $1::text as name', ['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + return pool.end() + }) + }) + }) + }) +}) diff --git a/node_modules/pg-pool/test/lifetime-timeout.js b/node_modules/pg-pool/test/lifetime-timeout.js new file mode 100644 index 0000000..3e69042 --- /dev/null +++ b/node_modules/pg-pool/test/lifetime-timeout.js @@ -0,0 +1,48 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const path = require('path') + +const Pool = require('../') + +describe('lifetime timeout', () => { + it('connection lifetime should expire and remove the client', (done) => { + const pool = new Pool({ maxLifetimeSeconds: 1 }) + pool.query('SELECT NOW()') + pool.on('remove', () => { + 
console.log('expired while idle - on-remove event') + expect(pool.expiredCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + done() + }) + }) + it('connection lifetime should expire and remove the client after the client is done working', (done) => { + const pool = new Pool({ maxLifetimeSeconds: 1 }) + pool.query('SELECT pg_sleep(1.4)') + pool.on('remove', () => { + console.log('expired while busy - on-remove event') + expect(pool.expiredCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + done() + }) + }) + it( + 'can remove expired clients and recreate them', + co.wrap(function* () { + const pool = new Pool({ maxLifetimeSeconds: 1 }) + let query = pool.query('SELECT pg_sleep(1.4)') + expect(pool.expiredCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + yield query + yield new Promise((resolve) => setTimeout(resolve, 100)) + expect(pool.expiredCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + yield pool.query('SELECT NOW()') + expect(pool.expiredCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + }) + ) +}) diff --git a/node_modules/pg-pool/test/logging.js b/node_modules/pg-pool/test/logging.js new file mode 100644 index 0000000..839603b --- /dev/null +++ b/node_modules/pg-pool/test/logging.js @@ -0,0 +1,20 @@ +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('logging', function () { + it('logs to supplied log function if given', function () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ log: log }) + return pool.query('SELECT NOW()').then(function () { + expect(messages.length).to.be.greaterThan(0) + return pool.end() + }) + }) +}) diff --git a/node_modules/pg-pool/test/max-uses.js b/node_modules/pg-pool/test/max-uses.js new file mode 100644 index 0000000..c94ddec --- /dev/null +++ b/node_modules/pg-pool/test/max-uses.js @@ -0,0 +1,98 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('maxUses', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'getting a connection a second time returns the same connection and releasing it also closes it', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + expect(client2._ending).to.equal(false) + client2.release() + expect(client2._ending).to.equal(true) + return yield pool.end() + }) + ) + + it( + 'getting a connection a third time returns a new connection', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + client2.release() + const client3 = yield pool.connect() + expect(client3).not.to.equal(client2) + client3.release() + return yield pool.end() + }) + ) + + it( + 'getting a connection from a pending request 
gets a fresh client when the released candidate is expended', + co.wrap(function* () { + const pool = new Pool({ max: 1, maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client1 = yield pool.connect() + pool.connect().then((client2) => { + expect(client2).to.equal(client1) + expect(pool.waitingCount).to.equal(1) + // Releasing the client this time should also expend it since maxUses is 2, causing client3 to be a fresh client + client2.release() + }) + const client3Promise = pool.connect().then((client3) => { + // client3 should be a fresh client since client2's release caused the first client to be expended + expect(pool.waitingCount).to.equal(0) + expect(client3).not.to.equal(client1) + return client3.release() + }) + // There should be two pending requests since we have 3 connect requests but a max size of 1 + expect(pool.waitingCount).to.equal(2) + // Releasing the client should not yet expend it since maxUses is 2 + client1.release() + yield client3Promise + return yield pool.end() + }) + ) + + it( + 'logs when removing an expended client', + co.wrap(function* () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ maxUses: 1, log }) + const client = yield pool.connect() + client.release() + expect(messages).to.contain('remove expended client') + return yield pool.end() + }) + ) +}) diff --git a/node_modules/pg-pool/test/releasing-clients.js b/node_modules/pg-pool/test/releasing-clients.js new file mode 100644 index 0000000..da8e09c --- /dev/null +++ b/node_modules/pg-pool/test/releasing-clients.js @@ -0,0 +1,54 @@ +const Pool = require('../') + +const expect = require('expect.js') +const net = require('net') + +describe('releasing clients', () => { + it('removes a client which cannot be queried', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // reach into the client and sever its connection + client.connection.end() + + // wait for the client to error out + const err = await new Promise((resolve) => client.once('error', resolve)) + expect(err).to.be.ok() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + + // try to return it to the pool - this removes it because its broken + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) + + it('removes a client which is ending', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // end the client gracefully (but you shouldn't do this with pooled clients) + client.end() + + // try to return it to the pool + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) +}) diff --git a/node_modules/pg-pool/test/setup.js b/node_modules/pg-pool/test/setup.js new file mode 100644 index 0000000..811e956 --- /dev/null +++ b/node_modules/pg-pool/test/setup.js @@ -0,0 +1,10 @@ +const crash = (reason) => { + process.on(reason, (err) => { + console.error(reason, 
err.stack) + process.exit(-1) + }) +} + +crash('unhandledRejection') +crash('uncaughtError') +crash('warning') diff --git a/node_modules/pg-pool/test/sizing.js b/node_modules/pg-pool/test/sizing.js new file mode 100644 index 0000000..e7863ba --- /dev/null +++ b/node_modules/pg-pool/test/sizing.js @@ -0,0 +1,58 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool size of 1', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'can create a single client and use it multiple times', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const wait = pool.connect() + expect(pool.waitingCount).to.equal(1) + client.release() + const client2 = yield wait + expect(client).to.equal(client2) + client2.release() + return yield pool.end() + }) + ) + + it( + 'can only send 1 query at a time', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + + // the query text column name changed in PostgreSQL 9.2 + const versionResult = yield pool.query('SHOW server_version_num') + const version = parseInt(versionResult.rows[0].server_version_num, 10) + const queryColumn = version < 90200 ? 'current_query' : 'query' + + const queryText = 'SELECT COUNT(*) as counts FROM pg_stat_activity WHERE ' + queryColumn + ' = $1' + const queries = _.times(20, () => pool.query(queryText, [queryText])) + const results = yield Promise.all(queries) + const counts = results.map((res) => parseInt(res.rows[0].counts, 10)) + expect(counts).to.eql(_.times(20, (i) => 1)) + return yield pool.end() + }) + ) +}) diff --git a/node_modules/pg-pool/test/submittable.js b/node_modules/pg-pool/test/submittable.js new file mode 100644 index 0000000..7a1574d --- /dev/null +++ b/node_modules/pg-pool/test/submittable.js @@ -0,0 +1,19 @@ +'use strict' +const Cursor = require('pg-cursor') +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('submittle', () => { + it('is returned from the query method', false, (done) => { + const pool = new Pool() + const cursor = pool.query(new Cursor('SELECT * from generate_series(0, 1000)')) + cursor.read((err, rows) => { + expect(err).to.be(undefined) + expect(!!rows).to.be.ok() + cursor.close(done) + }) + }) +}) diff --git a/node_modules/pg-pool/test/timeout.js b/node_modules/pg-pool/test/timeout.js new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/pg-pool/test/verify.js b/node_modules/pg-pool/test/verify.js new file mode 100644 index 0000000..9331e1a --- /dev/null +++ b/node_modules/pg-pool/test/verify.js @@ -0,0 +1,24 @@ +'use strict' +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('verify', () => { + it('verifies a client with a callback', (done) => { + const pool = new Pool({ + verify: (client, cb) => { + cb(new Error('nope')) + }, + }) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + 
expect(err.message).to.be('nope') + pool.end() + done() + }) + }) +}) diff --git a/node_modules/pg-protocol/LICENSE b/node_modules/pg-protocol/LICENSE new file mode 100644 index 0000000..5c14056 --- /dev/null +++ b/node_modules/pg-protocol/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2021 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pg-protocol/README.md b/node_modules/pg-protocol/README.md new file mode 100644 index 0000000..8c52e40 --- /dev/null +++ b/node_modules/pg-protocol/README.md @@ -0,0 +1,3 @@ +# pg-protocol + +Low level postgres wire protocol parser and serializer written in Typescript. Used by node-postgres. Needs more documentation. :smile: diff --git a/node_modules/pg-protocol/dist/b.d.ts b/node_modules/pg-protocol/dist/b.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/pg-protocol/dist/b.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/b.js b/node_modules/pg-protocol/dist/b.js new file mode 100644 index 0000000..5f5efb8 --- /dev/null +++ b/node_modules/pg-protocol/dist/b.js @@ -0,0 +1,25 @@ +"use strict"; +// file for microbenchmarking +Object.defineProperty(exports, "__esModule", { value: true }); +const buffer_writer_1 = require("./buffer-writer"); +const buffer_reader_1 = require("./buffer-reader"); +const LOOPS = 1000; +let count = 0; +let start = Date.now(); +const writer = new buffer_writer_1.Writer(); +const reader = new buffer_reader_1.BufferReader(); +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]); +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start); + return; + } + count++; + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer); + reader.cstring(); + } + setImmediate(run); +}; +run(); +//# sourceMappingURL=b.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/b.js.map b/node_modules/pg-protocol/dist/b.js.map new file mode 100644 index 0000000..cddd15e --- /dev/null +++ b/node_modules/pg-protocol/dist/b.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAAwC;AAExC,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;AACtB,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QAC/B,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-reader.d.ts b/node_modules/pg-protocol/dist/buffer-reader.d.ts new file mode 100644 index 0000000..8970d77 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.d.ts @@ -0,0 +1,14 @@ +/// +export declare class BufferReader { + private offset; + private buffer; + private encoding; + constructor(offset?: number); + setBuffer(offset: number, buffer: Buffer): void; + int16(): number; + byte(): number; + int32(): number; + string(length: number): string; + cstring(): string; + bytes(length: number): Buffer; +} diff --git a/node_modules/pg-protocol/dist/buffer-reader.js b/node_modules/pg-protocol/dist/buffer-reader.js new file mode 100644 index 0000000..ef633b1 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BufferReader = void 0; +const emptyBuffer = Buffer.allocUnsafe(0); +class BufferReader { + constructor(offset = 0) { + this.offset = offset; + this.buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding? 
+ this.encoding = 'utf-8'; + } + setBuffer(offset, buffer) { + this.offset = offset; + this.buffer = buffer; + } + int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + string(length) { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + cstring() { + const start = this.offset; + let end = start; + while (this.buffer[end++] !== 0) { } + this.offset = end; + return this.buffer.toString(this.encoding, start, end - 1); + } + bytes(length) { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } +} +exports.BufferReader = BufferReader; +//# sourceMappingURL=buffer-reader.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-reader.js.map b/node_modules/pg-protocol/dist/buffer-reader.js.map new file mode 100644 index 0000000..04d5b1d --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlDD,oCAkDC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-writer.d.ts b/node_modules/pg-protocol/dist/buffer-writer.d.ts new file mode 100644 index 0000000..4ac41e6 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.d.ts @@ -0,0 +1,16 @@ +/// +export declare class Writer { + private size; + private buffer; + private offset; + private headerPosition; + constructor(size?: number); + private ensure; + addInt32(num: number): Writer; + addInt16(num: number): Writer; + addCString(string: string): Writer; + addString(string?: string): Writer; + add(otherBuffer: Buffer): Writer; + private join; + flush(code?: number): Buffer; +} diff --git 
a/node_modules/pg-protocol/dist/buffer-writer.js b/node_modules/pg-protocol/dist/buffer-writer.js new file mode 100644 index 0000000..16fd616 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.js @@ -0,0 +1,81 @@ +"use strict"; +//binary data writer tuned for encoding binary specific to the postgres binary protocol +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Writer = void 0; +class Writer { + constructor(size = 256) { + this.size = size; + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(size); + } + ensure(size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.allocUnsafe(newSize); + oldBuffer.copy(this.buffer); + } + } + addInt32(num) { + this.ensure(4); + this.buffer[this.offset++] = (num >>> 24) & 0xff; + this.buffer[this.offset++] = (num >>> 16) & 0xff; + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addInt16(num) { + this.ensure(2); + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addCString(string) { + if (!string) { + this.ensure(1); + } + else { + var len = Buffer.byteLength(string); + this.ensure(len + 1); // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8'); + this.offset += len; + } + this.buffer[this.offset++] = 0; // null terminator + return this; + } + addString(string = '') { + var len = Buffer.byteLength(string); + this.ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; + } + add(otherBuffer) { + this.ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; + } + join(code) { + if (code) { + this.buffer[this.headerPosition] = code; + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1); + this.buffer.writeInt32BE(length, this.headerPosition + 1); + } + return this.buffer.slice(code ? 
0 : 5, this.offset); + } + flush(code) { + var result = this.join(code); + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(this.size); + return result; + } +} +exports.Writer = Writer; +//# sourceMappingURL=buffer-writer.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-writer.js.map b/node_modules/pg-protocol/dist/buffer-writer.js.map new file mode 100644 index 0000000..fc6e650 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAChD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC3B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YAC/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;YACzC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACnC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC
,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.d.ts b/node_modules/pg-protocol/dist/inbound-parser.test.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.js b/node_modules/pg-protocol/dist/inbound-parser.test.js new file mode 100644 index 0000000..dbed7fa --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.js @@ -0,0 +1,511 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const test_buffers_1 = __importDefault(require("./testing/test-buffers")); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +const _1 = require("."); +const assert_1 = __importDefault(require("assert")); +const stream_1 = require("stream"); +var authOkBuffer = test_buffers_1.default.authenticationOk(); +var paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8'); +var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); +var backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2); +var commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3'); +var parseCompleteBuffer = test_buffers_1.default.parseComplete(); +var bindCompleteBuffer = test_buffers_1.default.bindComplete(); +var portalSuspendedBuffer = test_buffers_1.default.portalSuspended(); +var addRow = function (bufferList, name, offset) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0); // format code, 0 => text +}; +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +}; +var oneRowDescBuff = test_buffers_1.default.rowDescription([row1]); +row1.name = 'bang'; +var twoRowBuf = test_buffers_1.default.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]); +var emptyRowFieldBuf = new buffer_list_1.default().addInt16(0).join(true, 'D'); +var emptyRowFieldBuf = test_buffers_1.default.dataRow([]); +var oneFieldBuf = new buffer_list_1.default() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D'); +var oneFieldBuf = test_buffers_1.default.dataRow(['test']); +var 
expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +}; +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +}; +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +}; +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +}; +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +}; +var emptyRowDescriptionBuffer = new buffer_list_1.default() + .addInt16(0) // number of fields + .join(true, 'T'); +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +}; +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +}; +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +}; +var emptyParameterDescriptionBuffer = new buffer_list_1.default() + .addInt16(0) // number of parameters + .join(true, 't'); +var oneParameterDescBuf = test_buffers_1.default.parameterDescription([1111]); +var twoParameterDescBuf = test_buffers_1.default.parameterDescription([2222, 3333]); +var expectedEmptyParameterDescriptionMessage = { + name: 'parameterDescription', + length: 6, + parameterCount: 0, + dataTypeIDs: [], +}; +var expectedOneParameterMessage = { + name: 'parameterDescription', + length: 10, + parameterCount: 1, + dataTypeIDs: [1111], +}; +var expectedTwoParameterMessage = { + name: 'parameterDescription', + length: 14, + parameterCount: 2, + dataTypeIDs: [2222, 3333], +}; +var testForMessage = function (buffer, expectedMessage) { + it('recieves and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([buffer]); + const [lastMessage] = messages; + for (const key in expectedMessage) { + assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]); + } + })); +}; +var plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword(); +var md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password(); +var SASLBuffer = test_buffers_1.default.authenticationSASL(); +var SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue(); +var SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal(); +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +}; +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +}; +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +}; +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +}; +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +}; +var notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom'); +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +}; +const parseBuffers = (buffers) => 
__awaiter(void 0, void 0, void 0, function* () { + const stream = new stream_1.PassThrough(); + for (const buffer of buffers) { + stream.write(buffer); + } + stream.end(); + const msgs = []; + yield (0, _1.parse)(stream, (msg) => msgs.push(msg)); + return msgs; +}); +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage); + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage); + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage); + testForMessage(SASLBuffer, expectedSASLMessage); + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]); + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage); + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]); + testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage); + testForMessage(paramStatusBuffer, expectedParameterStatusMessage); + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage); + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage); + testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage); + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage); + testForMessage(test_buffers_1.default.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }); + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }); + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage); + testForMessage(oneRowDescBuff, expectedOneRowMessage); + testForMessage(twoRowBuf, expectedTwoRowMessage); + }); + describe('parameterDescription messages', function () { + testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage); + testForMessage(oneParameterDescBuf, expectedOneParameterMessage); + testForMessage(twoParameterDescBuf, expectedTwoParameterMessage); + }); + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }); + }); + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }); + }); + }); + describe('notice message', function () { + // this uses the same logic as error message + var buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]); + testForMessage(buff, { + name: 'notice', + code: 'code', + }); + }); + testForMessage(test_buffers_1.default.error([]), { + name: 'error', + }); + describe('with all the fields', function () { + var buffer = test_buffers_1.default.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: 
'101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', + value: 'alsdkf', + }, + ]); + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }); + }); + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(test_buffers_1.default.closeComplete(), { + name: 'closeComplete', + }); + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }); + }); + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }); + }); + describe('copy', () => { + testForMessage(test_buffers_1.default.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }); + testForMessage(test_buffers_1.default.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }); + testForMessage(test_buffers_1.default.copyDone(), { + name: 'copyDone', + length: 4, + }); + testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }); + }); + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']); + it('parses when full buffer comes in', function () { + return __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + assert_1.default.equal(message.fields[4], '!'); + }); + }); + var testMessageRecievedAfterSpiltAt = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + 
assert_1.default.equal(message.fields[4], '!'); + }); + }; + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6); + }); + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2); + }); + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5); + }); + }); + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = test_buffers_1.default.dataRow(['!']); + var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length); + dataRowBuffer.copy(fullBuffer, 0, 0); + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0); + var verifyMessages = function (messages) { + assert_1.default.strictEqual(messages.length, 2); + assert_1.default.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }); + assert_1.default.equal(messages[0].fields[0], '!'); + assert_1.default.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }); + }; + // sanity check + it('recieves both messages when packet is not split', function () { + return __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + verifyMessages(messages); + }); + }); + var splitAndVerifyTwoMessages = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([firstBuffer, secondBuffer]); + verifyMessages(messages); + }); + }; + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11); + }); + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + ]); + }); + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]); + }); + }); + }); +}); +//# sourceMappingURL=inbound-parser.test.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.js.map b/node_modules/pg-protocol/dist/inbound-parser.test.js.map new file mode 100644 index 0000000..10e776a --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"inbound-parser.test.js","sourceRoot":"","sources":["../src/inbound-parser.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,0EAA4C;AAC5C,wEAA8C;AAC9C,wBAAyB;AACzB,oDAA2B;AAC3B,mCAAoC;AAGpC,IAAI,YAAY,GAAG,sBAAO,CAAC,gBAAgB,EAAE,CAAA;AAC7C,IAAI,iBAAiB,GAAG,sBAAO,CAAC,eAAe,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;AAC1E,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,oBAAoB,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;AACvD,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,CAAC,UAAU,CAAC,CAAA;AAC/D,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,kBAAkB,GAAG,sBAAO,CAAC,YAAY,EAAE,CAAA;AAC/C,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,EAAE,CAAA;AAErD,IAAI,MAAM,GAAG,UAAU,UAAsB,EAAE,IAAY,EAAE,MAAc;IACzE,OAAO,UAAU;SACd,UAAU,CAAC,IAAI,CAAC,CAAC,aAAa;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,WAAW;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,6BAA6B;SAChD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gCAAgC;SACnD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,yBAAyB;AAC1C,CAAC,CAAA;AAED,IAAI,IAAI,GAAG;IACT,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,CAAC;IACV,eAAe,EAAE,CAAC;IAClB,UAAU,EAAE,CAAC;IACb,YAAY,EAAE,CAAC;IACf,YAAY,EAAE,CAAC;IACf,UAAU,EAAE,CAAC;CACd,CAAA;AACD,IAAI,cAAc,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AACnD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAA;AAElB,IAAI,SAAS,GAAG,sBAAO,CAAC,cAAc,CAAC;IACrC,IAAI;IACJ;QACE,IAAI,EAAE,OAAO;QACb,OAAO,EAAE,EAAE;QACX,eAAe,EAAE,EAAE;QACnB,UAAU,EAAE,EAAE;QACd,YAAY,EAAE,EAAE;QAChB,YAAY,EAAE,EAAE;QAChB,UAAU,EAAE,CAAC;KACd;CACF,CAAC,CAAA;AAEF,IAAI,gBAAgB,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAEnE,IAAI,gBAAgB,GAAG,sBAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;AAE1C,IAAI,WAAW,GAAG,IAAI,qBAAU,EAAE;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,4BAA4B;KACxC,UAAU,CAAC,MAAM,CAAC;KAClB,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,WAAW,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;AAE3C,IAAI,iCAAiC,GAAG;IACtC,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,aAAa,EAAE,iBAAiB;IAChC,cAAc,EAAE,MAAM;IACtB,MAAM,EAAE,EAAE;CACX,CAAA;AAED,IAAI,6BAA6B,GAAG;IAClC,IAAI,EAAE,gBAAgB;IACtB,SAAS,EAAE,CAAC;IACZ,SAAS,EAAE,CAAC;CACb,CAAA;AAED,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;IACT,MAAM,EAAE,GAAG;CACZ,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,EAAE;IACV,IAAI,EAAE,UAAU;CACjB,CAAA;AACD,IAAI,yBAAyB,GAAG,IAAI,qBAAU,EAAE;KAC7C,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,kCAAkC,GAAG;IACvC,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,CAAC;IACT,UAAU,EAAE,CAAC;IACb,MAAM,EAAE,EAAE;CACX,CAAA;AACD,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;QACD;YACE,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,EAAE;YACX,QAAQ,EAAE,EAAE;YACZ,UAAU,EAAE,EAAE;YACd,YAAY,EAAE,EAAE;YAChB,gBAAgB,EAAE,EAAE;YACpB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,+BAA+B,GAAG,IAAI,qBAAU,EAAE;KACnD,QAAQ,CAAC,CAAC,CAAC,CAAC,uBAAuB;KACnC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,mBAAmB,GAAG,sBAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AAE9D,IAAI,mBAAmB,GAAG,sBAAO
,CAAC,oBAAoB,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEpE,IAAI,wCAAwC,GAAG;IAC7C,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,CAAC;IACT,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,EAAE;CAChB,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,CAAC;CACpB,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC;CAC1B,CAAA;AAED,IAAI,cAAc,GAAG,UAAU,MAAc,EAAE,eAAoB;IACjE,EAAE,CAAC,sBAAsB,GAAG,eAAe,CAAC,IAAI,EAAE,GAAS,EAAE;QAC3D,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAA;QAE9B,KAAK,MAAM,GAAG,IAAI,eAAe,EAAE;YACjC,gBAAM,CAAC,SAAS,CAAE,WAAmB,CAAC,GAAG,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAA;SAClE;IACH,CAAC,CAAA,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,IAAI,mBAAmB,GAAG,sBAAO,CAAC,+BAA+B,EAAE,CAAA;AACnE,IAAI,iBAAiB,GAAG,sBAAO,CAAC,yBAAyB,EAAE,CAAA;AAC3D,IAAI,UAAU,GAAG,sBAAO,CAAC,kBAAkB,EAAE,CAAA;AAC7C,IAAI,kBAAkB,GAAG,sBAAO,CAAC,0BAA0B,EAAE,CAAA;AAC7D,IAAI,eAAe,GAAG,sBAAO,CAAC,uBAAuB,EAAE,CAAA;AAEvD,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,iCAAiC;CACxC,CAAA;AAED,IAAI,0BAA0B,GAAG;IAC/B,IAAI,EAAE,2BAA2B;IACjC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;CAChC,CAAA;AAED,IAAI,mBAAmB,GAAG;IACxB,IAAI,EAAE,oBAAoB;IAC1B,UAAU,EAAE,CAAC,eAAe,CAAC;CAC9B,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,4BAA4B;IAClC,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,wBAAwB,GAAG;IAC7B,IAAI,EAAE,yBAAyB;IAC/B,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,0BAA0B,GAAG,sBAAO,CAAC,YAAY,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;AACtE,IAAI,mCAAmC,GAAG;IACxC,IAAI,EAAE,cAAc;IACpB,SAAS,EAAE,CAAC;IACZ,OAAO,EAAE,IAAI;IACb,OAAO,EAAE,MAAM;CAChB,CAAA;AAED,MAAM,YAAY,GAAG,CAAO,OAAiB,EAA6B,EAAE;IAC1E,MAAM,MAAM,GAAG,IAAI,oBAAW,EAAE,CAAA;IAChC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;KACrB;IACD,MAAM,CAAC,GAAG,EAAE,CAAA;IACZ,MAAM,IAAI,GAAqB,EAAE,CAAA;IACjC,MAAM,IAAA,QAAK,EAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5C,OAAO,IAAI,CAAA;AACb,CAAC,CAAA,CAAA;AAED,QAAQ,CAAC,gBAAgB,EAAE;IACzB,cAAc,CAAC,YAAY,EAAE,iCAAiC,CAAC,CAAA;IAC/D,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,iBAAiB,EAAE,0BAA0B,CAAC,CAAA;IAC7D,cAAc,CAAC,UAAU,EAAE,mBAAmB,CAAC,CAAA;IAC/C,cAAc,CAAC,kBAAkB,EAAE,2BAA2B,CAAC,CAAA;IAE/D,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,0BAA0B,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,kBAAkB,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjG,cAAc,CAAC,0BAA0B,EAAE,2BAA2B,CAAC,CAAA;IAEvE,cAAc,CAAC,eAAe,EAAE,wBAAwB,CAAC,CAAA;IAEzD,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,uBAAuB,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC3F,cAAc,CAAC,uBAAuB,EAAE,wBAAwB,CAAC,CAAA;IAEjE,cAAc,CAAC,iBAAiB,EAAE,8BAA8B,CAAC,CAAA;IACjE,cAAc,CAAC,oBAAoB,EAAE,6BAA6B,CAAC,CAAA;IACnE,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,qBAAqB,EAAE,8BAA8B,CAAC,CAAA;IACrE,cAAc,CAAC,0BAA0B,EAAE,mCAAmC,CAAC,CAAA;IAC/E,cAAc,CAAC,sBAAO,CAAC,UAAU,EAAE,EAAE;QACnC,IAAI,EAAE,YAAY;QAClB,MAAM,EAAE,CAAC;KACV,CAAC,CAAA;IAEF,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QAC9C,IAAI,EAAE,QAAQ;KACf,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,cAAc,CAAC,yBAAyB,EAAE,kCAAkC,CAAC,CAAA;QAC7E,cAAc,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAA;QACrD,cAAc,CAAC,SAAS,EAAE,qBAAqB,CAAC,CAAA;IAClD,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,+BAA+B,EAAE;QACxC,cAAc,CAAC,+BAA+B,EAAE,wCAAwC,CAAC,CAAA;QACzF,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,C
AAA;QAChE,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,CAAA;IAClE,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,cAAc,EAAE;QACvB,QAAQ,CAAC,mBAAmB,EAAE;YAC5B,cAAc,CAAC,gBAAgB,EAAE;gBAC/B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;aACd,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QAEF,QAAQ,CAAC,8BAA8B,EAAE;YACvC,cAAc,CAAC,WAAW,EAAE;gBAC1B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,CAAC,MAAM,CAAC;aACjB,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,gBAAgB,EAAE;QACzB,4CAA4C;QAC5C,IAAI,IAAI,GAAG,sBAAO,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,CAAA;QACzD,cAAc,CAAC,IAAI,EAAE;YACnB,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;QAChC,IAAI,EAAE,OAAO;KACd,CAAC,CAAA;IAEF,QAAQ,CAAC,qBAAqB,EAAE;QAC9B,IAAI,MAAM,GAAG,sBAAO,CAAC,KAAK,CAAC;YACzB;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,QAAQ;aAChB;SACF,CAAC,CAAA;QAEF,cAAc,CAAC,MAAM,EAAE;YACrB,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,OAAO;YACjB,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;YAClB,MAAM,EAAE,SAAS;YACjB,IAAI,EAAE,MAAM;YACZ,QAAQ,EAAE,KAAK;YACf,gBAAgB,EAAE,KAAK;YACvB,aAAa,EAAE,OAAO;YACtB,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;SACnB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,mBAAmB,EAAE;QAClC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,aAAa,EAAE,EAAE;QACtC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,QAAQ,CAAC,iCAAiC,EAAE;QAC1C,cAAc,CAAC,qBAAqB,EAAE;YACpC,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,kCAAkC,EAAE;QAC3C,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE;YAC1D,IAAI,EAAE,kBAAkB;YACxB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE;QACpB,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;SACpB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;SACvB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,EAAE,EAAE;YACjC,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;YACvD,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;YACT,KAAK,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC9B,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,uEAAuE;IACvE,uEAAuE;IACvE,yBAAyB;IACzB,QAAQ,CAAC,sCAAsC,EAAE;QAC/C,IAA
I,UAAU,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAEtE,EAAE,CAAC,kCAAkC,EAAE;;gBACrC,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,+BAA+B,GAAG,UAAgB,KAAa;;gBACjE,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAA;QAED,EAAE,CAAC,iCAAiC,EAAE;YACpC,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,gCAAgC,EAAE;YACnC,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;QACxD,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wCAAwC,EAAE;QACjD,IAAI,aAAa,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;QAC1C,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;QACjD,IAAI,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QAChF,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QACpC,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAE7D,IAAI,cAAc,GAAG,UAAU,QAAe;YAC5C,gBAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;YACtC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,EAAE;gBACV,MAAM,EAAE,CAAC,GAAG,CAAC;aACd,CAAC,CAAA;YACF,gBAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACxC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,eAAe;gBACrB,MAAM,EAAE,CAAC;gBACT,MAAM,EAAE,GAAG;aACZ,CAAC,CAAA;QACJ,CAAC,CAAA;QACD,eAAe;QACf,EAAE,CAAC,iDAAiD,EAAE;;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,yBAAyB,GAAG,UAAgB,KAAa;;gBAC3D,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC,CAAA;gBAChE,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAA;QAED,QAAQ,CAAC,6CAA6C,EAAE;YACtD,EAAE,CAAC,
eAAe,EAAE;gBAClB,OAAO,yBAAyB,CAAC,EAAE,CAAC,CAAA;YACtC,CAAC,CAAC,CAAA;YACF,EAAE,CAAC,cAAc,EAAE;gBACjB,OAAO,OAAO,CAAC,GAAG,CAAC;oBACjB,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;iBACjD,CAAC,CAAA;YACJ,CAAC,CAAC,CAAA;YAEF,EAAE,CAAC,YAAY,EAAE;gBACf,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAC,CAAC,EAAE,yBAAyB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAClF,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/index.d.ts b/node_modules/pg-protocol/dist/index.d.ts new file mode 100644 index 0000000..3961def --- /dev/null +++ b/node_modules/pg-protocol/dist/index.d.ts @@ -0,0 +1,6 @@ +/// +import { DatabaseError } from './messages'; +import { serialize } from './serializer'; +import { MessageCallback } from './parser'; +export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise; +export { serialize, DatabaseError }; diff --git a/node_modules/pg-protocol/dist/index.js b/node_modules/pg-protocol/dist/index.js new file mode 100644 index 0000000..7eca3bf --- /dev/null +++ b/node_modules/pg-protocol/dist/index.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DatabaseError = exports.serialize = exports.parse = void 0; +const messages_1 = require("./messages"); +Object.defineProperty(exports, "DatabaseError", { enumerable: true, get: function () { return messages_1.DatabaseError; } }); +const serializer_1 = require("./serializer"); +Object.defineProperty(exports, "serialize", { enumerable: true, get: function () { return serializer_1.serialize; } }); +const parser_1 = require("./parser"); +function parse(stream, callback) { + const parser = new parser_1.Parser(); + stream.on('data', (buffer) => parser.parse(buffer, callback)); + return new Promise((resolve) => stream.on('end', () => resolve())); +} +exports.parse = parse; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/index.js.map b/node_modules/pg-protocol/dist/index.js.map new file mode 100644 index 0000000..5db25b6 --- /dev/null +++ b/node_modules/pg-protocol/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA0D;AAUtC,8FAVK,wBAAa,OAUL;AATjC,6CAAwC;AAS/B,0FATA,sBAAS,OASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/messages.d.ts b/node_modules/pg-protocol/dist/messages.d.ts new file mode 100644 index 0000000..f8f2e63 --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.d.ts @@ -0,0 +1,162 @@ +/// +export declare type Mode = 'text' | 'binary'; +export declare type MessageName = 'parseComplete' | 'bindComplete' | 'closeComplete' | 'noData' | 'portalSuspended' | 'replicationStart' | 'emptyQuery' | 'copyDone' | 'copyData' | 'rowDescription' | 'parameterDescription' | 'parameterStatus' | 'backendKeyData' | 'notification' | 'readyForQuery' | 'commandComplete' | 'dataRow' | 'copyInResponse' | 'copyOutResponse' | 'authenticationOk' | 'authenticationMD5Password' | 
'authenticationCleartextPassword' | 'authenticationSASL' | 'authenticationSASLContinue' | 'authenticationSASLFinal' | 'error' | 'notice'; +export interface BackendMessage { + name: MessageName; + length: number; +} +export declare const parseComplete: BackendMessage; +export declare const bindComplete: BackendMessage; +export declare const closeComplete: BackendMessage; +export declare const noData: BackendMessage; +export declare const portalSuspended: BackendMessage; +export declare const replicationStart: BackendMessage; +export declare const emptyQuery: BackendMessage; +export declare const copyDone: BackendMessage; +interface NoticeOrError { + message: string | undefined; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export declare class DatabaseError extends Error implements NoticeOrError { + readonly length: number; + readonly name: MessageName; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; + constructor(message: string, length: number, name: MessageName); +} +export declare class CopyDataMessage { + readonly length: number; + readonly chunk: Buffer; + readonly name = "copyData"; + constructor(length: number, chunk: Buffer); +} +export declare class CopyResponse { + readonly length: number; + readonly name: MessageName; + readonly binary: boolean; + readonly columnTypes: number[]; + constructor(length: number, name: MessageName, binary: boolean, columnCount: number); +} +export declare class Field { + readonly name: string; + readonly tableID: number; + readonly columnID: number; + readonly dataTypeID: number; + readonly dataTypeSize: number; + readonly dataTypeModifier: number; + readonly format: Mode; + constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode); +} +export declare class RowDescriptionMessage { + readonly length: number; + readonly fieldCount: number; + readonly name: MessageName; + readonly fields: Field[]; + constructor(length: number, fieldCount: number); +} +export declare class ParameterDescriptionMessage { + readonly length: number; + readonly parameterCount: number; + readonly name: MessageName; + readonly dataTypeIDs: number[]; + constructor(length: number, parameterCount: number); +} +export declare class ParameterStatusMessage { + readonly length: number; + readonly parameterName: string; + readonly parameterValue: string; + readonly name: MessageName; + constructor(length: number, parameterName: string, parameterValue: string); +} +export declare class AuthenticationMD5Password implements BackendMessage { + readonly length: number; + readonly salt: Buffer; + readonly name: 
MessageName; + constructor(length: number, salt: Buffer); +} +export declare class BackendKeyDataMessage { + readonly length: number; + readonly processID: number; + readonly secretKey: number; + readonly name: MessageName; + constructor(length: number, processID: number, secretKey: number); +} +export declare class NotificationResponseMessage { + readonly length: number; + readonly processId: number; + readonly channel: string; + readonly payload: string; + readonly name: MessageName; + constructor(length: number, processId: number, channel: string, payload: string); +} +export declare class ReadyForQueryMessage { + readonly length: number; + readonly status: string; + readonly name: MessageName; + constructor(length: number, status: string); +} +export declare class CommandCompleteMessage { + readonly length: number; + readonly text: string; + readonly name: MessageName; + constructor(length: number, text: string); +} +export declare class DataRowMessage { + length: number; + fields: any[]; + readonly fieldCount: number; + readonly name: MessageName; + constructor(length: number, fields: any[]); +} +export declare class NoticeMessage implements BackendMessage, NoticeOrError { + readonly length: number; + readonly message: string | undefined; + constructor(length: number, message: string | undefined); + readonly name = "notice"; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export {}; diff --git a/node_modules/pg-protocol/dist/messages.js b/node_modules/pg-protocol/dist/messages.js new file mode 100644 index 0000000..b9f2c44 --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.js @@ -0,0 +1,160 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0; +exports.parseComplete = { + name: 'parseComplete', + length: 5, +}; +exports.bindComplete = { + name: 'bindComplete', + length: 5, +}; +exports.closeComplete = { + name: 'closeComplete', + length: 5, +}; +exports.noData = { + name: 'noData', + length: 5, +}; +exports.portalSuspended = { + name: 'portalSuspended', + length: 5, +}; +exports.replicationStart = { + name: 'replicationStart', + length: 4, +}; +exports.emptyQuery = { + name: 'emptyQuery', + length: 4, +}; +exports.copyDone = { + name: 'copyDone', + length: 4, +}; +class DatabaseError extends Error { + constructor(message, length, name) { + super(message); + this.length = length; + this.name = name; + } +} +exports.DatabaseError = DatabaseError; +class CopyDataMessage { + constructor(length, chunk) { + this.length = 
length; + this.chunk = chunk; + this.name = 'copyData'; + } +} +exports.CopyDataMessage = CopyDataMessage; +class CopyResponse { + constructor(length, name, binary, columnCount) { + this.length = length; + this.name = name; + this.binary = binary; + this.columnTypes = new Array(columnCount); + } +} +exports.CopyResponse = CopyResponse; +class Field { + constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) { + this.name = name; + this.tableID = tableID; + this.columnID = columnID; + this.dataTypeID = dataTypeID; + this.dataTypeSize = dataTypeSize; + this.dataTypeModifier = dataTypeModifier; + this.format = format; + } +} +exports.Field = Field; +class RowDescriptionMessage { + constructor(length, fieldCount) { + this.length = length; + this.fieldCount = fieldCount; + this.name = 'rowDescription'; + this.fields = new Array(this.fieldCount); + } +} +exports.RowDescriptionMessage = RowDescriptionMessage; +class ParameterDescriptionMessage { + constructor(length, parameterCount) { + this.length = length; + this.parameterCount = parameterCount; + this.name = 'parameterDescription'; + this.dataTypeIDs = new Array(this.parameterCount); + } +} +exports.ParameterDescriptionMessage = ParameterDescriptionMessage; +class ParameterStatusMessage { + constructor(length, parameterName, parameterValue) { + this.length = length; + this.parameterName = parameterName; + this.parameterValue = parameterValue; + this.name = 'parameterStatus'; + } +} +exports.ParameterStatusMessage = ParameterStatusMessage; +class AuthenticationMD5Password { + constructor(length, salt) { + this.length = length; + this.salt = salt; + this.name = 'authenticationMD5Password'; + } +} +exports.AuthenticationMD5Password = AuthenticationMD5Password; +class BackendKeyDataMessage { + constructor(length, processID, secretKey) { + this.length = length; + this.processID = processID; + this.secretKey = secretKey; + this.name = 'backendKeyData'; + } +} +exports.BackendKeyDataMessage = BackendKeyDataMessage; +class NotificationResponseMessage { + constructor(length, processId, channel, payload) { + this.length = length; + this.processId = processId; + this.channel = channel; + this.payload = payload; + this.name = 'notification'; + } +} +exports.NotificationResponseMessage = NotificationResponseMessage; +class ReadyForQueryMessage { + constructor(length, status) { + this.length = length; + this.status = status; + this.name = 'readyForQuery'; + } +} +exports.ReadyForQueryMessage = ReadyForQueryMessage; +class CommandCompleteMessage { + constructor(length, text) { + this.length = length; + this.text = text; + this.name = 'commandComplete'; + } +} +exports.CommandCompleteMessage = CommandCompleteMessage; +class DataRowMessage { + constructor(length, fields) { + this.length = length; + this.fields = fields; + this.name = 'dataRow'; + this.fieldCount = fields.length; + } +} +exports.DataRowMessage = DataRowMessage; +class NoticeMessage { + constructor(length, message) { + this.length = length; + this.message = message; + this.name = 'notice'; + } +} +exports.NoticeMessage = NoticeMessage; +//# sourceMappingURL=messages.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/messages.js.map b/node_modules/pg-protocol/dist/messages.js.map new file mode 100644 index 0000000..9cf2581 --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,EAAE,cAAc;IACpB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,EAAE,YAAY;IAClB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YACE,OAAe,EACC,MAAc,EACd,IAAiB;QAEjC,KAAK,CAAC,OAAO,CAAC,CAAA;QAHE,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;IAGnC,CAAC;CACF;AAxBD,sCAwBC;AAED,MAAa,eAAe;IAE1B,YACkB,MAAc,EACd,KAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAQ;QAHf,SAAI,GAAG,UAAU,CAAA;IAI9B,CAAC;CACL;AAND,0CAMC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YACkB,MAAc,EACd,UAAkB;QADlB,WAAM,GAAN,MAAM,CAAQ;QACd,eAAU,GAAV,UAAU,CAAQ;QAJpB,SAAI,GAAgB,gBAAgB,CAAA;QAMlD,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AATD,sDASC;AAED,MAAa,2BAA2B;IAGtC,YACkB,MAAc,EACd,cAAsB;QADtB,WAAM,GAAN,MAAM,CAAQ;QACd,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,sBAAsB,CAAA;QAMxD,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;IACnD,CAAC;CACF;AATD,kEASC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,iBAAiB,CAAA;IAKlD,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,2BAA2B,CAAA;IAI5D,CAAC;CACL;AAND,8DAMC;AAED,MAAa,qBAAqB;IAEhC,YACkB,MAAc,EACd,SAAiB,EACjB,SAAiB;QAFjB,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,cAAS,GAAT,SAAS,CAAQ;QAJnB,SAAI,GAAgB,gBAAgB,CAAA;IAKjD,CAAC;CACL;AAPD,sDAOC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,GAAgB,cAAc,CAAA;IAM/C,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YACkB,MAAc,EACd,MAAc;QADd,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAQ;QAHhB,SAAI,GAAgB,eAAe,CAAA;IAIhD,CAAC;CACL;AAND,oDAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,iBAAiB,CAAA;IAIlD,CAAC;CACL;AAND,wDAMC;AAED,MAAa,cAAc;IAGzB,YACS,MAAc,EACd,MAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAO;QAHN,SAAI,GAAgB,SAAS,CAAA;QAK3C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AATD,wCASC;AAED,MAAa,aAAa;IACxB,YACkB,MAAc,EACd,OAA2B;QAD3B,WAAM,GAAN,MAAM,CAAQ;QACd,YAAO,GAAP,OAAO,CAAoB;QAE7B,SAAI,GAAG,QAAQ,CAAA;IAD5B,CAAC;CAkBL;AAtBD,sCAsBC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts b/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts new 
file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.js b/node_modules/pg-protocol/dist/outbound-serializer.test.js new file mode 100644 index 0000000..18d1eab --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.js @@ -0,0 +1,248 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __importDefault(require("assert")); +const serializer_1 = require("./serializer"); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +describe('serializer', () => { + it('builds startup message', function () { + const actual = serializer_1.serialize.startup({ + user: 'brian', + database: 'bang', + }); + assert_1.default.deepEqual(actual, new buffer_list_1.default() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true)); + }); + it('builds password message', function () { + const actual = serializer_1.serialize.password('!'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('!').join(true, 'p')); + }); + it('builds request ssl message', function () { + const actual = serializer_1.serialize.requestSsl(); + const expected = new buffer_list_1.default().addInt32(80877103).join(true); + assert_1.default.deepEqual(actual, expected); + }); + it('builds SASLInitialResponseMessage message', function () { + const actual = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p')); + }); + it('builds SCRAMClientFinalMessage message', function () { + const actual = serializer_1.serialize.sendSCRAMClientFinalMessage('data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addString('data').join(true, 'p')); + }); + it('builds query message', function () { + var txt = 'select * from boom'; + const actual = serializer_1.serialize.query(txt); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString(txt).join(true, 'Q')); + }); + describe('parse message', () => { + it('builds parse message', function () { + const actual = serializer_1.serialize.parse({ text: '!' 
}); + var expected = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds parse message with named query', function () { + const actual = serializer_1.serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }); + var expected = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('with multiple parameters', function () { + const actual = serializer_1.serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }); + var expected = new buffer_list_1.default() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('bind messages', function () { + it('with no values', function () { + const actual = serializer_1.serialize.bind(); + var expectedBuffer = new buffer_list_1.default() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('with named statement, portal, and values', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('with custom valueMapper', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + valueMapper: () => null, + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('with named statement, portal, and buffer value', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serializer_1.serialize.execute(); + var expectedBuffer = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E'); + 
assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('for named portal with row limit', function () { + const actual = serializer_1.serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }); + var expectedBuffer = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('builds flush command', function () { + const actual = serializer_1.serialize.flush(); + var expected = new buffer_list_1.default().join(true, 'H'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds sync command', function () { + const actual = serializer_1.serialize.sync(); + var expected = new buffer_list_1.default().join(true, 'S'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds end command', function () { + const actual = serializer_1.serialize.end(); + var expected = Buffer.from([0x58, 0, 0, 0, 4]); + assert_1.default.deepEqual(actual, expected); + }); + describe('builds describe command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.describe({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.describe({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('builds close command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.close({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.close({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serializer_1.serialize.copyData(Buffer.from([1, 2, 3])); + const expected = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy fail', () => { + const actual = serializer_1.serialize.copyFail('err!'); + const expected = new buffer_list_1.default().addCString('err!').join(true, 'f'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy done', () => { + const actual = serializer_1.serialize.copyDone(); + const expected = new buffer_list_1.default().join(true, 'c'); + assert_1.default.deepEqual(actual, expected); + }); + }); + it('builds cancel message', () => { + const actual = serializer_1.serialize.cancel(3, 4); + const expected = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true); + assert_1.default.deepEqual(actual, expected); + }); +}); +//# sourceMappingURL=outbound-serializer.test.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.js.map b/node_modules/pg-protocol/dist/outbound-serializer.test.js.map new file mode 100644 index 0000000..3dcb1c8 --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"outbound-serializer.test.js","sourceRoot":"","sources":["../src/outbound-serializer.test.ts"],"names":[],"mappings":";;;;;AAAA,oDAA2B;AAC3B,6CAAwC;AACxC,wEAA8C;AAE9C,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;IAC1B,EAAE,CAAC,wBAAwB,EAAE;QAC3B,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;YAC/B,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,MAAM;SACjB,CAAC,CAAA;QACF,gBAAM,CAAC,SAAS,CACd,MAAM,EACN,IAAI,qBAAU,EAAE;aACb,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,OAAO,CAAC;aACnB,UAAU,CAAC,UAAU,CAAC;aACtB,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,iBAAiB,CAAC;aAC7B,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,EAAE,CAAC;aACd,IAAI,CAAC,IAAI,CAAC,CACd,CAAA;IACH,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACtC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,4BAA4B,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,UAAU,EAAE,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC/D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,2CAA2C,EAAE;QAC9C,MAAM,MAAM,GAAG,sBAAS,CAAC,8BAA8B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACvE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC7G,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,wCAAwC,EAAE;QAC3C,MAAM,MAAM,GAAG,sBAAS,CAAC,2BAA2B,CAAC,MAAM,CAAC,CAAA;QAC5D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC9E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,IAAI,GAAG,GAAG,oBAAoB,CAAA;QAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACnC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE,GAAG,EAAE;QAC7B,EAAE,CAAC,sBAAsB,EAAE;YACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC1F,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,uCAAuC,EAAE;YAC1C,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,oBAAoB;gBAC1B,KAAK,EAAE,EAAE;aACV,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/G,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,OAAO;gBACb,IAAI,EAAE,oCAAoC;gBAC1C,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;aACpB,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE;iBAC5B,UAAU,CAAC,OAAO,CAAC;iBACnB,UAAU,CAAC,oCAAoC,CAAC;iBAChD,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,gBAAgB,EAAE;YACnB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;YAE/B,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,EAAE,CAAC;iBACd,UAAU,CAAC,EAAE,CAAC;iBACd,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,
CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0CAA0C,EAAE;YAC7C,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;gBAC5B,MAAM,EAAE,MAAM;gBACd,SAAS,EAAE,KAAK;gBAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;aAClC,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;iBACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;iBACnC,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACrB,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;iBACZ,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;iBACxB,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;YACjC,WAAW,EAAE,GAAG,EAAE,CAAC,IAAI;SACxB,CAAC,CAAA;QACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;aAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,gDAAgD,EAAE;QACnD,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACvD,CAAC,CAAA;QACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;aAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC,CAAC,cAAc;aAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aACjC,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wBAAwB,EAAE;QACjC,EAAE,CAAC,qCAAqC,EAAE;YACxC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAChF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,iCAAiC,EAAE;YACpC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;gBAC/B,MAAM,EAAE,oBAAoB;gBAC5B,IAAI,EAAE,GAAG;aACV,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,EAAE,CAAA;QAChC,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,qBAAqB,EAAE;QACxB,MAAM,MAAM,GAAG,sBAAS,CAA
C,IAAI,EAAE,CAAA;QAC/B,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,oBAAoB,EAAE;QACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,GAAG,EAAE,CAAA;QAC9B,IAAI,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;QAC9C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC9D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAChD,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,sBAAsB,EAAE;QAC/B,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC3D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,sBAAsB,EAAE,GAAG,EAAE;YAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACzD,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YACzC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,EAAE,CAAA;YACnC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,uBAAuB,EAAE,GAAG,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAClG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/parser.d.ts b/node_modules/pg-protocol/dist/parser.d.ts new file mode 100644 index 0000000..e2d134b --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.d.ts @@ -0,0 +1,39 @@ +/// +/// +import { TransformOptions } from 'stream'; +import { Mode, BackendMessage } from './messages'; +export declare type Packet = { + code: number; + packet: Buffer; +}; +declare type StreamOptions = TransformOptions & { + mode: 
Mode; +}; +export declare type MessageCallback = (msg: BackendMessage) => void; +export declare class Parser { + private buffer; + private bufferLength; + private bufferOffset; + private reader; + private mode; + constructor(opts?: StreamOptions); + parse(buffer: Buffer, callback: MessageCallback): void; + private mergeBuffer; + private handlePacket; + private parseReadyForQueryMessage; + private parseCommandCompleteMessage; + private parseCopyData; + private parseCopyInMessage; + private parseCopyOutMessage; + private parseCopyMessage; + private parseNotificationMessage; + private parseRowDescriptionMessage; + private parseField; + private parseParameterDescriptionMessage; + private parseDataRowMessage; + private parseParameterStatusMessage; + private parseBackendKeyData; + parseAuthenticationResponse(offset: number, length: number, bytes: Buffer): any; + private parseErrorMessage; +} +export {}; diff --git a/node_modules/pg-protocol/dist/parser.js b/node_modules/pg-protocol/dist/parser.js new file mode 100644 index 0000000..9f6c7cf --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.js @@ -0,0 +1,304 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Parser = void 0; +const messages_1 = require("./messages"); +const buffer_reader_1 = require("./buffer-reader"); +// every message is prefixed with a single bye +const CODE_LENGTH = 1; +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4; +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; +const emptyBuffer = Buffer.allocUnsafe(0); +class Parser { + constructor(opts) { + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + this.reader = new buffer_reader_1.BufferReader(); + if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') { + throw new Error('Binary mode not supported yet'); + } + this.mode = (opts === null || opts === void 0 ? 
void 0 : opts.mode) || 'text'; + } + parse(buffer, callback) { + this.mergeBuffer(buffer); + const bufferFullLength = this.bufferOffset + this.bufferLength; + let offset = this.bufferOffset; + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset]; + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH); + const fullMessageLength = CODE_LENGTH + length; + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer); + callback(message); + offset += fullMessageLength; + } + else { + break; + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + } + else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset; + this.bufferOffset = offset; + } + } + mergeBuffer(buffer) { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength; + const newFullLength = newLength + this.bufferOffset; + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer; + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer; + } + else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2; + while (newLength >= newBufferLength) { + newBufferLength *= 2; + } + newBuffer = Buffer.allocUnsafe(newBufferLength); + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength); + this.buffer = newBuffer; + this.bufferOffset = 0; + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength); + this.bufferLength = newLength; + } + else { + this.buffer = buffer; + this.bufferOffset = 0; + this.bufferLength = buffer.byteLength; + } + } + handlePacket(offset, code, length, bytes) { + switch (code) { + case 50 /* MessageCodes.BindComplete */: + return messages_1.bindComplete; + case 49 /* MessageCodes.ParseComplete */: + return messages_1.parseComplete; + case 51 /* MessageCodes.CloseComplete */: + return messages_1.closeComplete; + case 110 /* MessageCodes.NoData */: + return messages_1.noData; + case 115 /* MessageCodes.PortalSuspended */: + return messages_1.portalSuspended; + case 99 /* MessageCodes.CopyDone */: + return messages_1.copyDone; + case 87 /* MessageCodes.ReplicationStart */: + return messages_1.replicationStart; + case 73 /* MessageCodes.EmptyQuery */: + return messages_1.emptyQuery; + case 68 /* MessageCodes.DataRow */: + return this.parseDataRowMessage(offset, length, bytes); + case 67 /* MessageCodes.CommandComplete */: + return this.parseCommandCompleteMessage(offset, length, bytes); + case 90 /* MessageCodes.ReadyForQuery */: + return this.parseReadyForQueryMessage(offset, length, bytes); + case 65 /* MessageCodes.NotificationResponse */: + return this.parseNotificationMessage(offset, length, bytes); + case 82 /* MessageCodes.AuthenticationResponse */: + return this.parseAuthenticationResponse(offset, length, bytes); + case 83 /* MessageCodes.ParameterStatus */: + return 
this.parseParameterStatusMessage(offset, length, bytes); + case 75 /* MessageCodes.BackendKeyData */: + return this.parseBackendKeyData(offset, length, bytes); + case 69 /* MessageCodes.ErrorMessage */: + return this.parseErrorMessage(offset, length, bytes, 'error'); + case 78 /* MessageCodes.NoticeMessage */: + return this.parseErrorMessage(offset, length, bytes, 'notice'); + case 84 /* MessageCodes.RowDescriptionMessage */: + return this.parseRowDescriptionMessage(offset, length, bytes); + case 116 /* MessageCodes.ParameterDescriptionMessage */: + return this.parseParameterDescriptionMessage(offset, length, bytes); + case 71 /* MessageCodes.CopyIn */: + return this.parseCopyInMessage(offset, length, bytes); + case 72 /* MessageCodes.CopyOut */: + return this.parseCopyOutMessage(offset, length, bytes); + case 100 /* MessageCodes.CopyData */: + return this.parseCopyData(offset, length, bytes); + default: + return new messages_1.DatabaseError('received invalid response: ' + code.toString(16), length, 'error'); + } + } + parseReadyForQueryMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const status = this.reader.string(1); + return new messages_1.ReadyForQueryMessage(length, status); + } + parseCommandCompleteMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const text = this.reader.cstring(); + return new messages_1.CommandCompleteMessage(length, text); + } + parseCopyData(offset, length, bytes) { + const chunk = bytes.slice(offset, offset + (length - 4)); + return new messages_1.CopyDataMessage(length, chunk); + } + parseCopyInMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, 'copyInResponse'); + } + parseCopyOutMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse'); + } + parseCopyMessage(offset, length, bytes, messageName) { + this.reader.setBuffer(offset, bytes); + const isBinary = this.reader.byte() !== 0; + const columnCount = this.reader.int16(); + const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount); + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16(); + } + return message; + } + parseNotificationMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processId = this.reader.int32(); + const channel = this.reader.cstring(); + const payload = this.reader.cstring(); + return new messages_1.NotificationResponseMessage(length, processId, channel, payload); + } + parseRowDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const message = new messages_1.RowDescriptionMessage(length, fieldCount); + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField(); + } + return message; + } + parseField() { + const name = this.reader.cstring(); + const tableID = this.reader.int32(); + const columnID = this.reader.int16(); + const dataTypeID = this.reader.int32(); + const dataTypeSize = this.reader.int16(); + const dataTypeModifier = this.reader.int32(); + const mode = this.reader.int16() === 0 ? 
'text' : 'binary'; + return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode); + } + parseParameterDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const parameterCount = this.reader.int16(); + const message = new messages_1.ParameterDescriptionMessage(length, parameterCount); + for (let i = 0; i < parameterCount; i++) { + message.dataTypeIDs[i] = this.reader.int32(); + } + return message; + } + parseDataRowMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const fields = new Array(fieldCount); + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32(); + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len); + } + return new messages_1.DataRowMessage(length, fields); + } + parseParameterStatusMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const name = this.reader.cstring(); + const value = this.reader.cstring(); + return new messages_1.ParameterStatusMessage(length, name, value); + } + parseBackendKeyData(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processID = this.reader.int32(); + const secretKey = this.reader.int32(); + return new messages_1.BackendKeyDataMessage(length, processID, secretKey); + } + parseAuthenticationResponse(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const code = this.reader.int32(); + // TODO(bmc): maybe better types here + const message = { + name: 'authenticationOk', + length, + }; + switch (code) { + case 0: // AuthenticationOk + break; + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = 'authenticationCleartextPassword'; + } + break; + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = 'authenticationMD5Password'; + const salt = this.reader.bytes(4); + return new messages_1.AuthenticationMD5Password(length, salt); + } + break; + case 10: // AuthenticationSASL + message.name = 'authenticationSASL'; + message.mechanisms = []; + let mechanism; + do { + mechanism = this.reader.cstring(); + if (mechanism) { + message.mechanisms.push(mechanism); + } + } while (mechanism); + break; + case 11: // AuthenticationSASLContinue + message.name = 'authenticationSASLContinue'; + message.data = this.reader.string(length - 8); + break; + case 12: // AuthenticationSASLFinal + message.name = 'authenticationSASLFinal'; + message.data = this.reader.string(length - 8); + break; + default: + throw new Error('Unknown authenticationOk message type ' + code); + } + return message; + } + parseErrorMessage(offset, length, bytes, name) { + this.reader.setBuffer(offset, bytes); + const fields = {}; + let fieldType = this.reader.string(1); + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring(); + fieldType = this.reader.string(1); + } + const messageValue = fields.M; + const message = name === 'notice' ? 
new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name); + message.severity = fields.S; + message.code = fields.C; + message.detail = fields.D; + message.hint = fields.H; + message.position = fields.P; + message.internalPosition = fields.p; + message.internalQuery = fields.q; + message.where = fields.W; + message.schema = fields.s; + message.table = fields.t; + message.column = fields.c; + message.dataType = fields.d; + message.constraint = fields.n; + message.file = fields.F; + message.line = fields.L; + message.routine = fields.R; + return message; + } +} +exports.Parser = Parser; +//# sourceMappingURL=parser.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/parser.js.map b/node_modules/pg-protocol/dist/parser.js.map new file mode 100644 index 0000000..61374ea --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.js","sourceRoot":"","sources":["../src/parser.ts"],"names":[],"mappings":";;;AACA,yCA0BmB;AACnB,mDAA8C;AAG9C,8CAA8C;AAC9C,MAAM,WAAW,GAAG,CAAC,CAAA;AACrB,mEAAmE;AACnE,qCAAqC;AACrC,MAAM,UAAU,GAAG,CAAC,CAAA;AAEpB,MAAM,aAAa,GAAG,WAAW,GAAG,UAAU,CAAA;AAO9C,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAiCzC,MAAa,MAAM;IAOjB,YAAY,IAAoB;QANxB,WAAM,GAAW,WAAW,CAAA;QAC5B,iBAAY,GAAW,CAAC,CAAA;QACxB,iBAAY,GAAW,CAAC,CAAA;QACxB,WAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;QAIjC,IAAI,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,MAAK,QAAQ,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;SACjD;QACD,IAAI,CAAC,IAAI,GAAG,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,KAAI,MAAM,CAAA;IAClC,CAAC;IAEM,KAAK,CAAC,MAAc,EAAE,QAAyB;QACpD,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QACxB,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9D,IAAI,MAAM,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9B,OAAO,MAAM,GAAG,aAAa,IAAI,gBAAgB,EAAE;YACjD,uDAAuD;YACvD,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;YAChC,4EAA4E;YAC5E,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,WAAW,CAAC,CAAA;YAC7D,MAAM,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAA;YAC9C,IAAI,iBAAiB,GAAG,MAAM,IAAI,gBAAgB,EAAE;gBAClD,MAAM,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,GAAG,aAAa,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;gBACpF,QAAQ,CAAC,OAAO,CAAC,CAAA;gBACjB,MAAM,IAAI,iBAAiB,CAAA;aAC5B;iBAAM;gBACL,MAAK;aACN;SACF;QACD,IAAI,MAAM,KAAK,gBAAgB,EAAE;YAC/B,6BAA6B;YAC7B,IAAI,CAAC,MAAM,GAAG,WAAW,CAAA;YACzB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,wCAAwC;YACxC,IAAI,CAAC,YAAY,GAAG,gBAAgB,GAAG,MAAM,CAAA;YAC7C,IAAI,CAAC,YAAY,GAAG,MAAM,CAAA;SAC3B;IACH,CAAC;IAEO,WAAW,CAAC,MAAc;QAChC,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YACzB,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;YACvD,MAAM,aAAa,GAAG,SAAS,GAAG,IAAI,CAAC,YAAY,CAAA;YACnD,IAAI,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE;gBAC1C,wDAAwD;gBACxD,IAAI,SAAiB,CAAA;gBACrB,IAAI,SAAS,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,EAAE;oBACjF,kGAAkG;oBAClG,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;iBACxB;qBAAM;oBACL,+BAA+B;oBAC/B,IAAI,eAAe,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,CAAA;oBAChD,OAAO,SAAS,IAAI,eAAe,EAAE;wBACnC,eAAe,IAAI,CAAC,CAAA;qBACrB;oBACD,SAAS,GAAG,MAAM,CAAC,WAAW,CAAC,eAAe,CAAC,CAAA;iBAChD;gBACD,2CAA2C;gBAC3C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;gBACxF,IAAI,CAAC,MAAM,GAAG,SAAS,CAAA;gBACvB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;aACtB;YACD,+CAA+C;YAC/C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;YAC/D,IAAI,CAAC,YAAY,GAAG,SAAS,CAAA;SAC9B;aAAM;YACL,IAAI,
CAAC,MAAM,GAAG,MAAM,CAAA;YACpB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;SACtC;IACH,CAAC;IAEO,YAAY,CAAC,MAAc,EAAE,IAAY,EAAE,MAAc,EAAE,KAAa;QAC9E,QAAQ,IAAI,EAAE;YACZ;gBACE,OAAO,uBAAY,CAAA;YACrB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,iBAAM,CAAA;YACf;gBACE,OAAO,0BAAe,CAAA;YACxB;gBACE,OAAO,mBAAQ,CAAA;YACjB;gBACE,OAAO,2BAAgB,CAAA;YACzB;gBACE,OAAO,qBAAU,CAAA;YACnB;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC9D;gBACE,OAAO,IAAI,CAAC,wBAAwB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC7D;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAA;YAC/D;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC/D;gBACE,OAAO,IAAI,CAAC,gCAAgC,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACrE;gBACE,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACvD;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAClD;gBACE,OAAO,IAAI,wBAAa,CAAC,6BAA6B,GAAG,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;SAC/F;IACH,CAAC;IAEO,yBAAyB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC7E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACpC,OAAO,IAAI,+BAAoB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IACjD,CAAC;IAEO,aAAa,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACjE,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,0BAAe,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC3C,CAAC;IAEO,kBAAkB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACtE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,gBAAgB,CAAC,CAAA;IACvE,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,iBAAiB,CAAC,CAAA;IACxE,CAAC;IAEO,gBAAgB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,WAAwB;QAC9F,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QACzC,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACvC,MAAM,OAAO,GAAG,IAAI,uBAAY,CAAC,MAAM,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAA;QAC5E,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;YACpC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;SAC7C;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,wBAAwB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC5E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,OAAO,IAAI,sCAA2B,CAAC,MAAM,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC7E,CAAC;IAEO,0BAA0B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,OAAO,GAAG,IAAI,
gCAAqB,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;QAC7D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;SACtC;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,UAAU;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACnC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACxC,MAAM,gBAAgB,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC1D,OAAO,IAAI,gBAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,YAAY,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAA;IAC7F,CAAC;IAEO,gCAAgC,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACpF,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAC1C,MAAM,OAAO,GAAG,IAAI,sCAA2B,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;SAC7C;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,MAAM,GAAU,IAAI,KAAK,CAAC,UAAU,CAAC,CAAA;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;YAC/B,uDAAuD;YACvD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;SACxD;QACD,OAAO,IAAI,yBAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC3C,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACnC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;IACxD,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,OAAO,IAAI,gCAAqB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAA;IAChE,CAAC;IAEM,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAChC,qCAAqC;QACrC,MAAM,OAAO,GAAyB;YACpC,IAAI,EAAE,kBAAkB;YACxB,MAAM;SACP,CAAA;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,CAAC,EAAE,mBAAmB;gBACzB,MAAK;YACP,KAAK,CAAC,EAAE,kCAAkC;gBACxC,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;oBACxB,OAAO,CAAC,IAAI,GAAG,iCAAiC,CAAA;iBACjD;gBACD,MAAK;YACP,KAAK,CAAC,EAAE,4BAA4B;gBAClC,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;oBACzB,OAAO,CAAC,IAAI,GAAG,2BAA2B,CAAA;oBAC1C,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;oBACjC,OAAO,IAAI,oCAAyB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;iBACnD;gBACD,MAAK;YACP,KAAK,EAAE,EAAE,qBAAqB;gBAC5B,OAAO,CAAC,IAAI,GAAG,oBAAoB,CAAA;gBACnC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAA;gBACvB,IAAI,SAAiB,CAAA;gBACrB,GAAG;oBACD,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAEjC,IAAI,SAAS,EAAE;wBACb,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;qBACnC;iBACF,QAAQ,SAAS,EAAC;gBACnB,MAAK;YACP,KAAK,EAAE,EAAE,6BAA6B;gBACpC,OAAO,CAAC,IAAI,GAAG,4BAA4B,CAAA;gBAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP,KAAK,EAAE,EAAE,0BAA0B;gBACjC,OAAO,CAAC,IAAI,GAAG,yBAAyB,CAAA;gBACxC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CA
AC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,IAAI,CAAC,CAAA;SACnE;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,iBAAiB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,IAAiB;QACxF,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAA2B,EAAE,CAAA;QACzC,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrC,OAAO,SAAS,KAAK,IAAI,EAAE;YACzB,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;YACzC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAClC;QAED,MAAM,YAAY,GAAG,MAAM,CAAC,CAAC,CAAA;QAE7B,MAAM,OAAO,GACX,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,wBAAa,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,wBAAa,CAAC,YAAY,EAAE,MAAM,EAAE,IAAI,CAAC,CAAA;QAE7G,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,gBAAgB,GAAG,MAAM,CAAC,CAAC,CAAA;QACnC,OAAO,CAAC,aAAa,GAAG,MAAM,CAAC,CAAC,CAAA;QAChC,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,CAAC,CAAA;QAC7B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,CAAC,CAAA;QAC1B,OAAO,OAAO,CAAA;IAChB,CAAC;CACF;AAvTD,wBAuTC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/serializer.d.ts b/node_modules/pg-protocol/dist/serializer.d.ts new file mode 100644 index 0000000..a9ef64a --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.d.ts @@ -0,0 +1,42 @@ +declare type ParseOpts = { + name?: string; + types?: number[]; + text: string; +}; +declare type ValueMapper = (param: any, index: number) => any; +declare type BindOpts = { + portal?: string; + binary?: boolean; + statement?: string; + values?: any[]; + valueMapper?: ValueMapper; +}; +declare type ExecOpts = { + portal?: string; + rows?: number; +}; +declare type PortalOpts = { + type: 'S' | 'P'; + name?: string; +}; +declare const serialize: { + startup: (opts: Record) => Buffer; + password: (password: string) => Buffer; + requestSsl: () => Buffer; + sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer; + sendSCRAMClientFinalMessage: (additionalData: string) => Buffer; + query: (text: string) => Buffer; + parse: (query: ParseOpts) => Buffer; + bind: (config?: BindOpts) => Buffer; + execute: (config?: ExecOpts) => Buffer; + describe: (msg: PortalOpts) => Buffer; + close: (msg: PortalOpts) => Buffer; + flush: () => Buffer; + sync: () => Buffer; + end: () => Buffer; + copyData: (chunk: Buffer) => Buffer; + copyDone: () => Buffer; + copyFail: (message: string) => Buffer; + cancel: (processID: number, secretKey: number) => Buffer; +}; +export { serialize }; diff --git a/node_modules/pg-protocol/dist/serializer.js b/node_modules/pg-protocol/dist/serializer.js new file mode 100644 index 0000000..886663a --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.js @@ -0,0 +1,189 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.serialize = void 0; +const buffer_writer_1 = require("./buffer-writer"); +const writer = new buffer_writer_1.Writer(); +const startup = (opts) => { + // protocol version + writer.addInt16(3).addInt16(0); + for (const key of Object.keys(opts)) { + 
writer.addCString(key).addCString(opts[key]); + } + writer.addCString('client_encoding').addCString('UTF8'); + var bodyBuffer = writer.addCString('').flush(); + // this message is sent without a code + var length = bodyBuffer.length + 4; + return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush(); +}; +const requestSsl = () => { + const response = Buffer.allocUnsafe(8); + response.writeInt32BE(8, 0); + response.writeInt32BE(80877103, 4); + return response; +}; +const password = (password) => { + return writer.addCString(password).flush(112 /* code.startup */); +}; +const sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse); + return writer.flush(112 /* code.startup */); +}; +const sendSCRAMClientFinalMessage = function (additionalData) { + return writer.addString(additionalData).flush(112 /* code.startup */); +}; +const query = (text) => { + return writer.addCString(text).flush(81 /* code.query */); +}; +const emptyArray = []; +const parse = (query) => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + // normalize missing query names to allow for null + const name = query.name || ''; + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! Postgres only supports 63 characters for query names.'); + console.error('You supplied %s (%s)', name, name.length); + console.error('This can cause conflicts and silent errors executing queries'); + /* eslint-enable no-console */ + } + const types = query.types || emptyArray; + var len = types.length; + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len); + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]); + } + return writer.flush(80 /* code.parse */); +}; +const paramWriter = new buffer_writer_1.Writer(); +const writeValues = function (values, valueMapper) { + for (let i = 0; i < values.length; i++) { + const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]; + if (mappedVal == null) { + // add the param type (string) to the writer + writer.addInt16(0 /* ParamType.STRING */); + // write -1 to the param writer to indicate null + paramWriter.addInt32(-1); + } + else if (mappedVal instanceof Buffer) { + // add the param type (binary) to the writer + writer.addInt16(1 /* ParamType.BINARY */); + // add the buffer to the param writer + paramWriter.addInt32(mappedVal.length); + paramWriter.add(mappedVal); + } + else { + // add the param type (string) to the writer + writer.addInt16(0 /* ParamType.STRING */); + paramWriter.addInt32(Buffer.byteLength(mappedVal)); + paramWriter.addString(mappedVal); + } + } +}; +const bind = (config = {}) => { + // normalize config + const portal = config.portal || ''; + const statement = config.statement || ''; + const binary = config.binary || false; + const values = config.values || emptyArray; + const len = values.length; + writer.addCString(portal).addCString(statement); + writer.addInt16(len); + writeValues(values, config.valueMapper); + writer.addInt16(len); + writer.add(paramWriter.flush()); + // format code + writer.addInt16(binary ? 
1 /* ParamType.BINARY */ : 0 /* ParamType.STRING */); + return writer.flush(66 /* code.bind */); +}; +const emptyExecute = Buffer.from([69 /* code.execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]); +const execute = (config) => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute; + } + const portal = config.portal || ''; + const rows = config.rows || 0; + const portalLength = Buffer.byteLength(portal); + const len = 4 + portalLength + 1 + 4; + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len); + buff[0] = 69 /* code.execute */; + buff.writeInt32BE(len, 1); + buff.write(portal, 5, 'utf-8'); + buff[portalLength + 5] = 0; // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4); + return buff; +}; +const cancel = (processID, secretKey) => { + const buffer = Buffer.allocUnsafe(16); + buffer.writeInt32BE(16, 0); + buffer.writeInt16BE(1234, 4); + buffer.writeInt16BE(5678, 6); + buffer.writeInt32BE(processID, 8); + buffer.writeInt32BE(secretKey, 12); + return buffer; +}; +const cstringMessage = (code, string) => { + const stringLen = Buffer.byteLength(string); + const len = 4 + stringLen + 1; + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len); + buffer[0] = code; + buffer.writeInt32BE(len, 1); + buffer.write(string, 5, 'utf-8'); + buffer[len] = 0; // null terminate cString + return buffer; +}; +const emptyDescribePortal = writer.addCString('P').flush(68 /* code.describe */); +const emptyDescribeStatement = writer.addCString('S').flush(68 /* code.describe */); +const describe = (msg) => { + return msg.name + ? cstringMessage(68 /* code.describe */, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? emptyDescribePortal + : emptyDescribeStatement; +}; +const close = (msg) => { + const text = `${msg.type}${msg.name || ''}`; + return cstringMessage(67 /* code.close */, text); +}; +const copyData = (chunk) => { + return writer.add(chunk).flush(100 /* code.copyFromChunk */); +}; +const copyFail = (message) => { + return cstringMessage(102 /* code.copyFail */, message); +}; +const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]); +const flushBuffer = codeOnlyBuffer(72 /* code.flush */); +const syncBuffer = codeOnlyBuffer(83 /* code.sync */); +const endBuffer = codeOnlyBuffer(88 /* code.end */); +const copyDoneBuffer = codeOnlyBuffer(99 /* code.copyDone */); +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +}; +exports.serialize = serialize; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/serializer.js.map b/node_modules/pg-protocol/dist/serializer.js.map new file mode 100644 index 0000000..5146ebe --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../src/serializer.ts"],"names":[],"mappings":";;;AAAA,mDAAwC;AAkBxC,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,OAAO,GAAG,CAAC,IAA4B,EAAU,EAAE;IACvD,mBAAmB;IACnB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;KAC7C;IAED,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAEvD,IAAI,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAC9C,sCAAsC;IAEtC,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,OAAO,IAAI,sBAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,CAAA;AAC9D,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,GAAW,EAAE;IAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;IACtC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3B,QAAQ,CAAC,YAAY,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAA;IAClC,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,QAAgB,EAAU,EAAE;IAC5C,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,KAAK,wBAAc,CAAA;AACxD,CAAC,CAAA;AAED,MAAM,8BAA8B,GAAG,UAAU,SAAiB,EAAE,eAAuB;IACzF,aAAa;IACb,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;IAEpG,OAAO,MAAM,CAAC,KAAK,wBAAc,CAAA;AACnC,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,UAAU,cAAsB;IAClE,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,wBAAc,CAAA;AAC7D,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAU,EAAE;IACrC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,qBAAY,CAAA;AAClD,CAAC,CAAA;AAQD,MAAM,UAAU,GAAU,EAAE,CAAA;AAE5B,MAAM,KAAK,GAAG,CAAC,KAAgB,EAAU,EAAE;IACzC,8BAA8B;IAC9B,uBAAuB;IACvB,gCAAgC;IAChC,8BAA8B;IAE9B,kDAAkD;IAClD,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,EAAE,CAAA;IAC7B,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;QACpB,+BAA+B;QAC/B,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAA;QAC/E,OAAO,CAAC,KAAK,CAAC,sBAAsB,EAAE,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACxD,OAAO,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;QAC7E,8BAA8B;KAC/B;IAED,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,IAAI,UAAU,CAAA;IAEvC,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,CAAA;IAEtB,IAAI,MAAM,GAAG,MAAM;SAChB,UAAU,CAAC,IAAI,CAAC,CAAC,gBAAgB;SACjC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,oBAAoB;SAC3C,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;KAC1B;IAED,OAAO,MAAM,CAAC,KAAK,qBAAY,CAAA;AACjC,CAAC,CAAA;AAaD,MAAM,WAAW,GAAG,IAAI,sBAAM,EAAE,CAAA;AAQhC,MAAM,WAAW,GAAG,UAAU,MAAa,EAAE,WAAyB;IACpE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,SAAS,GAAG,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrE,IAAI,SAAS,IAAI,IAAI,EAAE;YACrB,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,gDAAgD;YAChD,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;SACzB;aAAM,IAAI,SAAS,YAAY,MAAM,EAAE;YACtC,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,qCAAqC;YACrC,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YACtC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAA;SAC3B;aAAM;YACL,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,WAAW,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAA;YAClD,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;SACjC;KACF;AACH,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,CAAC,SAAmB,EAAE,EAAU,EAAE;IAC7C,mBAAmB;IACnB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,EAAE,CAAA;IACxC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAA;IACrC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,UAAU,CAAA;IAC1C,MAAM,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;IAEzB,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,
CAAA;IAC/C,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEpB,WAAW,CAAC,MAAM,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;IAEvC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IACpB,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAA;IAE/B,cAAc;IACd,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,0BAAkB,CAAC,yBAAiB,CAAC,CAAA;IAC7D,OAAO,MAAM,CAAC,KAAK,oBAAW,CAAA;AAChC,CAAC,CAAA;AAOD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAe,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEtG,MAAM,OAAO,GAAG,CAAC,MAAiB,EAAU,EAAE;IAC5C,0CAA0C;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC/C,OAAO,YAAY,CAAA;KACpB;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,CAAC,CAAA;IAE7B,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,GAAG,CAAC,GAAG,CAAC,CAAA;IACpC,yBAAyB;IACzB,MAAM,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IACxC,IAAI,CAAC,CAAC,CAAC,wBAAe,CAAA;IACtB,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACzB,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9B,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA,CAAC,gCAAgC;IAC3D,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IACzC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,MAAM,GAAG,CAAC,SAAiB,EAAE,SAAiB,EAAU,EAAE;IAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;IAC1B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,CAAA;IACjC,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,EAAE,CAAC,CAAA;IAClC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAOD,MAAM,cAAc,GAAG,CAAC,IAAU,EAAE,MAAc,EAAU,EAAE;IAC5D,MAAM,SAAS,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC3C,MAAM,GAAG,GAAG,CAAC,GAAG,SAAS,GAAG,CAAC,CAAA;IAC7B,yBAAyB;IACzB,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IAC1C,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAA;IAChB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IAC3B,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAChC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA,CAAC,yBAAyB;IACzC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,wBAAe,CAAA;AACvE,MAAM,sBAAsB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,wBAAe,CAAA;AAE1E,MAAM,QAAQ,GAAG,CAAC,GAAe,EAAU,EAAE;IAC3C,OAAO,GAAG,CAAC,IAAI;QACb,CAAC,CAAC,cAAc,yBAAgB,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAC;QAC/D,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,GAAG;YAClB,CAAC,CAAC,mBAAmB;YACrB,CAAC,CAAC,sBAAsB,CAAA;AAC5B,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,GAAe,EAAU,EAAE;IACxC,MAAM,IAAI,GAAG,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAA;IAC3C,OAAO,cAAc,sBAAa,IAAI,CAAC,CAAA;AACzC,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,KAAa,EAAU,EAAE;IACzC,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,8BAAoB,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,OAAe,EAAU,EAAE;IAC3C,OAAO,cAAc,0BAAgB,OAAO,CAAC,CAAA;AAC/C,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,CAAC,IAAU,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAE1F,MAAM,WAAW,GAAG,cAAc,qBAAY,CAAA;AAC9C,MAAM,UAAU,GAAG,cAAc,oBAAW,CAAA;AAC5C,MAAM,SAAS,GAAG,cAAc,mBAAU,CAAA;AAC1C,MAAM,cAAc,GAAG,cAAc,wBAAe,CAAA;AAEpD,MAAM,SAAS,GAAG;IAChB,OAAO;IACP,QAAQ;IACR,UAAU;IACV,8BAA8B;IAC9B,2BAA2B;IAC3B,KAAK;IACL,KAAK;IACL,IAAI;IACJ,OAAO;IACP,QAAQ;IACR,KAAK;IACL,KAAK,EAAE,GAAG,EAAE,CAAC,WAAW;IACxB,IAAI,EAAE,GAAG,EAAE,CAAC,UAAU;IACtB,GAAG,EAAE,GAAG,EAAE,CAAC,SAAS;IACpB,QAAQ;IACR,QAAQ,EAAE,GAAG,EAAE,CAAC,cAAc;IAC9B,QAAQ;IACR,MAAM;CACP,CAAA;AAEQ,8BAAS"} \ No 
newline at end of file diff --git a/node_modules/pg-protocol/package.json b/node_modules/pg-protocol/package.json new file mode 100644 index 0000000..a51a1e9 --- /dev/null +++ b/node_modules/pg-protocol/package.json @@ -0,0 +1,35 @@ +{ + "name": "pg-protocol", + "version": "1.6.1", + "description": "The postgres client/server binary protocol, implemented in TypeScript", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "license": "MIT", + "devDependencies": { + "@types/chai": "^4.2.7", + "@types/mocha": "^5.2.7", + "@types/node": "^12.12.21", + "chai": "^4.2.0", + "chunky": "^0.0.0", + "mocha": "^7.1.2", + "ts-node": "^8.5.4", + "typescript": "^4.0.3" + }, + "scripts": { + "test": "mocha dist/**/*.test.js", + "build": "tsc", + "build:watch": "tsc --watch", + "prepublish": "yarn build", + "pretest": "yarn build" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-protocol" + }, + "files": [ + "/dist/*{js,ts,map}", + "/src" + ], + "gitHead": "b03c071d2d15af259e1e008e9628191c865e58fa" +} diff --git a/node_modules/pg-protocol/src/b.ts b/node_modules/pg-protocol/src/b.ts new file mode 100644 index 0000000..028b763 --- /dev/null +++ b/node_modules/pg-protocol/src/b.ts @@ -0,0 +1,28 @@ +// file for microbenchmarking + +import { Writer } from './buffer-writer' +import { serialize } from './index' +import { BufferReader } from './buffer-reader' + +const LOOPS = 1000 +let count = 0 +let start = Date.now() +const writer = new Writer() + +const reader = new BufferReader() +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]) + +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start) + return + } + count++ + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer) + reader.cstring() + } + setImmediate(run) +} + +run() diff --git a/node_modules/pg-protocol/src/buffer-reader.ts b/node_modules/pg-protocol/src/buffer-reader.ts new file mode 100644 index 0000000..2305e13 --- /dev/null +++ b/node_modules/pg-protocol/src/buffer-reader.ts @@ -0,0 +1,53 @@ +const emptyBuffer = Buffer.allocUnsafe(0) + +export class BufferReader { + private buffer: Buffer = emptyBuffer + + // TODO(bmc): support non-utf8 encoding? 
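+  // The reader is deliberately stateful: setBuffer() points it at one message payload,
+  // and each byte()/int16()/int32()/string()/cstring()/bytes() call reads at `offset`
+  // (multi-byte integers are big-endian, as the protocol requires) and then advances it,
+  // so the parser consumes fields in wire order without allocating intermediate slices.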
+ private encoding: string = 'utf-8' + + constructor(private offset: number = 0) {} + + public setBuffer(offset: number, buffer: Buffer): void { + this.offset = offset + this.buffer = buffer + } + + public int16(): number { + const result = this.buffer.readInt16BE(this.offset) + this.offset += 2 + return result + } + + public byte(): number { + const result = this.buffer[this.offset] + this.offset++ + return result + } + + public int32(): number { + const result = this.buffer.readInt32BE(this.offset) + this.offset += 4 + return result + } + + public string(length: number): string { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length) + this.offset += length + return result + } + + public cstring(): string { + const start = this.offset + let end = start + while (this.buffer[end++] !== 0) {} + this.offset = end + return this.buffer.toString(this.encoding, start, end - 1) + } + + public bytes(length: number): Buffer { + const result = this.buffer.slice(this.offset, this.offset + length) + this.offset += length + return result + } +} diff --git a/node_modules/pg-protocol/src/buffer-writer.ts b/node_modules/pg-protocol/src/buffer-writer.ts new file mode 100644 index 0000000..756cdc9 --- /dev/null +++ b/node_modules/pg-protocol/src/buffer-writer.ts @@ -0,0 +1,85 @@ +//binary data writer tuned for encoding binary specific to the postgres binary protocol + +export class Writer { + private buffer: Buffer + private offset: number = 5 + private headerPosition: number = 0 + constructor(private size = 256) { + this.buffer = Buffer.allocUnsafe(size) + } + + private ensure(size: number): void { + var remaining = this.buffer.length - this.offset + if (remaining < size) { + var oldBuffer = this.buffer + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size + this.buffer = Buffer.allocUnsafe(newSize) + oldBuffer.copy(this.buffer) + } + } + + public addInt32(num: number): Writer { + this.ensure(4) + this.buffer[this.offset++] = (num >>> 24) & 0xff + this.buffer[this.offset++] = (num >>> 16) & 0xff + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addInt16(num: number): Writer { + this.ensure(2) + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addCString(string: string): Writer { + if (!string) { + this.ensure(1) + } else { + var len = Buffer.byteLength(string) + this.ensure(len + 1) // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8') + this.offset += len + } + + this.buffer[this.offset++] = 0 // null terminator + return this + } + + public addString(string: string = ''): Writer { + var len = Buffer.byteLength(string) + this.ensure(len) + this.buffer.write(string, this.offset) + this.offset += len + return this + } + + public add(otherBuffer: Buffer): Writer { + this.ensure(otherBuffer.length) + otherBuffer.copy(this.buffer, this.offset) + this.offset += otherBuffer.length + return this + } + + private join(code?: number): Buffer { + if (code) { + this.buffer[this.headerPosition] = code + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1) + this.buffer.writeInt32BE(length, this.headerPosition + 1) + } + return this.buffer.slice(code ? 
0 : 5, this.offset) + } + + public flush(code?: number): Buffer { + var result = this.join(code) + this.offset = 5 + this.headerPosition = 0 + this.buffer = Buffer.allocUnsafe(this.size) + return result + } +} diff --git a/node_modules/pg-protocol/src/inbound-parser.test.ts b/node_modules/pg-protocol/src/inbound-parser.test.ts new file mode 100644 index 0000000..364bd8d --- /dev/null +++ b/node_modules/pg-protocol/src/inbound-parser.test.ts @@ -0,0 +1,557 @@ +import buffers from './testing/test-buffers' +import BufferList from './testing/buffer-list' +import { parse } from '.' +import assert from 'assert' +import { PassThrough } from 'stream' +import { BackendMessage } from './messages' + +var authOkBuffer = buffers.authenticationOk() +var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8') +var readyForQueryBuffer = buffers.readyForQuery() +var backendKeyDataBuffer = buffers.backendKeyData(1, 2) +var commandCompleteBuffer = buffers.commandComplete('SELECT 3') +var parseCompleteBuffer = buffers.parseComplete() +var bindCompleteBuffer = buffers.bindComplete() +var portalSuspendedBuffer = buffers.portalSuspended() + +var addRow = function (bufferList: BufferList, name: string, offset: number) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0) // format code, 0 => text +} + +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +} +var oneRowDescBuff = buffers.rowDescription([row1]) +row1.name = 'bang' + +var twoRowBuf = buffers.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]) + +var emptyRowFieldBuf = new BufferList().addInt16(0).join(true, 'D') + +var emptyRowFieldBuf = buffers.dataRow([]) + +var oneFieldBuf = new BufferList() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D') + +var oneFieldBuf = buffers.dataRow(['test']) + +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +} + +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +} + +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +} + +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +} + +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +} +var emptyRowDescriptionBuffer = new BufferList() + .addInt16(0) // number of fields + .join(true, 'T') + +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +} +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +} + +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 'whoah', + tableID: 10, + 
columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +} + +var emptyParameterDescriptionBuffer = new BufferList() + .addInt16(0) // number of parameters + .join(true, 't') + +var oneParameterDescBuf = buffers.parameterDescription([1111]) + +var twoParameterDescBuf = buffers.parameterDescription([2222, 3333]) + +var expectedEmptyParameterDescriptionMessage = { + name: 'parameterDescription', + length: 6, + parameterCount: 0, + dataTypeIDs: [], +} + +var expectedOneParameterMessage = { + name: 'parameterDescription', + length: 10, + parameterCount: 1, + dataTypeIDs: [1111], +} + +var expectedTwoParameterMessage = { + name: 'parameterDescription', + length: 14, + parameterCount: 2, + dataTypeIDs: [2222, 3333], +} + +var testForMessage = function (buffer: Buffer, expectedMessage: any) { + it('recieves and parses ' + expectedMessage.name, async () => { + const messages = await parseBuffers([buffer]) + const [lastMessage] = messages + + for (const key in expectedMessage) { + assert.deepEqual((lastMessage as any)[key], expectedMessage[key]) + } + }) +} + +var plainPasswordBuffer = buffers.authenticationCleartextPassword() +var md5PasswordBuffer = buffers.authenticationMD5Password() +var SASLBuffer = buffers.authenticationSASL() +var SASLContinueBuffer = buffers.authenticationSASLContinue() +var SASLFinalBuffer = buffers.authenticationSASLFinal() + +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +} + +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +} + +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +} + +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +} + +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +} + +var notificationResponseBuffer = buffers.notification(4, 'hi', 'boom') +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +} + +const parseBuffers = async (buffers: Buffer[]): Promise => { + const stream = new PassThrough() + for (const buffer of buffers) { + stream.write(buffer) + } + stream.end() + const msgs: BackendMessage[] = [] + await parse(stream, (msg) => msgs.push(msg)) + return msgs +} + +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage) + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage) + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage) + testForMessage(SASLBuffer, expectedSASLMessage) + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]) + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage) + + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]) + testForMessage(extendedSASLFinalBuffer, 
expectedSASLFinalMessage) + + testForMessage(paramStatusBuffer, expectedParameterStatusMessage) + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage) + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage) + testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage) + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage) + testForMessage(buffers.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }) + + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }) + + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage) + testForMessage(oneRowDescBuff, expectedOneRowMessage) + testForMessage(twoRowBuf, expectedTwoRowMessage) + }) + + describe('parameterDescription messages', function () { + testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage) + testForMessage(oneParameterDescBuf, expectedOneParameterMessage) + testForMessage(twoParameterDescBuf, expectedTwoParameterMessage) + }) + + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }) + }) + + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }) + }) + }) + + describe('notice message', function () { + // this uses the same logic as error message + var buff = buffers.notice([{ type: 'C', value: 'code' }]) + testForMessage(buff, { + name: 'notice', + code: 'code', + }) + }) + + testForMessage(buffers.error([]), { + name: 'error', + }) + + describe('with all the fields', function () { + var buffer = buffers.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: '101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', // ignored + value: 'alsdkf', + }, + ]) + + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }) + }) + + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + testForMessage(buffers.closeComplete(), { + name: 'closeComplete', + }) + + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }) + }) + + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }) + }) + + describe('copy', () => { + testForMessage(buffers.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }) + + testForMessage(buffers.copyOut(0), { + name: 
'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }) + + testForMessage(buffers.copyDone(), { + name: 'copyDone', + length: 4, + }) + + testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }) + }) + + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!']) + + it('parses when full buffer comes in', async function () { + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + }) + + var testMessageRecievedAfterSpiltAt = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + } + + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6) + }) + + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2) + }) + + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5) + }) + }) + + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = buffers.dataRow(['!']) + var readyForQueryBuffer = buffers.readyForQuery() + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length) + dataRowBuffer.copy(fullBuffer, 0, 0) + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0) + + var verifyMessages = function (messages: any[]) { + assert.strictEqual(messages.length, 2) + assert.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }) + assert.equal(messages[0].fields[0], '!') + assert.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }) + } + // sanity check + it('recieves both messages when packet is not split', async function () { + const messages = await parseBuffers([fullBuffer]) + verifyMessages(messages) + }) + + var splitAndVerifyTwoMessages = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([firstBuffer, secondBuffer]) + verifyMessages(messages) + } + + describe('recieves both messages when packet is split', function () { + it('in 
the middle', function () { + return splitAndVerifyTwoMessages(11) + }) + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + ]) + }) + + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]) + }) + }) + }) +}) diff --git a/node_modules/pg-protocol/src/index.ts b/node_modules/pg-protocol/src/index.ts new file mode 100644 index 0000000..00491ff --- /dev/null +++ b/node_modules/pg-protocol/src/index.ts @@ -0,0 +1,11 @@ +import { BackendMessage, DatabaseError } from './messages' +import { serialize } from './serializer' +import { Parser, MessageCallback } from './parser' + +export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise { + const parser = new Parser() + stream.on('data', (buffer: Buffer) => parser.parse(buffer, callback)) + return new Promise((resolve) => stream.on('end', () => resolve())) +} + +export { serialize, DatabaseError } diff --git a/node_modules/pg-protocol/src/messages.ts b/node_modules/pg-protocol/src/messages.ts new file mode 100644 index 0000000..c3fbbdd --- /dev/null +++ b/node_modules/pg-protocol/src/messages.ts @@ -0,0 +1,262 @@ +export type Mode = 'text' | 'binary' + +export type MessageName = + | 'parseComplete' + | 'bindComplete' + | 'closeComplete' + | 'noData' + | 'portalSuspended' + | 'replicationStart' + | 'emptyQuery' + | 'copyDone' + | 'copyData' + | 'rowDescription' + | 'parameterDescription' + | 'parameterStatus' + | 'backendKeyData' + | 'notification' + | 'readyForQuery' + | 'commandComplete' + | 'dataRow' + | 'copyInResponse' + | 'copyOutResponse' + | 'authenticationOk' + | 'authenticationMD5Password' + | 'authenticationCleartextPassword' + | 'authenticationSASL' + | 'authenticationSASLContinue' + | 'authenticationSASLFinal' + | 'error' + | 'notice' + +export interface BackendMessage { + name: MessageName + length: number +} + +export const parseComplete: BackendMessage = { + name: 'parseComplete', + length: 5, +} + +export const bindComplete: BackendMessage = { + name: 'bindComplete', + length: 5, +} + +export const closeComplete: BackendMessage = { + name: 'closeComplete', + length: 5, +} + +export const noData: BackendMessage = { + name: 'noData', + length: 5, +} + +export const portalSuspended: BackendMessage = { + name: 'portalSuspended', + length: 5, +} + +export const replicationStart: BackendMessage = { + name: 'replicationStart', + length: 4, +} + +export const emptyQuery: BackendMessage = { + name: 'emptyQuery', + length: 4, +} + +export const copyDone: BackendMessage = { + name: 'copyDone', + length: 4, +} + +interface NoticeOrError { + message: string | undefined + severity: string | undefined + code: string | undefined + detail: string | undefined + hint: string | undefined + position: string | undefined + internalPosition: string | undefined + internalQuery: string | undefined + where: string | undefined + schema: string | undefined + table: string | undefined + column: string | undefined + dataType: string | undefined + constraint: string | undefined + file: string | undefined + line: string | undefined + routine: string | undefined +} + +export class DatabaseError extends Error implements NoticeOrError { + public severity: string | undefined + public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | undefined + 
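+  // These optional properties correspond to the single-character field codes of the
+  // ErrorResponse/NoticeResponse wire messages (S, C, D, H, P, p, q, W, s, t, c, d, n,
+  // F, L, R); Parser.parseErrorMessage() copies across whichever ones the server sent.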
public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined + public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined + constructor( + message: string, + public readonly length: number, + public readonly name: MessageName + ) { + super(message) + } +} + +export class CopyDataMessage { + public readonly name = 'copyData' + constructor( + public readonly length: number, + public readonly chunk: Buffer + ) {} +} + +export class CopyResponse { + public readonly columnTypes: number[] + constructor( + public readonly length: number, + public readonly name: MessageName, + public readonly binary: boolean, + columnCount: number + ) { + this.columnTypes = new Array(columnCount) + } +} + +export class Field { + constructor( + public readonly name: string, + public readonly tableID: number, + public readonly columnID: number, + public readonly dataTypeID: number, + public readonly dataTypeSize: number, + public readonly dataTypeModifier: number, + public readonly format: Mode + ) {} +} + +export class RowDescriptionMessage { + public readonly name: MessageName = 'rowDescription' + public readonly fields: Field[] + constructor( + public readonly length: number, + public readonly fieldCount: number + ) { + this.fields = new Array(this.fieldCount) + } +} + +export class ParameterDescriptionMessage { + public readonly name: MessageName = 'parameterDescription' + public readonly dataTypeIDs: number[] + constructor( + public readonly length: number, + public readonly parameterCount: number + ) { + this.dataTypeIDs = new Array(this.parameterCount) + } +} + +export class ParameterStatusMessage { + public readonly name: MessageName = 'parameterStatus' + constructor( + public readonly length: number, + public readonly parameterName: string, + public readonly parameterValue: string + ) {} +} + +export class AuthenticationMD5Password implements BackendMessage { + public readonly name: MessageName = 'authenticationMD5Password' + constructor( + public readonly length: number, + public readonly salt: Buffer + ) {} +} + +export class BackendKeyDataMessage { + public readonly name: MessageName = 'backendKeyData' + constructor( + public readonly length: number, + public readonly processID: number, + public readonly secretKey: number + ) {} +} + +export class NotificationResponseMessage { + public readonly name: MessageName = 'notification' + constructor( + public readonly length: number, + public readonly processId: number, + public readonly channel: string, + public readonly payload: string + ) {} +} + +export class ReadyForQueryMessage { + public readonly name: MessageName = 'readyForQuery' + constructor( + public readonly length: number, + public readonly status: string + ) {} +} + +export class CommandCompleteMessage { + public readonly name: MessageName = 'commandComplete' + constructor( + public readonly length: number, + public readonly text: string + ) {} +} + +export class DataRowMessage { + public readonly fieldCount: number + public readonly name: MessageName = 'dataRow' + constructor( + public length: number, + public fields: any[] + ) { + this.fieldCount = fields.length + } +} + +export class NoticeMessage implements BackendMessage, NoticeOrError { + constructor( + public readonly length: number, + public readonly 
message: string | undefined + ) {} + public readonly name = 'notice' + public severity: string | undefined + public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | undefined + public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined + public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined +} diff --git a/node_modules/pg-protocol/src/outbound-serializer.test.ts b/node_modules/pg-protocol/src/outbound-serializer.test.ts new file mode 100644 index 0000000..f6669be --- /dev/null +++ b/node_modules/pg-protocol/src/outbound-serializer.test.ts @@ -0,0 +1,272 @@ +import assert from 'assert' +import { serialize } from './serializer' +import BufferList from './testing/buffer-list' + +describe('serializer', () => { + it('builds startup message', function () { + const actual = serialize.startup({ + user: 'brian', + database: 'bang', + }) + assert.deepEqual( + actual, + new BufferList() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true) + ) + }) + + it('builds password message', function () { + const actual = serialize.password('!') + assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p')) + }) + + it('builds request ssl message', function () { + const actual = serialize.requestSsl() + const expected = new BufferList().addInt32(80877103).join(true) + assert.deepEqual(actual, expected) + }) + + it('builds SASLInitialResponseMessage message', function () { + const actual = serialize.sendSASLInitialResponseMessage('mech', 'data') + assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p')) + }) + + it('builds SCRAMClientFinalMessage message', function () { + const actual = serialize.sendSCRAMClientFinalMessage('data') + assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p')) + }) + + it('builds query message', function () { + var txt = 'select * from boom' + const actual = serialize.query(txt) + assert.deepEqual(actual, new BufferList().addCString(txt).join(true, 'Q')) + }) + + describe('parse message', () => { + it('builds parse message', function () { + const actual = serialize.parse({ text: '!' 
}) + var expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('builds parse message with named query', function () { + const actual = serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }) + var expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('with multiple parameters', function () { + const actual = serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }) + var expected = new BufferList() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P') + assert.deepEqual(actual, expected) + }) + }) + + describe('bind messages', function () { + it('with no values', function () { + const actual = serialize.bind() + + var expectedBuffer = new BufferList() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + it('with named statement, portal, and values', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('with custom valueMapper', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + valueMapper: () => null, + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + it('with named statement, portal, and buffer value', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serialize.execute() + var expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + + it('for named portal with row limit', function () { + const actual = serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }) + var expectedBuffer = new BufferList().addCString('my favorite 
portal').addInt32(100).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('builds flush command', function () { + const actual = serialize.flush() + var expected = new BufferList().join(true, 'H') + assert.deepEqual(actual, expected) + }) + + it('builds sync command', function () { + const actual = serialize.sync() + var expected = new BufferList().join(true, 'S') + assert.deepEqual(actual, expected) + }) + + it('builds end command', function () { + const actual = serialize.end() + var expected = Buffer.from([0x58, 0, 0, 0, 4]) + assert.deepEqual(actual, expected) + }) + + describe('builds describe command', function () { + it('describe statement', function () { + const actual = serialize.describe({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'D') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.describe({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'D') + assert.deepEqual(actual, expected) + }) + }) + + describe('builds close command', function () { + it('describe statement', function () { + const actual = serialize.close({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'C') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.close({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'C') + assert.deepEqual(actual, expected) + }) + }) + + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serialize.copyData(Buffer.from([1, 2, 3])) + const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd') + assert.deepEqual(actual, expected) + }) + + it('builds copy fail', () => { + const actual = serialize.copyFail('err!') + const expected = new BufferList().addCString('err!').join(true, 'f') + assert.deepEqual(actual, expected) + }) + + it('builds copy done', () => { + const actual = serialize.copyDone() + const expected = new BufferList().join(true, 'c') + assert.deepEqual(actual, expected) + }) + }) + + it('builds cancel message', () => { + const actual = serialize.cancel(3, 4) + const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true) + assert.deepEqual(actual, expected) + }) +}) diff --git a/node_modules/pg-protocol/src/parser.ts b/node_modules/pg-protocol/src/parser.ts new file mode 100644 index 0000000..1ad4e3a --- /dev/null +++ b/node_modules/pg-protocol/src/parser.ts @@ -0,0 +1,389 @@ +import { TransformOptions } from 'stream' +import { + Mode, + bindComplete, + parseComplete, + closeComplete, + noData, + portalSuspended, + copyDone, + replicationStart, + emptyQuery, + ReadyForQueryMessage, + CommandCompleteMessage, + CopyDataMessage, + CopyResponse, + NotificationResponseMessage, + RowDescriptionMessage, + ParameterDescriptionMessage, + Field, + DataRowMessage, + ParameterStatusMessage, + BackendKeyDataMessage, + DatabaseError, + BackendMessage, + MessageName, + AuthenticationMD5Password, + NoticeMessage, +} from './messages' +import { BufferReader } from './buffer-reader' +import assert from 'assert' + +// every message is prefixed with a single bye +const CODE_LENGTH = 1 +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4 + +const HEADER_LENGTH = CODE_LENGTH + 
LEN_LENGTH + +export type Packet = { + code: number + packet: Buffer +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +type StreamOptions = TransformOptions & { + mode: Mode +} + +const enum MessageCodes { + DataRow = 0x44, // D + ParseComplete = 0x31, // 1 + BindComplete = 0x32, // 2 + CloseComplete = 0x33, // 3 + CommandComplete = 0x43, // C + ReadyForQuery = 0x5a, // Z + NoData = 0x6e, // n + NotificationResponse = 0x41, // A + AuthenticationResponse = 0x52, // R + ParameterStatus = 0x53, // S + BackendKeyData = 0x4b, // K + ErrorMessage = 0x45, // E + NoticeMessage = 0x4e, // N + RowDescriptionMessage = 0x54, // T + ParameterDescriptionMessage = 0x74, // t + PortalSuspended = 0x73, // s + ReplicationStart = 0x57, // W + EmptyQuery = 0x49, // I + CopyIn = 0x47, // G + CopyOut = 0x48, // H + CopyDone = 0x63, // c + CopyData = 0x64, // d +} + +export type MessageCallback = (msg: BackendMessage) => void + +export class Parser { + private buffer: Buffer = emptyBuffer + private bufferLength: number = 0 + private bufferOffset: number = 0 + private reader = new BufferReader() + private mode: Mode + + constructor(opts?: StreamOptions) { + if (opts?.mode === 'binary') { + throw new Error('Binary mode not supported yet') + } + this.mode = opts?.mode || 'text' + } + + public parse(buffer: Buffer, callback: MessageCallback) { + this.mergeBuffer(buffer) + const bufferFullLength = this.bufferOffset + this.bufferLength + let offset = this.bufferOffset + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset] + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH) + const fullMessageLength = CODE_LENGTH + length + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer) + callback(message) + offset += fullMessageLength + } else { + break + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer + this.bufferLength = 0 + this.bufferOffset = 0 + } else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset + this.bufferOffset = offset + } + } + + private mergeBuffer(buffer: Buffer): void { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength + const newFullLength = newLength + this.bufferOffset + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer: Buffer + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer + } else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2 + while (newLength >= newBufferLength) { + newBufferLength *= 2 + } + newBuffer = Buffer.allocUnsafe(newBufferLength) + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength) + this.buffer = newBuffer + this.bufferOffset = 0 + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength) + this.bufferLength = newLength + } else { + this.buffer = buffer + this.bufferOffset = 0 + this.bufferLength = buffer.byteLength + } + } + + private handlePacket(offset: number, code: 
number, length: number, bytes: Buffer): BackendMessage { + switch (code) { + case MessageCodes.BindComplete: + return bindComplete + case MessageCodes.ParseComplete: + return parseComplete + case MessageCodes.CloseComplete: + return closeComplete + case MessageCodes.NoData: + return noData + case MessageCodes.PortalSuspended: + return portalSuspended + case MessageCodes.CopyDone: + return copyDone + case MessageCodes.ReplicationStart: + return replicationStart + case MessageCodes.EmptyQuery: + return emptyQuery + case MessageCodes.DataRow: + return this.parseDataRowMessage(offset, length, bytes) + case MessageCodes.CommandComplete: + return this.parseCommandCompleteMessage(offset, length, bytes) + case MessageCodes.ReadyForQuery: + return this.parseReadyForQueryMessage(offset, length, bytes) + case MessageCodes.NotificationResponse: + return this.parseNotificationMessage(offset, length, bytes) + case MessageCodes.AuthenticationResponse: + return this.parseAuthenticationResponse(offset, length, bytes) + case MessageCodes.ParameterStatus: + return this.parseParameterStatusMessage(offset, length, bytes) + case MessageCodes.BackendKeyData: + return this.parseBackendKeyData(offset, length, bytes) + case MessageCodes.ErrorMessage: + return this.parseErrorMessage(offset, length, bytes, 'error') + case MessageCodes.NoticeMessage: + return this.parseErrorMessage(offset, length, bytes, 'notice') + case MessageCodes.RowDescriptionMessage: + return this.parseRowDescriptionMessage(offset, length, bytes) + case MessageCodes.ParameterDescriptionMessage: + return this.parseParameterDescriptionMessage(offset, length, bytes) + case MessageCodes.CopyIn: + return this.parseCopyInMessage(offset, length, bytes) + case MessageCodes.CopyOut: + return this.parseCopyOutMessage(offset, length, bytes) + case MessageCodes.CopyData: + return this.parseCopyData(offset, length, bytes) + default: + return new DatabaseError('received invalid response: ' + code.toString(16), length, 'error') + } + } + + private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const status = this.reader.string(1) + return new ReadyForQueryMessage(length, status) + } + + private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const text = this.reader.cstring() + return new CommandCompleteMessage(length, text) + } + + private parseCopyData(offset: number, length: number, bytes: Buffer) { + const chunk = bytes.slice(offset, offset + (length - 4)) + return new CopyDataMessage(length, chunk) + } + + private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, 'copyInResponse') + } + + private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') + } + + private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) { + this.reader.setBuffer(offset, bytes) + const isBinary = this.reader.byte() !== 0 + const columnCount = this.reader.int16() + const message = new CopyResponse(length, messageName, isBinary, columnCount) + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16() + } + return message + } + + private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processId = this.reader.int32() + const channel = 
this.reader.cstring() + const payload = this.reader.cstring() + return new NotificationResponseMessage(length, processId, channel, payload) + } + + private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const message = new RowDescriptionMessage(length, fieldCount) + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField() + } + return message + } + + private parseField(): Field { + const name = this.reader.cstring() + const tableID = this.reader.int32() + const columnID = this.reader.int16() + const dataTypeID = this.reader.int32() + const dataTypeSize = this.reader.int16() + const dataTypeModifier = this.reader.int32() + const mode = this.reader.int16() === 0 ? 'text' : 'binary' + return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode) + } + + private parseParameterDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const parameterCount = this.reader.int16() + const message = new ParameterDescriptionMessage(length, parameterCount) + for (let i = 0; i < parameterCount; i++) { + message.dataTypeIDs[i] = this.reader.int32() + } + return message + } + + private parseDataRowMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const fields: any[] = new Array(fieldCount) + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32() + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len) + } + return new DataRowMessage(length, fields) + } + + private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const name = this.reader.cstring() + const value = this.reader.cstring() + return new ParameterStatusMessage(length, name, value) + } + + private parseBackendKeyData(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processID = this.reader.int32() + const secretKey = this.reader.int32() + return new BackendKeyDataMessage(length, processID, secretKey) + } + + public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const code = this.reader.int32() + // TODO(bmc): maybe better types here + const message: BackendMessage & any = { + name: 'authenticationOk', + length, + } + + switch (code) { + case 0: // AuthenticationOk + break + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = 'authenticationCleartextPassword' + } + break + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = 'authenticationMD5Password' + const salt = this.reader.bytes(4) + return new AuthenticationMD5Password(length, salt) + } + break + case 10: // AuthenticationSASL + message.name = 'authenticationSASL' + message.mechanisms = [] + let mechanism: string + do { + mechanism = this.reader.cstring() + + if (mechanism) { + message.mechanisms.push(mechanism) + } + } while (mechanism) + break + case 11: // AuthenticationSASLContinue + message.name = 'authenticationSASLContinue' + message.data = this.reader.string(length - 8) + break + case 12: // AuthenticationSASLFinal + message.name = 'authenticationSASLFinal' + message.data = this.reader.string(length - 8) + break + default: + throw new Error('Unknown 
authenticationOk message type ' + code) + } + return message + } + + private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) { + this.reader.setBuffer(offset, bytes) + const fields: Record = {} + let fieldType = this.reader.string(1) + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring() + fieldType = this.reader.string(1) + } + + const messageValue = fields.M + + const message = + name === 'notice' ? new NoticeMessage(length, messageValue) : new DatabaseError(messageValue, length, name) + + message.severity = fields.S + message.code = fields.C + message.detail = fields.D + message.hint = fields.H + message.position = fields.P + message.internalPosition = fields.p + message.internalQuery = fields.q + message.where = fields.W + message.schema = fields.s + message.table = fields.t + message.column = fields.c + message.dataType = fields.d + message.constraint = fields.n + message.file = fields.F + message.line = fields.L + message.routine = fields.R + return message + } +} diff --git a/node_modules/pg-protocol/src/serializer.ts b/node_modules/pg-protocol/src/serializer.ts new file mode 100644 index 0000000..07e2fe4 --- /dev/null +++ b/node_modules/pg-protocol/src/serializer.ts @@ -0,0 +1,274 @@ +import { Writer } from './buffer-writer' + +const enum code { + startup = 0x70, + query = 0x51, + parse = 0x50, + bind = 0x42, + execute = 0x45, + flush = 0x48, + sync = 0x53, + end = 0x58, + close = 0x43, + describe = 0x44, + copyFromChunk = 0x64, + copyDone = 0x63, + copyFail = 0x66, +} + +const writer = new Writer() + +const startup = (opts: Record): Buffer => { + // protocol version + writer.addInt16(3).addInt16(0) + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]) + } + + writer.addCString('client_encoding').addCString('UTF8') + + var bodyBuffer = writer.addCString('').flush() + // this message is sent without a code + + var length = bodyBuffer.length + 4 + + return new Writer().addInt32(length).add(bodyBuffer).flush() +} + +const requestSsl = (): Buffer => { + const response = Buffer.allocUnsafe(8) + response.writeInt32BE(8, 0) + response.writeInt32BE(80877103, 4) + return response +} + +const password = (password: string): Buffer => { + return writer.addCString(password).flush(code.startup) +} + +const sendSASLInitialResponseMessage = function (mechanism: string, initialResponse: string): Buffer { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse) + + return writer.flush(code.startup) +} + +const sendSCRAMClientFinalMessage = function (additionalData: string): Buffer { + return writer.addString(additionalData).flush(code.startup) +} + +const query = (text: string): Buffer => { + return writer.addCString(text).flush(code.query) +} + +type ParseOpts = { + name?: string + types?: number[] + text: string +} + +const emptyArray: any[] = [] + +const parse = (query: ParseOpts): Buffer => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + + // normalize missing query names to allow for null + const name = query.name || '' + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', name, name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + + const types = query.types || emptyArray + + var len = types.length + + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len) + + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]) + } + + return writer.flush(code.parse) +} + +type ValueMapper = (param: any, index: number) => any + +type BindOpts = { + portal?: string + binary?: boolean + statement?: string + values?: any[] + // optional map from JS value to postgres value per parameter + valueMapper?: ValueMapper +} + +const paramWriter = new Writer() + +// make this a const enum so typescript will inline the value +const enum ParamType { + STRING = 0, + BINARY = 1, +} + +const writeValues = function (values: any[], valueMapper?: ValueMapper): void { + for (let i = 0; i < values.length; i++) { + const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i] + if (mappedVal == null) { + // add the param type (string) to the writer + writer.addInt16(ParamType.STRING) + // write -1 to the param writer to indicate null + paramWriter.addInt32(-1) + } else if (mappedVal instanceof Buffer) { + // add the param type (binary) to the writer + writer.addInt16(ParamType.BINARY) + // add the buffer to the param writer + paramWriter.addInt32(mappedVal.length) + paramWriter.add(mappedVal) + } else { + // add the param type (string) to the writer + writer.addInt16(ParamType.STRING) + paramWriter.addInt32(Buffer.byteLength(mappedVal)) + paramWriter.addString(mappedVal) + } + } +} + +const bind = (config: BindOpts = {}): Buffer => { + // normalize config + const portal = config.portal || '' + const statement = config.statement || '' + const binary = config.binary || false + const values = config.values || emptyArray + const len = values.length + + writer.addCString(portal).addCString(statement) + writer.addInt16(len) + + writeValues(values, config.valueMapper) + + writer.addInt16(len) + writer.add(paramWriter.flush()) + + // format code + writer.addInt16(binary ? 
ParamType.BINARY : ParamType.STRING) + return writer.flush(code.bind) +} + +type ExecOpts = { + portal?: string + rows?: number +} + +const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]) + +const execute = (config?: ExecOpts): Buffer => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute + } + + const portal = config.portal || '' + const rows = config.rows || 0 + + const portalLength = Buffer.byteLength(portal) + const len = 4 + portalLength + 1 + 4 + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len) + buff[0] = code.execute + buff.writeInt32BE(len, 1) + buff.write(portal, 5, 'utf-8') + buff[portalLength + 5] = 0 // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4) + return buff +} + +const cancel = (processID: number, secretKey: number): Buffer => { + const buffer = Buffer.allocUnsafe(16) + buffer.writeInt32BE(16, 0) + buffer.writeInt16BE(1234, 4) + buffer.writeInt16BE(5678, 6) + buffer.writeInt32BE(processID, 8) + buffer.writeInt32BE(secretKey, 12) + return buffer +} + +type PortalOpts = { + type: 'S' | 'P' + name?: string +} + +const cstringMessage = (code: code, string: string): Buffer => { + const stringLen = Buffer.byteLength(string) + const len = 4 + stringLen + 1 + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len) + buffer[0] = code + buffer.writeInt32BE(len, 1) + buffer.write(string, 5, 'utf-8') + buffer[len] = 0 // null terminate cString + return buffer +} + +const emptyDescribePortal = writer.addCString('P').flush(code.describe) +const emptyDescribeStatement = writer.addCString('S').flush(code.describe) + +const describe = (msg: PortalOpts): Buffer => { + return msg.name + ? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? emptyDescribePortal + : emptyDescribeStatement +} + +const close = (msg: PortalOpts): Buffer => { + const text = `${msg.type}${msg.name || ''}` + return cstringMessage(code.close, text) +} + +const copyData = (chunk: Buffer): Buffer => { + return writer.add(chunk).flush(code.copyFromChunk) +} + +const copyFail = (message: string): Buffer => { + return cstringMessage(code.copyFail, message) +} + +const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]) + +const flushBuffer = codeOnlyBuffer(code.flush) +const syncBuffer = codeOnlyBuffer(code.sync) +const endBuffer = codeOnlyBuffer(code.end) +const copyDoneBuffer = codeOnlyBuffer(code.copyDone) + +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +} + +export { serialize } diff --git a/node_modules/pg-protocol/src/testing/buffer-list.ts b/node_modules/pg-protocol/src/testing/buffer-list.ts new file mode 100644 index 0000000..15ac785 --- /dev/null +++ b/node_modules/pg-protocol/src/testing/buffer-list.ts @@ -0,0 +1,75 @@ +export default class BufferList { + constructor(public buffers: Buffer[] = []) {} + + public add(buffer: Buffer, front?: boolean) { + this.buffers[front ? 
'unshift' : 'push'](buffer) + return this + } + + public addInt16(val: number, front?: boolean) { + return this.add(Buffer.from([val >>> 8, val >>> 0]), front) + } + + public getByteLength(initial?: number) { + return this.buffers.reduce(function (previous, current) { + return previous + current.length + }, initial || 0) + } + + public addInt32(val: number, first?: boolean) { + return this.add( + Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]), + first + ) + } + + public addCString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len + 1) + buffer.write(val) + buffer[len] = 0 + return this.add(buffer, front) + } + + public addString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len) + buffer.write(val) + return this.add(buffer, front) + } + + public addChar(char: string, first?: boolean) { + return this.add(Buffer.from(char, 'utf8'), first) + } + + public addByte(byte: number) { + return this.add(Buffer.from([byte])) + } + + public join(appendLength?: boolean, char?: string): Buffer { + var length = this.getByteLength() + if (appendLength) { + this.addInt32(length + 4, true) + return this.join(false, char) + } + if (char) { + this.addChar(char, true) + length++ + } + var result = Buffer.alloc(length) + var index = 0 + this.buffers.forEach(function (buffer) { + buffer.copy(result, index, 0) + index += buffer.length + }) + return result + } + + public static concat(): Buffer { + var total = new BufferList() + for (var i = 0; i < arguments.length; i++) { + total.add(arguments[i]) + } + return total.join() + } +} diff --git a/node_modules/pg-protocol/src/testing/test-buffers.ts b/node_modules/pg-protocol/src/testing/test-buffers.ts new file mode 100644 index 0000000..a4d49f3 --- /dev/null +++ b/node_modules/pg-protocol/src/testing/test-buffers.ts @@ -0,0 +1,166 @@ +// https://www.postgresql.org/docs/current/protocol-message-formats.html +import BufferList from './buffer-list' + +const buffers = { + readyForQuery: function () { + return new BufferList().add(Buffer.from('I')).join(true, 'Z') + }, + + authenticationOk: function () { + return new BufferList().addInt32(0).join(true, 'R') + }, + + authenticationCleartextPassword: function () { + return new BufferList().addInt32(3).join(true, 'R') + }, + + authenticationMD5Password: function () { + return new BufferList() + .addInt32(5) + .add(Buffer.from([1, 2, 3, 4])) + .join(true, 'R') + }, + + authenticationSASL: function () { + return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R') + }, + + authenticationSASLContinue: function () { + return new BufferList().addInt32(11).addString('data').join(true, 'R') + }, + + authenticationSASLFinal: function () { + return new BufferList().addInt32(12).addString('data').join(true, 'R') + }, + + parameterStatus: function (name: string, value: string) { + return new BufferList().addCString(name).addCString(value).join(true, 'S') + }, + + backendKeyData: function (processID: number, secretKey: number) { + return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K') + }, + + commandComplete: function (string: string) { + return new BufferList().addCString(string).join(true, 'C') + }, + + rowDescription: function (fields: any[]) { + fields = fields || [] + var buf = new BufferList() + buf.addInt16(fields.length) + fields.forEach(function (field) { + buf + .addCString(field.name) + .addInt32(field.tableID || 0) + 
.addInt16(field.attributeNumber || 0) + .addInt32(field.dataTypeID || 0) + .addInt16(field.dataTypeSize || 0) + .addInt32(field.typeModifier || 0) + .addInt16(field.formatCode || 0) + }) + return buf.join(true, 'T') + }, + + parameterDescription: function (dataTypeIDs: number[]) { + dataTypeIDs = dataTypeIDs || [] + var buf = new BufferList() + buf.addInt16(dataTypeIDs.length) + dataTypeIDs.forEach(function (dataTypeID) { + buf.addInt32(dataTypeID) + }) + return buf.join(true, 't') + }, + + dataRow: function (columns: any[]) { + columns = columns || [] + var buf = new BufferList() + buf.addInt16(columns.length) + columns.forEach(function (col) { + if (col == null) { + buf.addInt32(-1) + } else { + var strBuf = Buffer.from(col, 'utf8') + buf.addInt32(strBuf.length) + buf.add(strBuf) + } + }) + return buf.join(true, 'D') + }, + + error: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'E') + }, + + notice: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'N') + }, + + errorOrNotice: function (fields: any) { + fields = fields || [] + var buf = new BufferList() + fields.forEach(function (field: any) { + buf.addChar(field.type) + buf.addCString(field.value) + }) + return buf.add(Buffer.from([0])) // terminator + }, + + parseComplete: function () { + return new BufferList().join(true, '1') + }, + + bindComplete: function () { + return new BufferList().join(true, '2') + }, + + notification: function (id: number, channel: string, payload: string) { + return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A') + }, + + emptyQuery: function () { + return new BufferList().join(true, 'I') + }, + + portalSuspended: function () { + return new BufferList().join(true, 's') + }, + + closeComplete: function () { + return new BufferList().join(true, '3') + }, + + copyIn: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'G') + }, + + copyOut: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'H') + }, + + copyData: function (bytes: Buffer) { + return new BufferList().add(bytes).join(true, 'd') + }, + + copyDone: function () { + return new BufferList().join(true, 'c') + }, +} + +export default buffers diff --git a/node_modules/pg-protocol/src/types/chunky.d.ts b/node_modules/pg-protocol/src/types/chunky.d.ts new file mode 100644 index 0000000..7389bda --- /dev/null +++ b/node_modules/pg-protocol/src/types/chunky.d.ts @@ -0,0 +1 @@ +declare module 'chunky' diff --git a/node_modules/pg-types/.travis.yml b/node_modules/pg-types/.travis.yml new file mode 100644 index 0000000..dd6b033 --- /dev/null +++ b/node_modules/pg-types/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - '4' + - 'lts/*' + - 'node' +env: + - PGUSER=postgres diff --git a/node_modules/pg-types/Makefile b/node_modules/pg-types/Makefile new file mode 100644 index 0000000..d7ec83d --- /dev/null +++ b/node_modules/pg-types/Makefile @@ -0,0 +1,14 @@ +.PHONY: publish-patch test + +test: + npm test + +patch: test + npm version patch -m "Bump version" + git push origin master --tags + npm publish + +minor: test + npm version minor -m "Bump version" + git push origin master --tags + npm publish diff --git a/node_modules/pg-types/README.md 
b/node_modules/pg-types/README.md new file mode 100644 index 0000000..54a3f2c --- /dev/null +++ b/node_modules/pg-types/README.md @@ -0,0 +1,75 @@ +# pg-types + +This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git) + +## use + +This module is consumed and exported from the root `pg` object of node-postgres. To access it, do the following: + +```js +var types = require('pg').types +``` + +Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particular `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed. + +Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers, node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the database. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. Let's say that you know you don't and won't ever have numbers greater than `int4` in your database, but you're tired of receiving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this: + +```js +var types = require('pg').types +types.setTypeParser(20, function(val) { + return parseInt(val) +}) +``` + +__boom__: now you get numbers instead of strings. + +Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this: + +```js +var types = require('pg').types +var moment = require('moment') +var parseFn = function(val) { + return val === null ? null : moment(val) +} +types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn) +types.setTypeParser(types.builtins.TIMESTAMP, parseFn) +``` +_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_ + +If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not: + +```bash +$ psql -c "select typname, oid, typarray from pg_type order by oid" +``` + +If you want to find out the OID of a specific type: + +```bash +$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid" +``` + +:smile: + +## license + +The MIT License (MIT) + +Copyright (c) 2014 Brian M.
Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/pg-types/index.d.ts b/node_modules/pg-types/index.d.ts new file mode 100644 index 0000000..4bebcbe --- /dev/null +++ b/node_modules/pg-types/index.d.ts @@ -0,0 +1,137 @@ +export enum TypeId { + BOOL = 16, + BYTEA = 17, + CHAR = 18, + INT8 = 20, + INT2 = 21, + INT4 = 23, + REGPROC = 24, + TEXT = 25, + OID = 26, + TID = 27, + XID = 28, + CID = 29, + JSON = 114, + XML = 142, + PG_NODE_TREE = 194, + SMGR = 210, + PATH = 602, + POLYGON = 604, + CIDR = 650, + FLOAT4 = 700, + FLOAT8 = 701, + ABSTIME = 702, + RELTIME = 703, + TINTERVAL = 704, + CIRCLE = 718, + MACADDR8 = 774, + MONEY = 790, + MACADDR = 829, + INET = 869, + ACLITEM = 1033, + BPCHAR = 1042, + VARCHAR = 1043, + DATE = 1082, + TIME = 1083, + TIMESTAMP = 1114, + TIMESTAMPTZ = 1184, + INTERVAL = 1186, + TIMETZ = 1266, + BIT = 1560, + VARBIT = 1562, + NUMERIC = 1700, + REFCURSOR = 1790, + REGPROCEDURE = 2202, + REGOPER = 2203, + REGOPERATOR = 2204, + REGCLASS = 2205, + REGTYPE = 2206, + UUID = 2950, + TXID_SNAPSHOT = 2970, + PG_LSN = 3220, + PG_NDISTINCT = 3361, + PG_DEPENDENCIES = 3402, + TSVECTOR = 3614, + TSQUERY = 3615, + GTSVECTOR = 3642, + REGCONFIG = 3734, + REGDICTIONARY = 3769, + JSONB = 3802, + REGNAMESPACE = 4089, + REGROLE = 4096 +} + +export type builtinsTypes = + 'BOOL' | + 'BYTEA' | + 'CHAR' | + 'INT8' | + 'INT2' | + 'INT4' | + 'REGPROC' | + 'TEXT' | + 'OID' | + 'TID' | + 'XID' | + 'CID' | + 'JSON' | + 'XML' | + 'PG_NODE_TREE' | + 'SMGR' | + 'PATH' | + 'POLYGON' | + 'CIDR' | + 'FLOAT4' | + 'FLOAT8' | + 'ABSTIME' | + 'RELTIME' | + 'TINTERVAL' | + 'CIRCLE' | + 'MACADDR8' | + 'MONEY' | + 'MACADDR' | + 'INET' | + 'ACLITEM' | + 'BPCHAR' | + 'VARCHAR' | + 'DATE' | + 'TIME' | + 'TIMESTAMP' | + 'TIMESTAMPTZ' | + 'INTERVAL' | + 'TIMETZ' | + 'BIT' | + 'VARBIT' | + 'NUMERIC' | + 'REFCURSOR' | + 'REGPROCEDURE' | + 'REGOPER' | + 'REGOPERATOR' | + 'REGCLASS' | + 'REGTYPE' | + 'UUID' | + 'TXID_SNAPSHOT' | + 'PG_LSN' | + 'PG_NDISTINCT' | + 'PG_DEPENDENCIES' | + 'TSVECTOR' | + 'TSQUERY' | + 'GTSVECTOR' | + 'REGCONFIG' | + 'REGDICTIONARY' | + 'JSONB' | + 'REGNAMESPACE' | + 'REGROLE'; + +export type TypesBuiltins = {[key in builtinsTypes]: TypeId}; + +export type TypeFormat = 'text' | 'binary'; + +export const builtins: TypesBuiltins; + +export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void; +export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void; + +export const getTypeParser: (id: TypeId, format?: TypeFormat) 
=> any + +export const arrayParser: (source: string, transform: (entry: any) => any) => any[]; diff --git a/node_modules/pg-types/index.js b/node_modules/pg-types/index.js new file mode 100644 index 0000000..952d8c2 --- /dev/null +++ b/node_modules/pg-types/index.js @@ -0,0 +1,47 @@ +var textParsers = require('./lib/textParsers'); +var binaryParsers = require('./lib/binaryParsers'); +var arrayParser = require('./lib/arrayParser'); +var builtinTypes = require('./lib/builtins'); + +exports.getTypeParser = getTypeParser; +exports.setTypeParser = setTypeParser; +exports.arrayParser = arrayParser; +exports.builtins = builtinTypes; + +var typeParsers = { + text: {}, + binary: {} +}; + +//the empty parse function +function noParse (val) { + return String(val); +}; + +//returns a function used to convert a specific type (specified by +//oid) into a result javascript type +//note: the oid can be obtained via the following sql query: +//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE'; +function getTypeParser (oid, format) { + format = format || 'text'; + if (!typeParsers[format]) { + return noParse; + } + return typeParsers[format][oid] || noParse; +}; + +function setTypeParser (oid, format, parseFn) { + if(typeof format == 'function') { + parseFn = format; + format = 'text'; + } + typeParsers[format][oid] = parseFn; +}; + +textParsers.init(function(oid, converter) { + typeParsers.text[oid] = converter; +}); + +binaryParsers.init(function(oid, converter) { + typeParsers.binary[oid] = converter; +}); diff --git a/node_modules/pg-types/index.test-d.ts b/node_modules/pg-types/index.test-d.ts new file mode 100644 index 0000000..d530e6e --- /dev/null +++ b/node_modules/pg-types/index.test-d.ts @@ -0,0 +1,21 @@ +import * as types from '.'; +import { expectType } from 'tsd'; + +// builtins +expectType(types.builtins); + +// getTypeParser +const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text'); +const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary'); +expectType(noParse('noParse')); +expectType(numericParser([200, 1, 0, 15])); + +// getArrayParser +const value = types.arrayParser('{1,2,3}', (num) => parseInt(num)); +expectType(value); + +//setTypeParser +types.setTypeParser(types.builtins.INT8, parseInt); +types.setTypeParser(types.builtins.FLOAT8, parseFloat); +types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]); +types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat); diff --git a/node_modules/pg-types/lib/arrayParser.js b/node_modules/pg-types/lib/arrayParser.js new file mode 100644 index 0000000..81ccffb --- /dev/null +++ b/node_modules/pg-types/lib/arrayParser.js @@ -0,0 +1,11 @@ +var array = require('postgres-array'); + +module.exports = { + create: function (source, transform) { + return { + parse: function() { + return array.parse(source, transform); + } + }; + } +}; diff --git a/node_modules/pg-types/lib/binaryParsers.js b/node_modules/pg-types/lib/binaryParsers.js new file mode 100644 index 0000000..e12c2f4 --- /dev/null +++ b/node_modules/pg-types/lib/binaryParsers.js @@ -0,0 +1,257 @@ +var parseInt64 = require('pg-int8'); + +var parseBits = function(data, bits, offset, invert, callback) { + offset = offset || 0; + invert = invert || false; + callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; + var offsetBytes = offset >> 3; + + var inv = function(value) { + if (invert) { + return ~value & 0xff; + } + + return value; + }; + + // read first (maybe partial) byte + 
var mask = 0xff; + var firstBits = 8 - (offset % 8); + if (bits < firstBits) { + mask = (0xff << (8 - bits)) & 0xff; + firstBits = bits; + } + + if (offset) { + mask = mask >> (offset % 8); + } + + var result = 0; + if ((offset % 8) + bits >= 8) { + result = callback(0, inv(data[offsetBytes]) & mask, firstBits); + } + + // read bytes + var bytes = (bits + offset) >> 3; + for (var i = offsetBytes + 1; i < bytes; i++) { + result = callback(result, inv(data[i]), 8); + } + + // bits to read, that are not a complete byte + var lastBits = (bits + offset) % 8; + if (lastBits > 0) { + result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); + } + + return result; +}; + +var parseFloatFromBits = function(data, precisionBits, exponentBits) { + var bias = Math.pow(2, exponentBits - 1) - 1; + var sign = parseBits(data, 1); + var exponent = parseBits(data, exponentBits, 1); + + if (exponent === 0) { + return 0; + } + + // parse mantissa + var precisionBitsCounter = 1; + var parsePrecisionBits = function(lastValue, newValue, bits) { + if (lastValue === 0) { + lastValue = 1; + } + + for (var i = 1; i <= bits; i++) { + precisionBitsCounter /= 2; + if ((newValue & (0x1 << (bits - i))) > 0) { + lastValue += precisionBitsCounter; + } + } + + return lastValue; + }; + + var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); + + // special cases + if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { + if (mantissa === 0) { + return (sign === 0) ? Infinity : -Infinity; + } + + return NaN; + } + + // normale number + return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; +}; + +var parseInt16 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 15, 1, true) + 1); + } + + return parseBits(value, 15, 1); +}; + +var parseInt32 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 31, 1, true) + 1); + } + + return parseBits(value, 31, 1); +}; + +var parseFloat32 = function(value) { + return parseFloatFromBits(value, 23, 8); +}; + +var parseFloat64 = function(value) { + return parseFloatFromBits(value, 52, 11); +}; + +var parseNumeric = function(value) { + var sign = parseBits(value, 16, 32); + if (sign == 0xc000) { + return NaN; + } + + var weight = Math.pow(10000, parseBits(value, 16, 16)); + var result = 0; + + var digits = []; + var ndigits = parseBits(value, 16); + for (var i = 0; i < ndigits; i++) { + result += parseBits(value, 16, 64 + (16 * i)) * weight; + weight /= 10000; + } + + var scale = Math.pow(10, parseBits(value, 16, 48)); + return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; +}; + +var parseDate = function(isUTC, value) { + var sign = parseBits(value, 1); + var rawValue = parseBits(value, 63, 1); + + // discard usecs and shift from 2000 to 1970 + var result = new Date((((sign === 0) ? 
1 : -1) * rawValue / 1000) + 946684800000); + + if (!isUTC) { + result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); + } + + // add microseconds to the date + result.usec = rawValue % 1000; + result.getMicroSeconds = function() { + return this.usec; + }; + result.setMicroSeconds = function(value) { + this.usec = value; + }; + result.getUTCMicroSeconds = function() { + return this.usec; + }; + + return result; +}; + +var parseArray = function(value) { + var dim = parseBits(value, 32); + + var flags = parseBits(value, 32, 32); + var elementType = parseBits(value, 32, 64); + + var offset = 96; + var dims = []; + for (var i = 0; i < dim; i++) { + // parse dimension + dims[i] = parseBits(value, 32, offset); + offset += 32; + + // ignore lower bounds + offset += 32; + } + + var parseElement = function(elementType) { + // parse content length + var length = parseBits(value, 32, offset); + offset += 32; + + // parse null values + if (length == 0xffffffff) { + return null; + } + + var result; + if ((elementType == 0x17) || (elementType == 0x14)) { + // int/bigint + result = parseBits(value, length * 8, offset); + offset += length * 8; + return result; + } + else if (elementType == 0x19) { + // string + result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); + return result; + } + else { + console.log("ERROR: ElementType not implemented: " + elementType); + } + }; + + var parse = function(dimension, elementType) { + var array = []; + var i; + + if (dimension.length > 1) { + var count = dimension.shift(); + for (i = 0; i < count; i++) { + array[i] = parse(dimension, elementType); + } + dimension.unshift(count); + } + else { + for (i = 0; i < dimension[0]; i++) { + array[i] = parseElement(elementType); + } + } + + return array; + }; + + return parse(dims, elementType); +}; + +var parseText = function(value) { + return value.toString('utf8'); +}; + +var parseBool = function(value) { + if(value === null) return null; + return (parseBits(value, 8) > 0); +}; + +var init = function(register) { + register(20, parseInt64); + register(21, parseInt16); + register(23, parseInt32); + register(26, parseInt32); + register(1700, parseNumeric); + register(700, parseFloat32); + register(701, parseFloat64); + register(16, parseBool); + register(1114, parseDate.bind(null, false)); + register(1184, parseDate.bind(null, true)); + register(1000, parseArray); + register(1007, parseArray); + register(1016, parseArray); + register(1008, parseArray); + register(1009, parseArray); + register(25, parseText); +}; + +module.exports = { + init: init +}; diff --git a/node_modules/pg-types/lib/builtins.js b/node_modules/pg-types/lib/builtins.js new file mode 100644 index 0000000..f0c134a --- /dev/null +++ b/node_modules/pg-types/lib/builtins.js @@ -0,0 +1,73 @@ +/** + * Following query was used to generate this file: + + SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid) + FROM pg_type PT + WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable) + AND typtype = 'b' -- Only basic types + AND typelem = 0 -- Ignore aliases + AND typisdefined -- Ignore undefined types + */ + +module.exports = { + BOOL: 16, + BYTEA: 17, + CHAR: 18, + INT8: 20, + INT2: 21, + INT4: 23, + REGPROC: 24, + TEXT: 25, + OID: 26, + TID: 27, + XID: 28, + CID: 29, + JSON: 114, + XML: 142, + PG_NODE_TREE: 194, + SMGR: 210, + PATH: 602, + POLYGON: 604, + CIDR: 650, + 
FLOAT4: 700, + FLOAT8: 701, + ABSTIME: 702, + RELTIME: 703, + TINTERVAL: 704, + CIRCLE: 718, + MACADDR8: 774, + MONEY: 790, + MACADDR: 829, + INET: 869, + ACLITEM: 1033, + BPCHAR: 1042, + VARCHAR: 1043, + DATE: 1082, + TIME: 1083, + TIMESTAMP: 1114, + TIMESTAMPTZ: 1184, + INTERVAL: 1186, + TIMETZ: 1266, + BIT: 1560, + VARBIT: 1562, + NUMERIC: 1700, + REFCURSOR: 1790, + REGPROCEDURE: 2202, + REGOPER: 2203, + REGOPERATOR: 2204, + REGCLASS: 2205, + REGTYPE: 2206, + UUID: 2950, + TXID_SNAPSHOT: 2970, + PG_LSN: 3220, + PG_NDISTINCT: 3361, + PG_DEPENDENCIES: 3402, + TSVECTOR: 3614, + TSQUERY: 3615, + GTSVECTOR: 3642, + REGCONFIG: 3734, + REGDICTIONARY: 3769, + JSONB: 3802, + REGNAMESPACE: 4089, + REGROLE: 4096 +}; diff --git a/node_modules/pg-types/lib/textParsers.js b/node_modules/pg-types/lib/textParsers.js new file mode 100644 index 0000000..b1218bf --- /dev/null +++ b/node_modules/pg-types/lib/textParsers.js @@ -0,0 +1,215 @@ +var array = require('postgres-array') +var arrayParser = require('./arrayParser'); +var parseDate = require('postgres-date'); +var parseInterval = require('postgres-interval'); +var parseByteA = require('postgres-bytea'); + +function allowNull (fn) { + return function nullAllowed (value) { + if (value === null) return value + return fn(value) + } +} + +function parseBool (value) { + if (value === null) return value + return value === 'TRUE' || + value === 't' || + value === 'true' || + value === 'y' || + value === 'yes' || + value === 'on' || + value === '1'; +} + +function parseBoolArray (value) { + if (!value) return null + return array.parse(value, parseBool) +} + +function parseBaseTenInt (string) { + return parseInt(string, 10) +} + +function parseIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(parseBaseTenInt)) +} + +function parseBigIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(function (entry) { + return parseBigInteger(entry).trim() + })) +} + +var parsePointArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parsePoint(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseFloatArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parseFloat(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseStringArray = function(value) { + if(!value) { return null; } + + var p = arrayParser.create(value); + return p.parse(); +}; + +var parseDateArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseDate(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseIntervalArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseInterval(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseByteAArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(parseByteA)); +}; + +var parseInteger = function(value) { + return parseInt(value, 10); +}; + +var parseBigInteger = function(value) { + var valStr = String(value); + if (/^\d+$/.test(valStr)) { return valStr; } + return value; +}; + +var parseJsonArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(JSON.parse)); +}; + +var parsePoint = 
function(value) { + if (value[0] !== '(') { return null; } + + value = value.substring( 1, value.length - 1 ).split(','); + + return { + x: parseFloat(value[0]) + , y: parseFloat(value[1]) + }; +}; + +var parseCircle = function(value) { + if (value[0] !== '<' && value[1] !== '(') { return null; } + + var point = '('; + var radius = ''; + var pointParsed = false; + for (var i = 2; i < value.length - 1; i++){ + if (!pointParsed) { + point += value[i]; + } + + if (value[i] === ')') { + pointParsed = true; + continue; + } else if (!pointParsed) { + continue; + } + + if (value[i] === ','){ + continue; + } + + radius += value[i]; + } + var result = parsePoint(point); + result.radius = parseFloat(radius); + + return result; +}; + +var init = function(register) { + register(20, parseBigInteger); // int8 + register(21, parseInteger); // int2 + register(23, parseInteger); // int4 + register(26, parseInteger); // oid + register(700, parseFloat); // float4/real + register(701, parseFloat); // float8/double + register(16, parseBool); + register(1082, parseDate); // date + register(1114, parseDate); // timestamp without timezone + register(1184, parseDate); // timestamp + register(600, parsePoint); // point + register(651, parseStringArray); // cidr[] + register(718, parseCircle); // circle + register(1000, parseBoolArray); + register(1001, parseByteAArray); + register(1005, parseIntegerArray); // _int2 + register(1007, parseIntegerArray); // _int4 + register(1028, parseIntegerArray); // oid[] + register(1016, parseBigIntegerArray); // _int8 + register(1017, parsePointArray); // point[] + register(1021, parseFloatArray); // _float4 + register(1022, parseFloatArray); // _float8 + register(1231, parseFloatArray); // _numeric + register(1014, parseStringArray); //char + register(1015, parseStringArray); //varchar + register(1008, parseStringArray); + register(1009, parseStringArray); + register(1040, parseStringArray); // macaddr[] + register(1041, parseStringArray); // inet[] + register(1115, parseDateArray); // timestamp without time zone[] + register(1182, parseDateArray); // _date + register(1185, parseDateArray); // timestamp with time zone[] + register(1186, parseInterval); + register(1187, parseIntervalArray); + register(17, parseByteA); + register(114, JSON.parse.bind(JSON)); // json + register(3802, JSON.parse.bind(JSON)); // jsonb + register(199, parseJsonArray); // json[] + register(3807, parseJsonArray); // jsonb[] + register(3907, parseStringArray); // numrange[] + register(2951, parseStringArray); // uuid[] + register(791, parseStringArray); // money[] + register(1183, parseStringArray); // time[] + register(1270, parseStringArray); // timetz[] +}; + +module.exports = { + init: init +}; diff --git a/node_modules/pg-types/package.json b/node_modules/pg-types/package.json new file mode 100644 index 0000000..5f18026 --- /dev/null +++ b/node_modules/pg-types/package.json @@ -0,0 +1,42 @@ +{ + "name": "pg-types", + "version": "2.2.0", + "description": "Query result type converters for node-postgres", + "main": "index.js", + "scripts": { + "test": "tape test/*.js | tap-spec && npm run test-ts", + "test-ts": "if-node-version '>= 8' tsd" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-pg-types.git" + }, + "keywords": [ + "postgres", + "PostgreSQL", + "pg" + ], + "author": "Brian M. 
Carlson", + "license": "MIT", + "bugs": { + "url": "https://github.com/brianc/node-pg-types/issues" + }, + "homepage": "https://github.com/brianc/node-pg-types", + "devDependencies": { + "if-node-version": "^1.1.1", + "pff": "^1.0.0", + "tap-spec": "^4.0.0", + "tape": "^4.0.0", + "tsd": "^0.7.4" + }, + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } +} diff --git a/node_modules/pg-types/test/index.js b/node_modules/pg-types/test/index.js new file mode 100644 index 0000000..b7d05cd --- /dev/null +++ b/node_modules/pg-types/test/index.js @@ -0,0 +1,24 @@ + +var test = require('tape') +var printf = require('pff') +var getTypeParser = require('../').getTypeParser +var types = require('./types') + +test('types', function (t) { + Object.keys(types).forEach(function (typeName) { + var type = types[typeName] + t.test(typeName, function (t) { + var parser = getTypeParser(type.id, type.format) + type.tests.forEach(function (tests) { + var input = tests[0] + var expected = tests[1] + var result = parser(input) + if (typeof expected === 'function') { + return expected(t, result) + } + t.equal(result, expected) + }) + t.end() + }) + }) +}) diff --git a/node_modules/pg-types/test/types.js b/node_modules/pg-types/test/types.js new file mode 100644 index 0000000..af708a5 --- /dev/null +++ b/node_modules/pg-types/test/types.js @@ -0,0 +1,597 @@ +'use strict' + +exports['string/varchar'] = { + format: 'text', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['integer/int4'] = { + format: 'text', + id: 23, + tests: [ + ['2147483647', 2147483647] + ] +} + +exports['smallint/int2'] = { + format: 'text', + id: 21, + tests: [ + ['32767', 32767] + ] +} + +exports['bigint/int8'] = { + format: 'text', + id: 20, + tests: [ + ['9223372036854775807', '9223372036854775807'] + ] +} + +exports.oid = { + format: 'text', + id: 26, + tests: [ + ['103', 103] + ] +} + +var bignum = '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628' +exports.numeric = { + format: 'text', + id: 1700, + tests: [ + [bignum, bignum] + ] +} + +exports['real/float4'] = { + format: 'text', + id: 700, + tests: [ + ['123.456', 123.456] + ] +} + +exports['double precision / float 8'] = { + format: 'text', + id: 701, + tests: [ + ['12345678.12345678', 12345678.12345678] + ] +} + +exports.boolean = { + format: 'text', + id: 16, + tests: [ + ['TRUE', true], + ['t', true], + ['true', true], + ['y', true], + ['yes', true], + ['on', true], + ['1', true], + ['f', false], + [null, null] + ] +} + +exports.timestamptz = { + format: 'text', + id: 1184, + tests: [ + [ + '2010-10-31 14:54:13.74-05:30', + dateEquals(2010, 9, 31, 20, 24, 13, 740) + ], + [ + '2011-01-23 22:05:00.68-06', + dateEquals(2011, 0, 24, 4, 5, 0, 680) + ], + [ + '2010-10-30 14:11:12.730838Z', + dateEquals(2010, 9, 30, 14, 11, 12, 730) + ], + [ + '2010-10-30 13:10:01+05', + dateEquals(2010, 9, 30, 8, 10, 1, 0) + ] + ] +} + +exports.timestamp = { + format: 'text', + id: 1114, + tests: [ + [ + '2010-10-31 00:00:00', + function (t, value) { + t.equal( + value.toUTCString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString() + ) + t.equal( + value.toString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString() + ) + } + ] + ] +} + +exports.date = { + format: 'text', + id: 1082, + tests: [ + ['2010-10-31', function (t, value) { + var now = new Date(2010, 9, 31) + dateEquals( + 
2010, + now.getUTCMonth(), + now.getUTCDate(), + now.getUTCHours(), 0, 0, 0)(t, value) + t.equal(value.getHours(), now.getHours()) + }] + ] +} + +exports.inet = { + format: 'text', + id: 869, + tests: [ + ['8.8.8.8', '8.8.8.8'], + ['2001:4860:4860::8888', '2001:4860:4860::8888'], + ['127.0.0.1', '127.0.0.1'], + ['fd00:1::40e', 'fd00:1::40e'], + ['1.2.3.4', '1.2.3.4'] + ] +} + +exports.cidr = { + format: 'text', + id: 650, + tests: [ + ['172.16.0.0/12', '172.16.0.0/12'], + ['fe80::/10', 'fe80::/10'], + ['fc00::/7', 'fc00::/7'], + ['192.168.0.0/24', '192.168.0.0/24'], + ['10.0.0.0/8', '10.0.0.0/8'] + ] +} + +exports.macaddr = { + format: 'text', + id: 829, + tests: [ + ['08:00:2b:01:02:03', '08:00:2b:01:02:03'], + ['16:10:9f:0d:66:00', '16:10:9f:0d:66:00'] + ] +} + +exports.numrange = { + format: 'text', + id: 3906, + tests: [ + ['[,]', '[,]'], + ['(,)', '(,)'], + ['(,]', '(,]'], + ['[1,)', '[1,)'], + ['[,1]', '[,1]'], + ['(1,2)', '(1,2)'], + ['(1,20.5]', '(1,20.5]'] + ] +} + +exports.interval = { + format: 'text', + id: 1186, + tests: [ + ['01:02:03', function (t, value) { + t.equal(value.toPostgres(), '3 seconds 2 minutes 1 hours') + t.deepEqual(value, {hours: 1, minutes: 2, seconds: 3}) + }], + ['01:02:03.456', function (t, value) { + t.deepEqual(value, {hours: 1, minutes:2, seconds: 3, milliseconds: 456}) + }], + ['1 year -32 days', function (t, value) { + t.equal(value.toPostgres(), '-32 days 1 years') + t.deepEqual(value, {years: 1, days: -32}) + }], + ['1 day -00:00:03', function (t, value) { + t.equal(value.toPostgres(), '-3 seconds 1 days') + t.deepEqual(value, {days: 1, seconds: -3}) + }] + ] +} + +exports.bytea = { + format: 'text', + id: 17, + tests: [ + ['foo\\000\\200\\\\\\377', function (t, value) { + var buffer = new Buffer([102, 111, 111, 0, 128, 92, 255]) + t.ok(buffer.equals(value)) + }], + ['', function (t, value) { + var buffer = new Buffer(0) + t.ok(buffer.equals(value)) + }] + ] +} + +exports['array/boolean'] = { + format: 'text', + id: 1000, + tests: [ + ['{true,false}', function (t, value) { + t.deepEqual(value, [true, false]) + }] + ] +} + +exports['array/char'] = { + format: 'text', + id: 1014, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/varchar'] = { + format: 'text', + id: 1015, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/text'] = { + format: 'text', + id: 1008, + tests: [ + ['{foo}', function (t, value) { + t.deepEqual(value, ['foo']) + }] + ] +} + +exports['array/bytea'] = { + format: 'text', + id: 1001, + tests: [ + ['{"\\\\x00000000"}', function (t, value) { + var buffer = new Buffer('00000000', 'hex') + t.ok(Array.isArray(value)) + t.equal(value.length, 1) + t.ok(buffer.equals(value[0])) + }], + ['{NULL,"\\\\x4e554c4c"}', function (t, value) { + var buffer = new Buffer('4e554c4c', 'hex') + t.ok(Array.isArray(value)) + t.equal(value.length, 2) + t.equal(value[0], null) + t.ok(buffer.equals(value[1])) + }], + ] +} + +exports['array/numeric'] = { + format: 'text', + id: 1231, + tests: [ + ['{1.2,3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/int2'] = { + format: 'text', + id: 1005, + tests: [ + ['{-32768, -32767, 32766, 32767}', function (t, value) { + t.deepEqual(value, [-32768, -32767, 32766, 32767]) + }] + ] +} + +exports['array/int4'] = { + format: 'text', + id: 1005, + tests: [ + ['{-2147483648, -2147483647, 2147483646, 2147483647}', function (t, value) { + 
t.deepEqual(value, [-2147483648, -2147483647, 2147483646, 2147483647]) + }] + ] +} + +exports['array/int8'] = { + format: 'text', + id: 1016, + tests: [ + [ + '{-9223372036854775808, -9223372036854775807, 9223372036854775806, 9223372036854775807}', + function (t, value) { + t.deepEqual(value, [ + '-9223372036854775808', + '-9223372036854775807', + '9223372036854775806', + '9223372036854775807' + ]) + } + ] + ] +} + +exports['array/json'] = { + format: 'text', + id: 199, + tests: [ + [ + '{{1,2},{[3],"[4,5]"},{null,NULL}}', + function (t, value) { + t.deepEqual(value, [ + [1, 2], + [[3], [4, 5]], + [null, null], + ]) + } + ] + ] +} + +exports['array/jsonb'] = { + format: 'text', + id: 3807, + tests: exports['array/json'].tests +} + +exports['array/point'] = { + format: 'text', + id: 1017, + tests: [ + ['{"(25.1,50.5)","(10.1,40)"}', function (t, value) { + t.deepEqual(value, [{x: 25.1, y: 50.5}, {x: 10.1, y: 40}]) + }] + ] +} + +exports['array/oid'] = { + format: 'text', + id: 1028, + tests: [ + ['{25864,25860}', function (t, value) { + t.deepEqual(value, [25864, 25860]) + }] + ] +} + +exports['array/float4'] = { + format: 'text', + id: 1021, + tests: [ + ['{1.2, 3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/float8'] = { + format: 'text', + id: 1022, + tests: [ + ['{-12345678.1234567, 12345678.12345678}', function (t, value) { + t.deepEqual(value, [-12345678.1234567, 12345678.12345678]) + }] + ] +} + +exports['array/date'] = { + format: 'text', + id: 1182, + tests: [ + ['{2014-01-01,2015-12-31}', function (t, value) { + var expecteds = [new Date(2014, 0, 1), new Date(2015, 11, 31)] + t.equal(value.length, 2) + value.forEach(function (date, index) { + var expected = expecteds[index] + dateEquals( + expected.getUTCFullYear(), + expected.getUTCMonth(), + expected.getUTCDate(), + expected.getUTCHours(), 0, 0, 0)(t, date) + }) + }] + ] +} + +exports['array/interval'] = { + format: 'text', + id: 1187, + tests: [ + ['{01:02:03,1 day -00:00:03}', function (t, value) { + var expecteds = [{hours: 1, minutes: 2, seconds: 3}, + {days: 1, seconds: -3}] + t.equal(value.length, 2) + t.deepEqual(value, expecteds); + }] + ] +} + +exports['array/inet'] = { + format: 'text', + id: 1041, + tests: [ + ['{8.8.8.8}', function (t, value) { + t.deepEqual(value, ['8.8.8.8']); + }], + ['{2001:4860:4860::8888}', function (t, value) { + t.deepEqual(value, ['2001:4860:4860::8888']); + }], + ['{127.0.0.1,fd00:1::40e,1.2.3.4}', function (t, value) { + t.deepEqual(value, ['127.0.0.1', 'fd00:1::40e', '1.2.3.4']); + }] + ] +} + +exports['array/cidr'] = { + format: 'text', + id: 651, + tests: [ + ['{172.16.0.0/12}', function (t, value) { + t.deepEqual(value, ['172.16.0.0/12']); + }], + ['{fe80::/10}', function (t, value) { + t.deepEqual(value, ['fe80::/10']); + }], + ['{10.0.0.0/8,fc00::/7,192.168.0.0/24}', function (t, value) { + t.deepEqual(value, ['10.0.0.0/8', 'fc00::/7', '192.168.0.0/24']); + }] + ] +} + +exports['array/macaddr'] = { + format: 'text', + id: 1040, + tests: [ + ['{08:00:2b:01:02:03,16:10:9f:0d:66:00}', function (t, value) { + t.deepEqual(value, ['08:00:2b:01:02:03', '16:10:9f:0d:66:00']); + }] + ] +} + +exports['array/numrange'] = { + format: 'text', + id: 3907, + tests: [ + ['{"[1,2]","(4.5,8)","[10,40)","(-21.2,60.3]"}', function (t, value) { + t.deepEqual(value, ['[1,2]', '(4.5,8)', '[10,40)', '(-21.2,60.3]']); + }], + ['{"[,20]","[3,]","[,]","(,35)","(1,)","(,)"}', function (t, value) { + t.deepEqual(value, ['[,20]', '[3,]', '[,]', '(,35)', '(1,)', '(,)']); 
+ }], + ['{"[,20)","[3,)","[,)","[,35)","[1,)","[,)"}', function (t, value) { + t.deepEqual(value, ['[,20)', '[3,)', '[,)', '[,35)', '[1,)', '[,)']); + }] + ] +} + +exports['binary-string/varchar'] = { + format: 'binary', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['binary-integer/int4'] = { + format: 'binary', + id: 23, + tests: [ + [[0, 0, 0, 100], 100] + ] +} + +exports['binary-smallint/int2'] = { + format: 'binary', + id: 21, + tests: [ + [[0, 101], 101] + ] +} + +exports['binary-bigint/int8'] = { + format: 'binary', + id: 20, + tests: [ + [new Buffer([0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), '9223372036854775807'] + ] +} + +exports['binary-oid'] = { + format: 'binary', + id: 26, + tests: [ + [[0, 0, 0, 103], 103] + ] +} + +exports['binary-numeric'] = { + format: 'binary', + id: 1700, + tests: [ + [ + [0, 2, 0, 0, 0, 0, 0, hex('0x64'), 0, 12, hex('0xd'), hex('0x48'), 0, 0, 0, 0], + 12.34 + ] + ] +} + +exports['binary-real/float4'] = { + format: 'binary', + id: 700, + tests: [ + [['0x41', '0x48', '0x00', '0x00'].map(hex), 12.5] + ] +} + +exports['binary-boolean'] = { + format: 'binary', + id: 16, + tests: [ + [[1], true], + [[0], false], + [null, null] + ] +} + +exports['binary-string'] = { + format: 'binary', + id: 25, + tests: [ + [ + new Buffer(['0x73', '0x6c', '0x61', '0x64', '0x64', '0x61'].map(hex)), + 'sladda' + ] + ] +} + +exports.point = { + format: 'text', + id: 600, + tests: [ + ['(25.1,50.5)', function (t, value) { + t.deepEqual(value, {x: 25.1, y: 50.5}) + }] + ] +} + +exports.circle = { + format: 'text', + id: 718, + tests: [ + ['<(25,10),5>', function (t, value) { + t.deepEqual(value, {x: 25, y: 10, radius: 5}) + }] + ] +} + +function hex (string) { + return parseInt(string, 16) +} + +function dateEquals () { + var timestamp = Date.UTC.apply(Date, arguments) + return function (t, value) { + t.equal(value.toUTCString(), new Date(timestamp).toUTCString()) + } +} diff --git a/node_modules/pg/LICENSE b/node_modules/pg/LICENSE new file mode 100644 index 0000000..5c14056 --- /dev/null +++ b/node_modules/pg/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2021 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
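The pg-types test fixtures above drive `getTypeParser` by OID and wire format. For orientation, here is a minimal usage sketch (not part of the committed patch; it assumes `pg-types` is required directly rather than through `pg`) showing the same lookup outside the test harness: text-format NUMERIC (OID 1700) has no registered parser and falls back to the pass-through parser, while INT4 (OID 23) is parsed into a number.

```js
// Minimal usage sketch (illustrative only, not part of this patch): resolve
// parsers by OID and format, mirroring the behaviour covered by test/types.js.
var types = require('pg-types')

// No text-format parser is registered for NUMERIC (1700), so getTypeParser
// returns the pass-through parser and the value stays a string.
var parseNumericText = types.getTypeParser(1700, 'text')
console.log(typeof parseNumericText('12.34')) // 'string'

// INT4 (23) does have a registered text parser, so the raw text becomes a number.
var parseInt4Text = types.getTypeParser(23, 'text')
console.log(parseInt4Text('2147483647')) // 2147483647
```

The binary-format parsers registered in lib/binaryParsers.js are resolved the same way, keyed on the `'binary'` format argument.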
diff --git a/node_modules/pg/README.md b/node_modules/pg/README.md new file mode 100644 index 0000000..e21f34a --- /dev/null +++ b/node_modules/pg/README.md @@ -0,0 +1,89 @@ +# node-postgres + +[![Build Status](https://secure.travis-ci.org/brianc/node-postgres.svg?branch=master)](http://travis-ci.org/brianc/node-postgres) +NPM version +NPM downloads + +Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings. + +## Install + +```sh +$ npm install pg +``` + +--- + +## :star: [Documentation](https://node-postgres.com) :star: + +### Features + +- Pure JavaScript client and native libpq bindings share _the same API_ +- Connection pooling +- Extensible JS ↔ PostgreSQL data-type coercion +- Supported PostgreSQL features + - Parameterized queries + - Named statements with query plan caching + - Async notifications with `LISTEN/NOTIFY` + - Bulk import & export with `COPY TO/COPY FROM` + +### Extras + +node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture. +The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras). + +## Support + +node-postgres is free software. If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better! + +When you open an issue please provide: + +- version of Node +- version of Postgres +- smallest possible snippet of code to reproduce the problem + +You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter. + +## Sponsorship :two_hearts: + +node-postgres's continued development has been made possible in part by generous financial support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md). + +If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development. + +## Contributing + +**:heart: contributions!** + +I will **happily** accept your pull request if it: + +- **has tests** +- looks reasonable +- does not break backwards compatibility + +If your change involves breaking backwards compatibility please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communication it will require. + +## Troubleshooting and FAQ + +The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ) + +## License + +Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/pg/lib/client.js b/node_modules/pg/lib/client.js new file mode 100644 index 0000000..c6aa3da --- /dev/null +++ b/node_modules/pg/lib/client.js @@ -0,0 +1,631 @@ +'use strict' + +var EventEmitter = require('events').EventEmitter +var utils = require('./utils') +var sasl = require('./crypto/sasl') +var TypeOverrides = require('./type-overrides') + +var ConnectionParameters = require('./connection-parameters') +var Query = require('./query') +var defaults = require('./defaults') +var Connection = require('./connection') +const crypto = require('./crypto/utils') + +class Client extends EventEmitter { + constructor(config) { + super() + + this.connectionParameters = new ConnectionParameters(config) + this.user = this.connectionParameters.user + this.database = this.connectionParameters.database + this.port = this.connectionParameters.port + this.host = this.connectionParameters.host + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: this.connectionParameters.password, + }) + + this.replication = this.connectionParameters.replication + + var c = config || {} + + this._Promise = c.Promise || global.Promise + this._types = new TypeOverrides(c.types) + this._ending = false + this._ended = false + this._connecting = false + this._connected = false + this._connectionError = false + this._queryable = true + + this.connection = + c.connection || + new Connection({ + stream: c.stream, + ssl: this.connectionParameters.ssl, + keepAlive: c.keepAlive || false, + keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0, + encoding: this.connectionParameters.client_encoding || 'utf8', + }) + this.queryQueue = [] + this.binary = c.binary || defaults.binary + this.processID = null + this.secretKey = null + this.ssl = this.connectionParameters.ssl || false + // As with Password, make SSL->Key (the private key) non-enumerable. + // It won't show up in stack traces + // or if the client is console.logged + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) + } + + this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0 + } + + _errorAllQueries(err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.handleError(err, this.connection) + }) + } + + if (this.activeQuery) { + enqueueError(this.activeQuery) + this.activeQuery = null + } + + this.queryQueue.forEach(enqueueError) + this.queryQueue.length = 0 + } + + _connect(callback) { + var self = this + var con = this.connection + this._connectionCallback = callback + + if (this._connecting || this._connected) { + const err = new Error('Client has already been connected. 
You cannot reuse a client.') + process.nextTick(() => { + callback(err) + }) + return + } + this._connecting = true + + this.connectionTimeoutHandle + if (this._connectionTimeoutMillis > 0) { + this.connectionTimeoutHandle = setTimeout(() => { + con._ending = true + con.stream.destroy(new Error('timeout expired')) + }, this._connectionTimeoutMillis) + } + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send startup message + con.on('connect', function () { + if (self.ssl) { + con.requestSsl() + } else { + con.startup(self.getStartupConf()) + } + }) + + con.on('sslconnect', function () { + con.startup(self.getStartupConf()) + }) + + this._attachListeners(con) + + con.once('end', () => { + const error = this._ending ? new Error('Connection terminated') : new Error('Connection terminated unexpectedly') + + clearTimeout(this.connectionTimeoutHandle) + this._errorAllQueries(error) + this._ended = true + + if (!this._ending) { + // if the connection is ended without us calling .end() + // on this client then we have an unexpected disconnection + // treat this as an error unless we've already emitted an error + // during connection. + if (this._connecting && !this._connectionError) { + if (this._connectionCallback) { + this._connectionCallback(error) + } else { + this._handleErrorEvent(error) + } + } else if (!this._connectionError) { + this._handleErrorEvent(error) + } + } + + process.nextTick(() => { + this.emit('end') + }) + }) + } + + connect(callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) + } + + _attachListeners(con) { + // password request handling + con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this)) + // password request handling + con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this)) + // password request handling (SASL) + con.on('authenticationSASL', this._handleAuthSASL.bind(this)) + con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this)) + con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this)) + con.on('backendKeyData', this._handleBackendKeyData.bind(this)) + con.on('error', this._handleErrorEvent.bind(this)) + con.on('errorMessage', this._handleErrorMessage.bind(this)) + con.on('readyForQuery', this._handleReadyForQuery.bind(this)) + con.on('notice', this._handleNotice.bind(this)) + con.on('rowDescription', this._handleRowDescription.bind(this)) + con.on('dataRow', this._handleDataRow.bind(this)) + con.on('portalSuspended', this._handlePortalSuspended.bind(this)) + con.on('emptyQuery', this._handleEmptyQuery.bind(this)) + con.on('commandComplete', this._handleCommandComplete.bind(this)) + con.on('parseComplete', this._handleParseComplete.bind(this)) + con.on('copyInResponse', this._handleCopyInResponse.bind(this)) + con.on('copyData', this._handleCopyData.bind(this)) + con.on('notification', this._handleNotification.bind(this)) + } + + // TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function + // it can be supplied by the user if required - this is a breaking change! 
+ _checkPgPass(cb) { + const con = this.connection + if (typeof this.password === 'function') { + this._Promise + .resolve() + .then(() => this.password()) + .then((pass) => { + if (pass !== undefined) { + if (typeof pass !== 'string') { + con.emit('error', new TypeError('Password must be a string')) + return + } + this.connectionParameters.password = this.password = pass + } else { + this.connectionParameters.password = this.password = null + } + cb() + }) + .catch((err) => { + con.emit('error', err) + }) + } else if (this.password !== null) { + cb() + } else { + try { + const pgPass = require('pgpass') + pgPass(this.connectionParameters, (pass) => { + if (undefined !== pass) { + this.connectionParameters.password = this.password = pass + } + cb() + }) + } catch (e) { + this.emit('error', e) + } + } + } + + _handleAuthCleartextPassword(msg) { + this._checkPgPass(() => { + this.connection.password(this.password) + }) + } + + _handleAuthMD5Password(msg) { + this._checkPgPass(async () => { + try { + const hashedPassword = await crypto.postgresMd5PasswordHash(this.user, this.password, msg.salt) + this.connection.password(hashedPassword) + } catch (e) { + this.emit('error', e) + } + }) + } + + _handleAuthSASL(msg) { + this._checkPgPass(() => { + try { + this.saslSession = sasl.startSession(msg.mechanisms) + this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response) + } catch (err) { + this.connection.emit('error', err) + } + }) + } + + async _handleAuthSASLContinue(msg) { + try { + await sasl.continueSession(this.saslSession, this.password, msg.data) + this.connection.sendSCRAMClientFinalMessage(this.saslSession.response) + } catch (err) { + this.connection.emit('error', err) + } + } + + _handleAuthSASLFinal(msg) { + try { + sasl.finalizeSession(this.saslSession, msg.data) + this.saslSession = null + } catch (err) { + this.connection.emit('error', err) + } + } + + _handleBackendKeyData(msg) { + this.processID = msg.processID + this.secretKey = msg.secretKey + } + + _handleReadyForQuery(msg) { + if (this._connecting) { + this._connecting = false + this._connected = true + clearTimeout(this.connectionTimeoutHandle) + + // process possible callback argument to Client#connect + if (this._connectionCallback) { + this._connectionCallback(null, this) + // remove callback for proper error handling + // after the connect event + this._connectionCallback = null + } + this.emit('connect') + } + const { activeQuery } = this + this.activeQuery = null + this.readyForQuery = true + if (activeQuery) { + activeQuery.handleReadyForQuery(this.connection) + } + this._pulseQueryQueue() + } + + // if we receieve an error event or error message + // during the connection process we handle it here + _handleErrorWhileConnecting(err) { + if (this._connectionError) { + // TODO(bmc): this is swallowing errors - we shouldn't do this + return + } + this._connectionError = true + clearTimeout(this.connectionTimeoutHandle) + if (this._connectionCallback) { + return this._connectionCallback(err) + } + this.emit('error', err) + } + + // if we're connected and we receive an error event from the connection + // this means the socket is dead - do a hard abort of all queries and emit + // the socket error on the client as well + _handleErrorEvent(err) { + if (this._connecting) { + return this._handleErrorWhileConnecting(err) + } + this._queryable = false + this._errorAllQueries(err) + this.emit('error', err) + } + + // handle error messages from the postgres backend + 
_handleErrorMessage(msg) { + if (this._connecting) { + return this._handleErrorWhileConnecting(msg) + } + const activeQuery = this.activeQuery + + if (!activeQuery) { + this._handleErrorEvent(msg) + return + } + + this.activeQuery = null + activeQuery.handleError(msg, this.connection) + } + + _handleRowDescription(msg) { + // delegate rowDescription to active query + this.activeQuery.handleRowDescription(msg) + } + + _handleDataRow(msg) { + // delegate dataRow to active query + this.activeQuery.handleDataRow(msg) + } + + _handlePortalSuspended(msg) { + // delegate portalSuspended to active query + this.activeQuery.handlePortalSuspended(this.connection) + } + + _handleEmptyQuery(msg) { + // delegate emptyQuery to active query + this.activeQuery.handleEmptyQuery(this.connection) + } + + _handleCommandComplete(msg) { + // delegate commandComplete to active query + this.activeQuery.handleCommandComplete(msg, this.connection) + } + + _handleParseComplete(msg) { + // if a prepared statement has a name and properly parses + // we track that its already been executed so we don't parse + // it again on the same client + if (this.activeQuery.name) { + this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text + } + } + + _handleCopyInResponse(msg) { + this.activeQuery.handleCopyInResponse(this.connection) + } + + _handleCopyData(msg) { + this.activeQuery.handleCopyData(msg, this.connection) + } + + _handleNotification(msg) { + this.emit('notification', msg) + } + + _handleNotice(msg) { + this.emit('notice', msg) + } + + getStartupConf() { + var params = this.connectionParameters + + var data = { + user: params.user, + database: params.database, + } + + var appName = params.application_name || params.fallback_application_name + if (appName) { + data.application_name = appName + } + if (params.replication) { + data.replication = '' + params.replication + } + if (params.statement_timeout) { + data.statement_timeout = String(parseInt(params.statement_timeout, 10)) + } + if (params.lock_timeout) { + data.lock_timeout = String(parseInt(params.lock_timeout, 10)) + } + if (params.idle_in_transaction_session_timeout) { + data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10)) + } + if (params.options) { + data.options = params.options + } + + return data + } + + cancel(client, query) { + if (client.activeQuery === query) { + var con = this.connection + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' 
+ this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send cancel message + con.on('connect', function () { + con.cancel(client.processID, client.secretKey) + }) + } else if (client.queryQueue.indexOf(query) !== -1) { + client.queryQueue.splice(client.queryQueue.indexOf(query), 1) + } + } + + setTypeParser(oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) + } + + getTypeParser(oid, format) { + return this._types.getTypeParser(oid, format) + } + + // escapeIdentifier and escapeLiteral moved to utility functions & exported + // on PG + // re-exported here for backwards compatibility + escapeIdentifier(str) { + return utils.escapeIdentifier(str) + } + + escapeLiteral(str) { + return utils.escapeLiteral(str) + } + + _pulseQueryQueue() { + if (this.readyForQuery === true) { + this.activeQuery = this.queryQueue.shift() + if (this.activeQuery) { + this.readyForQuery = false + this.hasExecuted = true + + const queryError = this.activeQuery.submit(this.connection) + if (queryError) { + process.nextTick(() => { + this.activeQuery.handleError(queryError, this.connection) + this.readyForQuery = true + this._pulseQueryQueue() + }) + } + } else if (this.hasExecuted) { + this.activeQuery = null + this.emit('drain') + } + } + } + + query(config, values, callback) { + // can take in strings, config object or query object + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + if (typeof values === 'function') { + query.callback = query.callback || values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new Query(config, values, callback) + if (!query.callback) { + result = new this._Promise((resolve, reject) => { + query.callback = (err, res) => (err ? 
reject(err) : resolve(res)) + }).catch((err) => { + // replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the + // application that created the query + Error.captureStackTrace(err) + throw err + }) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this.queryQueue.indexOf(query) + if (index > -1) { + this.queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (this.binary && !query.binary) { + query.binary = true + } + + if (query._result && !query._result._types) { + query._result._types = this._types + } + + if (!this._queryable) { + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection) + }) + return result + } + + if (this._ending) { + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable'), this.connection) + }) + return result + } + + this.queryQueue.push(query) + this._pulseQueryQueue() + return result + } + + ref() { + this.connection.ref() + } + + unref() { + this.connection.unref() + } + + end(cb) { + this._ending = true + + // if we have never connected, then end is a noop, callback immediately + if (!this.connection._connecting || this._ended) { + if (cb) { + cb() + } else { + return this._Promise.resolve() + } + } + + if (this.activeQuery || !this._queryable) { + // if we have an active query we need to force a disconnect + // on the socket - otherwise a hung query could block end forever + this.connection.stream.destroy() + } else { + this.connection.end() + } + + if (cb) { + this.connection.once('end', cb) + } else { + return new this._Promise((resolve) => { + this.connection.once('end', resolve) + }) + } + } +} + +// expose a Query constructor +Client.Query = Query + +module.exports = Client diff --git a/node_modules/pg/lib/connection-parameters.js b/node_modules/pg/lib/connection-parameters.js new file mode 100644 index 0000000..6a535a8 --- /dev/null +++ b/node_modules/pg/lib/connection-parameters.js @@ -0,0 +1,167 @@ +'use strict' + +var dns = require('dns') + +var defaults = require('./defaults') + +var parse = require('pg-connection-string').parse // parses a connection string + +var val = function (key, config, envVar) { + if (envVar === undefined) { + envVar = process.env['PG' + key.toUpperCase()] + } else if (envVar === false) { + // do nothing ... 
use false + } else { + envVar = process.env[envVar] + } + + return config[key] || envVar || defaults[key] +} + +var readSSLConfigFromEnvironment = function () { + switch (process.env.PGSSLMODE) { + case 'disable': + return false + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': + return true + case 'no-verify': + return { rejectUnauthorized: false } + } + return defaults.ssl +} + +// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes +var quoteParamValue = function (value) { + return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'" +} + +var add = function (params, config, paramName) { + var value = config[paramName] + if (value !== undefined && value !== null) { + params.push(paramName + '=' + quoteParamValue(value)) + } +} + +class ConnectionParameters { + constructor(config) { + // if a string is passed, it is a raw connection string so we parse it into a config + config = typeof config === 'string' ? parse(config) : config || {} + + // if the config has a connectionString defined, parse IT into the config we use + // this will override other default values with what is stored in connectionString + if (config.connectionString) { + config = Object.assign({}, config, parse(config.connectionString)) + } + + this.user = val('user', config) + this.database = val('database', config) + + if (this.database === undefined) { + this.database = this.user + } + + this.port = parseInt(val('port', config), 10) + this.host = val('host', config) + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: val('password', config), + }) + + this.binary = val('binary', config) + this.options = val('options', config) + + this.ssl = typeof config.ssl === 'undefined' ? 
readSSLConfigFromEnvironment() : config.ssl + + if (typeof this.ssl === 'string') { + if (this.ssl === 'true') { + this.ssl = true + } + } + // support passing in ssl=no-verify via connection string + if (this.ssl === 'no-verify') { + this.ssl = { rejectUnauthorized: false } + } + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) + } + + this.client_encoding = val('client_encoding', config) + this.replication = val('replication', config) + // a domain socket begins with '/' + this.isDomainSocket = !(this.host || '').indexOf('/') + + this.application_name = val('application_name', config, 'PGAPPNAME') + this.fallback_application_name = val('fallback_application_name', config, false) + this.statement_timeout = val('statement_timeout', config, false) + this.lock_timeout = val('lock_timeout', config, false) + this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false) + this.query_timeout = val('query_timeout', config, false) + + if (config.connectionTimeoutMillis === undefined) { + this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0 + } else { + this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000) + } + + if (config.keepAlive === false) { + this.keepalives = 0 + } else if (config.keepAlive === true) { + this.keepalives = 1 + } + + if (typeof config.keepAliveInitialDelayMillis === 'number') { + this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000) + } + } + + getLibpqConnectionString(cb) { + var params = [] + add(params, this, 'user') + add(params, this, 'password') + add(params, this, 'port') + add(params, this, 'application_name') + add(params, this, 'fallback_application_name') + add(params, this, 'connect_timeout') + add(params, this, 'options') + + var ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? 
{ sslmode: this.ssl } : {} + add(params, ssl, 'sslmode') + add(params, ssl, 'sslca') + add(params, ssl, 'sslkey') + add(params, ssl, 'sslcert') + add(params, ssl, 'sslrootcert') + + if (this.database) { + params.push('dbname=' + quoteParamValue(this.database)) + } + if (this.replication) { + params.push('replication=' + quoteParamValue(this.replication)) + } + if (this.host) { + params.push('host=' + quoteParamValue(this.host)) + } + if (this.isDomainSocket) { + return cb(null, params.join(' ')) + } + if (this.client_encoding) { + params.push('client_encoding=' + quoteParamValue(this.client_encoding)) + } + dns.lookup(this.host, function (err, address) { + if (err) return cb(err, null) + params.push('hostaddr=' + quoteParamValue(address)) + return cb(null, params.join(' ')) + }) + } +} + +module.exports = ConnectionParameters diff --git a/node_modules/pg/lib/connection.js b/node_modules/pg/lib/connection.js new file mode 100644 index 0000000..af4b8f1 --- /dev/null +++ b/node_modules/pg/lib/connection.js @@ -0,0 +1,223 @@ +'use strict' + +var net = require('net') +var EventEmitter = require('events').EventEmitter + +const { parse, serialize } = require('pg-protocol') +const { getStream, getSecureStream } = require('./stream') + +const flushBuffer = serialize.flush() +const syncBuffer = serialize.sync() +const endBuffer = serialize.end() + +// TODO(bmc) support binary mode at some point +class Connection extends EventEmitter { + constructor(config) { + super() + config = config || {} + + this.stream = config.stream || getStream(config.ssl) + if (typeof this.stream === 'function') { + this.stream = this.stream(config) + } + + this._keepAlive = config.keepAlive + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis + this.lastBuffer = false + this.parsedStatements = {} + this.ssl = config.ssl || false + this._ending = false + this._emitMessage = false + var self = this + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true + } + }) + } + + connect(port, host) { + var self = this + + this._connecting = true + this.stream.setNoDelay(true) + this.stream.connect(port, host) + + this.stream.once('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) + } + self.emit('connect') + }) + + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { + return + } + self.emit('error', error) + } + this.stream.on('error', reportStreamError) + + this.stream.on('close', function () { + self.emit('end') + }) + + if (!this.ssl) { + return this.attachListeners(this.stream) + } + + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8') + switch (responseCode) { + case 'S': // Server supports SSL connections, continue with a secure connection + break + case 'N': // Server does not support SSL connections + self.stream.end() + return self.emit('error', new Error('The server does not support SSL connections')) + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + self.stream.end() + return self.emit('error', new Error('There was an error establishing an SSL connection')) + } + const options = { + socket: self.stream, + } + + if (self.ssl !== true) { + Object.assign(options, self.ssl) + + if ('key' in self.ssl) { + options.key = self.ssl.key + } + } + + var net = 
require('net') + if (net.isIP && net.isIP(host) === 0) { + options.servername = host + } + try { + self.stream = getSecureStream(options) + } catch (err) { + return self.emit('error', err) + } + self.attachListeners(self.stream) + self.stream.on('error', reportStreamError) + + self.emit('sslconnect') + }) + } + + attachListeners(stream) { + parse(stream, (msg) => { + var eventName = msg.name === 'error' ? 'errorMessage' : msg.name + if (this._emitMessage) { + this.emit('message', msg) + } + this.emit(eventName, msg) + }) + } + + requestSsl() { + this.stream.write(serialize.requestSsl()) + } + + startup(config) { + this.stream.write(serialize.startup(config)) + } + + cancel(processID, secretKey) { + this._send(serialize.cancel(processID, secretKey)) + } + + password(password) { + this._send(serialize.password(password)) + } + + sendSASLInitialResponseMessage(mechanism, initialResponse) { + this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse)) + } + + sendSCRAMClientFinalMessage(additionalData) { + this._send(serialize.sendSCRAMClientFinalMessage(additionalData)) + } + + _send(buffer) { + if (!this.stream.writable) { + return false + } + return this.stream.write(buffer) + } + + query(text) { + this._send(serialize.query(text)) + } + + // send parse message + parse(query) { + this._send(serialize.parse(query)) + } + + // send bind message + bind(config) { + this._send(serialize.bind(config)) + } + + // send execute message + execute(config) { + this._send(serialize.execute(config)) + } + + flush() { + if (this.stream.writable) { + this.stream.write(flushBuffer) + } + } + + sync() { + this._ending = true + this._send(syncBuffer) + } + + ref() { + this.stream.ref() + } + + unref() { + this.stream.unref() + } + + end() { + // 0x58 = 'X' + this._ending = true + if (!this._connecting || !this.stream.writable) { + this.stream.end() + return + } + return this.stream.write(endBuffer, () => { + this.stream.end() + }) + } + + close(msg) { + this._send(serialize.close(msg)) + } + + describe(msg) { + this._send(serialize.describe(msg)) + } + + sendCopyFromChunk(chunk) { + this._send(serialize.copyData(chunk)) + } + + endCopyFrom() { + this._send(serialize.copyDone()) + } + + sendCopyFail(msg) { + this._send(serialize.copyFail(msg)) + } +} + +module.exports = Connection diff --git a/node_modules/pg/lib/crypto/sasl.js b/node_modules/pg/lib/crypto/sasl.js new file mode 100644 index 0000000..04ae197 --- /dev/null +++ b/node_modules/pg/lib/crypto/sasl.js @@ -0,0 +1,186 @@ +'use strict' +const crypto = require('./utils') + +function startSession(mechanisms) { + if (mechanisms.indexOf('SCRAM-SHA-256') === -1) { + throw new Error('SASL: Only mechanism SCRAM-SHA-256 is currently supported') + } + + const clientNonce = crypto.randomBytes(18).toString('base64') + + return { + mechanism: 'SCRAM-SHA-256', + clientNonce, + response: 'n,,n=*,r=' + clientNonce, + message: 'SASLInitialResponse', + } +} + +async function continueSession(session, password, serverData) { + if (session.message !== 'SASLInitialResponse') { + throw new Error('SASL: Last message was not SASLInitialResponse') + } + if (typeof password !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a string') + } + if (password === '') { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a non-empty string') + } + if (typeof serverData !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: serverData must be a string') + } + + const sv = 
parseServerFirstMessage(serverData) + + if (!sv.nonce.startsWith(session.clientNonce)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce') + } else if (sv.nonce.length === session.clientNonce.length) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce is too short') + } + + var clientFirstMessageBare = 'n=*,r=' + session.clientNonce + var serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration + var clientFinalMessageWithoutProof = 'c=biws,r=' + sv.nonce + var authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof + + var saltBytes = Buffer.from(sv.salt, 'base64') + var saltedPassword = await crypto.deriveKey(password, saltBytes, sv.iteration) + var clientKey = await crypto.hmacSha256(saltedPassword, 'Client Key') + var storedKey = await crypto.sha256(clientKey) + var clientSignature = await crypto.hmacSha256(storedKey, authMessage) + var clientProof = xorBuffers(Buffer.from(clientKey), Buffer.from(clientSignature)).toString('base64') + var serverKey = await crypto.hmacSha256(saltedPassword, 'Server Key') + var serverSignatureBytes = await crypto.hmacSha256(serverKey, authMessage) + + session.message = 'SASLResponse' + session.serverSignature = Buffer.from(serverSignatureBytes).toString('base64') + session.response = clientFinalMessageWithoutProof + ',p=' + clientProof +} + +function finalizeSession(session, serverData) { + if (session.message !== 'SASLResponse') { + throw new Error('SASL: Last message was not SASLResponse') + } + if (typeof serverData !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: serverData must be a string') + } + + const { serverSignature } = parseServerFinalMessage(serverData) + + if (serverSignature !== session.serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match') + } +} + +/** + * printable = %x21-2B / %x2D-7E + * ;; Printable ASCII except ",". + * ;; Note that any "printable" is also + * ;; a valid "value". 
+ */ +function isPrintableChars(text) { + if (typeof text !== 'string') { + throw new TypeError('SASL: text must be a string') + } + return text + .split('') + .map((_, i) => text.charCodeAt(i)) + .every((c) => (c >= 0x21 && c <= 0x2b) || (c >= 0x2d && c <= 0x7e)) +} + +/** + * base64-char = ALPHA / DIGIT / "/" / "+" + * + * base64-4 = 4base64-char + * + * base64-3 = 3base64-char "=" + * + * base64-2 = 2base64-char "==" + * + * base64 = *base64-4 [base64-3 / base64-2] + */ +function isBase64(text) { + return /^(?:[a-zA-Z0-9+/]{4})*(?:[a-zA-Z0-9+/]{2}==|[a-zA-Z0-9+/]{3}=)?$/.test(text) +} + +function parseAttributePairs(text) { + if (typeof text !== 'string') { + throw new TypeError('SASL: attribute pairs text must be a string') + } + + return new Map( + text.split(',').map((attrValue) => { + if (!/^.=/.test(attrValue)) { + throw new Error('SASL: Invalid attribute pair entry') + } + const name = attrValue[0] + const value = attrValue.substring(2) + return [name, value] + }) + ) +} + +function parseServerFirstMessage(data) { + const attrPairs = parseAttributePairs(data) + + const nonce = attrPairs.get('r') + if (!nonce) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing') + } else if (!isPrintableChars(nonce)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce must only contain printable characters') + } + const salt = attrPairs.get('s') + if (!salt) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing') + } else if (!isBase64(salt)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt must be base64') + } + const iterationText = attrPairs.get('i') + if (!iterationText) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing') + } else if (!/^[1-9][0-9]*$/.test(iterationText)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: invalid iteration count') + } + const iteration = parseInt(iterationText, 10) + + return { + nonce, + salt, + iteration, + } +} + +function parseServerFinalMessage(serverData) { + const attrPairs = parseAttributePairs(serverData) + const serverSignature = attrPairs.get('v') + if (!serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature is missing') + } else if (!isBase64(serverSignature)) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature must be base64') + } + return { + serverSignature, + } +} + +function xorBuffers(a, b) { + if (!Buffer.isBuffer(a)) { + throw new TypeError('first argument must be a Buffer') + } + if (!Buffer.isBuffer(b)) { + throw new TypeError('second argument must be a Buffer') + } + if (a.length !== b.length) { + throw new Error('Buffer lengths must match') + } + if (a.length === 0) { + throw new Error('Buffers cannot be empty') + } + return Buffer.from(a.map((_, i) => a[i] ^ b[i])) +} + +module.exports = { + startSession, + continueSession, + finalizeSession, +} diff --git a/node_modules/pg/lib/crypto/utils-legacy.js b/node_modules/pg/lib/crypto/utils-legacy.js new file mode 100644 index 0000000..86544ad --- /dev/null +++ b/node_modules/pg/lib/crypto/utils-legacy.js @@ -0,0 +1,37 @@ +'use strict' +// This file contains crypto utility functions for versions of Node.js < 15.0.0, +// which does not support the WebCrypto.subtle API. 
+ +const nodeCrypto = require('crypto') + +function md5(string) { + return nodeCrypto.createHash('md5').update(string, 'utf-8').digest('hex') +} + +// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html +function postgresMd5PasswordHash(user, password, salt) { + var inner = md5(password + user) + var outer = md5(Buffer.concat([Buffer.from(inner), salt])) + return 'md5' + outer +} + +function sha256(text) { + return nodeCrypto.createHash('sha256').update(text).digest() +} + +function hmacSha256(key, msg) { + return nodeCrypto.createHmac('sha256', key).update(msg).digest() +} + +async function deriveKey(password, salt, iterations) { + return nodeCrypto.pbkdf2Sync(password, salt, iterations, 32, 'sha256') +} + +module.exports = { + postgresMd5PasswordHash, + randomBytes: nodeCrypto.randomBytes, + deriveKey, + sha256, + hmacSha256, + md5, +} diff --git a/node_modules/pg/lib/crypto/utils-webcrypto.js b/node_modules/pg/lib/crypto/utils-webcrypto.js new file mode 100644 index 0000000..0433f01 --- /dev/null +++ b/node_modules/pg/lib/crypto/utils-webcrypto.js @@ -0,0 +1,83 @@ +const nodeCrypto = require('crypto') + +module.exports = { + postgresMd5PasswordHash, + randomBytes, + deriveKey, + sha256, + hmacSha256, + md5, +} + +/** + * The Web Crypto API - grabbed from the Node.js library or the global + * @type Crypto + */ +const webCrypto = nodeCrypto.webcrypto || globalThis.crypto +/** + * The SubtleCrypto API for low level crypto operations. + * @type SubtleCrypto + */ +const subtleCrypto = webCrypto.subtle +const textEncoder = new TextEncoder() + +/** + * + * @param {*} length + * @returns + */ +function randomBytes(length) { + return webCrypto.getRandomValues(Buffer.alloc(length)) +} + +async function md5(string) { + try { + return nodeCrypto.createHash('md5').update(string, 'utf-8').digest('hex') + } catch (e) { + // `createHash()` failed so we are probably not in Node.js, use the WebCrypto API instead. + // Note that the MD5 algorithm on WebCrypto is not available in Node.js. + // This is why we cannot just use WebCrypto in all environments. + const data = typeof string === 'string' ? 
textEncoder.encode(string) : string + const hash = await subtleCrypto.digest('MD5', data) + return Array.from(new Uint8Array(hash)) + .map((b) => b.toString(16).padStart(2, '0')) + .join('') + } +} + +// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html +async function postgresMd5PasswordHash(user, password, salt) { + var inner = await md5(password + user) + var outer = await md5(Buffer.concat([Buffer.from(inner), salt])) + return 'md5' + outer +} + +/** + * Create a SHA-256 digest of the given data + * @param {Buffer} data + */ +async function sha256(text) { + return await subtleCrypto.digest('SHA-256', text) +} + +/** + * Sign the message with the given key + * @param {ArrayBuffer} keyBuffer + * @param {string} msg + */ +async function hmacSha256(keyBuffer, msg) { + const key = await subtleCrypto.importKey('raw', keyBuffer, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']) + return await subtleCrypto.sign('HMAC', key, textEncoder.encode(msg)) +} + +/** + * Derive a key from the password and salt + * @param {string} password + * @param {Uint8Array} salt + * @param {number} iterations + */ +async function deriveKey(password, salt, iterations) { + const key = await subtleCrypto.importKey('raw', textEncoder.encode(password), 'PBKDF2', false, ['deriveBits']) + const params = { name: 'PBKDF2', hash: 'SHA-256', salt: salt, iterations: iterations } + return await subtleCrypto.deriveBits(params, key, 32 * 8, ['deriveBits']) +} diff --git a/node_modules/pg/lib/crypto/utils.js b/node_modules/pg/lib/crypto/utils.js new file mode 100644 index 0000000..9644b15 --- /dev/null +++ b/node_modules/pg/lib/crypto/utils.js @@ -0,0 +1,9 @@ +'use strict' + +const useLegacyCrypto = parseInt(process.versions && process.versions.node && process.versions.node.split('.')[0]) < 15 +if (useLegacyCrypto) { + // We are on an old version of Node.js that requires legacy crypto utilities. + module.exports = require('./utils-legacy') +} else { + module.exports = require('./utils-webcrypto') +} diff --git a/node_modules/pg/lib/defaults.js b/node_modules/pg/lib/defaults.js new file mode 100644 index 0000000..5c5d997 --- /dev/null +++ b/node_modules/pg/lib/defaults.js @@ -0,0 +1,84 @@ +'use strict' + +module.exports = { + // database host. defaults to localhost + host: 'localhost', + + // database user's name + user: process.platform === 'win32' ? process.env.USERNAME : process.env.USER, + + // name of database to connect + database: undefined, + + // database user's password + password: null, + + // a Postgres connection string to be used instead of setting individual connection items + // NOTE: Setting this value will cause it to override any other value (such as database or user) defined + // in the defaults object. + connectionString: undefined, + + // database port + port: 5432, + + // number of rows to return at a time from a prepared statement's + // portal. 
0 will return all rows at once + rows: 0, + + // binary result mode + binary: false, + + // Connection pool options - see https://github.com/brianc/node-pg-pool + + // number of connections to use in connection pool + // 0 will disable connection pooling + max: 10, + + // max milliseconds a client can go unused before it is removed + // from the pool and destroyed + idleTimeoutMillis: 30000, + + client_encoding: '', + + ssl: false, + + application_name: undefined, + + fallback_application_name: undefined, + + options: undefined, + + parseInputDatesAsUTC: false, + + // max milliseconds any query using this connection will execute for before timing out in error. + // false=unlimited + statement_timeout: false, + + // Abort any statement that waits longer than the specified duration in milliseconds while attempting to acquire a lock. + // false=unlimited + lock_timeout: false, + + // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds + // false=unlimited + idle_in_transaction_session_timeout: false, + + // max milliseconds to wait for query to complete (client side) + query_timeout: false, + + connect_timeout: 0, + + keepalives: 1, + + keepalives_idle: 0, +} + +var pgTypes = require('pg-types') +// save default parsers +var parseBigInteger = pgTypes.getTypeParser(20, 'text') +var parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text') + +// parse int8 so you can get your count values as actual numbers +module.exports.__defineSetter__('parseInt8', function (val) { + pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger) + pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray) +}) diff --git a/node_modules/pg/lib/index.js b/node_modules/pg/lib/index.js new file mode 100644 index 0000000..1742d16 --- /dev/null +++ b/node_modules/pg/lib/index.js @@ -0,0 +1,58 @@ +'use strict' + +var Client = require('./client') +var defaults = require('./defaults') +var Connection = require('./connection') +var Pool = require('pg-pool') +const { DatabaseError } = require('pg-protocol') +const { escapeIdentifier, escapeLiteral } = require('./utils') + +const poolFactory = (Client) => { + return class BoundPool extends Pool { + constructor(options) { + super(options, Client) + } + } +} + +var PG = function (clientConstructor) { + this.defaults = defaults + this.Client = clientConstructor + this.Query = this.Client.Query + this.Pool = poolFactory(this.Client) + this._pools = [] + this.Connection = Connection + this.types = require('pg-types') + this.DatabaseError = DatabaseError + this.escapeIdentifier = escapeIdentifier + this.escapeLiteral = escapeLiteral +} + +if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') { + module.exports = new PG(require('./native')) +} else { + module.exports = new PG(Client) + + // lazy require native module...the native module may not have installed + Object.defineProperty(module.exports, 'native', { + configurable: true, + enumerable: false, + get() { + var native = null + try { + native = new PG(require('./native')) + } catch (err) { + if (err.code !== 'MODULE_NOT_FOUND') { + throw err + } + } + + // overwrite module.exports.native so that getter is never called again + Object.defineProperty(module.exports, 'native', { + value: native, + }) + + return native + }, + }) +} diff --git a/node_modules/pg/lib/native/client.js b/node_modules/pg/lib/native/client.js new file mode 100644 index 0000000..88bf52c --- /dev/null +++ 
b/node_modules/pg/lib/native/client.js @@ -0,0 +1,307 @@ +'use strict' + +// eslint-disable-next-line +var Native +try { + // Wrap this `require()` in a try-catch to avoid upstream bundlers from complaining that this might not be available since it is an optional import + Native = require('pg-native') +} catch (e) { + throw e +} +var TypeOverrides = require('../type-overrides') +var EventEmitter = require('events').EventEmitter +var util = require('util') +var ConnectionParameters = require('../connection-parameters') + +var NativeQuery = require('./query') + +var Client = (module.exports = function (config) { + EventEmitter.call(this) + config = config || {} + + this._Promise = config.Promise || global.Promise + this._types = new TypeOverrides(config.types) + + this.native = new Native({ + types: this._types, + }) + + this._queryQueue = [] + this._ending = false + this._connecting = false + this._connected = false + this._queryable = true + + // keep these on the object for legacy reasons + // for the time being. TODO: deprecate all this jazz + var cp = (this.connectionParameters = new ConnectionParameters(config)) + if (config.nativeConnectionString) cp.nativeConnectionString = config.nativeConnectionString + this.user = cp.user + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: cp.password, + }) + this.database = cp.database + this.host = cp.host + this.port = cp.port + + // a hash to hold named queries + this.namedQueries = {} +}) + +Client.Query = NativeQuery + +util.inherits(Client, EventEmitter) + +Client.prototype._errorAllQueries = function (err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.native = this.native + query.handleError(err) + }) + } + + if (this._hasActiveQuery()) { + enqueueError(this._activeQuery) + this._activeQuery = null + } + + this._queryQueue.forEach(enqueueError) + this._queryQueue.length = 0 +} + +// connect to the backend +// pass an optional callback to be called once connected +// or with an error if there was a connection error +Client.prototype._connect = function (cb) { + var self = this + + if (this._connecting) { + process.nextTick(() => cb(new Error('Client has already been connected. 
You cannot reuse a client.'))) + return + } + + this._connecting = true + + this.connectionParameters.getLibpqConnectionString(function (err, conString) { + if (self.connectionParameters.nativeConnectionString) conString = self.connectionParameters.nativeConnectionString + if (err) return cb(err) + self.native.connect(conString, function (err) { + if (err) { + self.native.end() + return cb(err) + } + + // set internal states to connected + self._connected = true + + // handle connection errors from the native layer + self.native.on('error', function (err) { + self._queryable = false + self._errorAllQueries(err) + self.emit('error', err) + }) + + self.native.on('notification', function (msg) { + self.emit('notification', { + channel: msg.relname, + payload: msg.extra, + }) + }) + + // signal we are connected now + self.emit('connect') + self._pulseQueryQueue(true) + + cb() + }) + }) +} + +Client.prototype.connect = function (callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) +} + +// send a query to the server +// this method is highly overloaded to take +// 1) string query, optional array of parameters, optional function callback +// 2) object query with { +// string query +// optional array values, +// optional function callback instead of as a separate parameter +// optional string name to name & cache the query plan +// optional string rowMode = 'array' for an array of results +// } +Client.prototype.query = function (config, values, callback) { + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + // accept query(new Query(...), (err, res) => { }) style + if (typeof values === 'function') { + config.callback = values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new NativeQuery(config, values, callback) + if (!query.callback) { + let resolveOut, rejectOut + result = new this._Promise((resolve, reject) => { + resolveOut = resolve + rejectOut = reject + }).catch((err) => { + Error.captureStackTrace(err) + throw err + }) + query.callback = (err, res) => (err ? 
rejectOut(err) : resolveOut(res)) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this._queryQueue.indexOf(query) + if (index > -1) { + this._queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (!this._queryable) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable')) + }) + return result + } + + if (this._ending) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable')) + }) + return result + } + + this._queryQueue.push(query) + this._pulseQueryQueue() + return result +} + +// disconnect from the backend server +Client.prototype.end = function (cb) { + var self = this + + this._ending = true + + if (!this._connected) { + this.once('connect', this.end.bind(this, cb)) + } + var result + if (!cb) { + result = new this._Promise(function (resolve, reject) { + cb = (err) => (err ? reject(err) : resolve()) + }) + } + this.native.end(function () { + self._errorAllQueries(new Error('Connection terminated')) + + process.nextTick(() => { + self.emit('end') + if (cb) cb() + }) + }) + return result +} + +Client.prototype._hasActiveQuery = function () { + return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end' +} + +Client.prototype._pulseQueryQueue = function (initialConnection) { + if (!this._connected) { + return + } + if (this._hasActiveQuery()) { + return + } + var query = this._queryQueue.shift() + if (!query) { + if (!initialConnection) { + this.emit('drain') + } + return + } + this._activeQuery = query + query.submit(this) + var self = this + query.once('_done', function () { + self._pulseQueryQueue() + }) +} + +// attempt to cancel an in-progress query +Client.prototype.cancel = function (query) { + if (this._activeQuery === query) { + this.native.cancel(function () {}) + } else if (this._queryQueue.indexOf(query) !== -1) { + this._queryQueue.splice(this._queryQueue.indexOf(query), 1) + } +} + +Client.prototype.ref = function () {} +Client.prototype.unref = function () {} + +Client.prototype.setTypeParser = function (oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) +} + +Client.prototype.getTypeParser = function (oid, format) { + return this._types.getTypeParser(oid, format) +} diff --git a/node_modules/pg/lib/native/index.js b/node_modules/pg/lib/native/index.js new file mode 100644 index 0000000..eead422 --- /dev/null +++ b/node_modules/pg/lib/native/index.js @@ -0,0 +1,2 @@ +'use strict' +module.exports = require('./client') diff --git a/node_modules/pg/lib/native/query.js b/node_modules/pg/lib/native/query.js new file mode 100644 index 0000000..0cfed1f --- /dev/null +++ b/node_modules/pg/lib/native/query.js @@ -0,0 +1,168 @@ +'use strict' + +var EventEmitter = require('events').EventEmitter +var util = require('util') +var utils = require('../utils') + +var NativeQuery = (module.exports = function (config, values, callback) { + EventEmitter.call(this) + config = 
utils.normalizeQueryConfig(config, values, callback) + this.text = config.text + this.values = config.values + this.name = config.name + this.queryMode = config.queryMode + this.callback = config.callback + this.state = 'new' + this._arrayMode = config.rowMode === 'array' + + // if the 'row' event is listened for + // then emit them as they come in + // without setting singleRowMode to true + // this has almost no meaning because libpq + // reads all rows into memory befor returning any + this._emitRowEvents = false + this.on( + 'newListener', + function (event) { + if (event === 'row') this._emitRowEvents = true + }.bind(this) + ) +}) + +util.inherits(NativeQuery, EventEmitter) + +var errorFieldMap = { + /* eslint-disable quote-props */ + sqlState: 'code', + statementPosition: 'position', + messagePrimary: 'message', + context: 'where', + schemaName: 'schema', + tableName: 'table', + columnName: 'column', + dataTypeName: 'dataType', + constraintName: 'constraint', + sourceFile: 'file', + sourceLine: 'line', + sourceFunction: 'routine', +} + +NativeQuery.prototype.handleError = function (err) { + // copy pq error fields into the error object + var fields = this.native.pq.resultErrorFields() + if (fields) { + for (var key in fields) { + var normalizedFieldName = errorFieldMap[key] || key + err[normalizedFieldName] = fields[key] + } + } + if (this.callback) { + this.callback(err) + } else { + this.emit('error', err) + } + this.state = 'error' +} + +NativeQuery.prototype.then = function (onSuccess, onFailure) { + return this._getPromise().then(onSuccess, onFailure) +} + +NativeQuery.prototype.catch = function (callback) { + return this._getPromise().catch(callback) +} + +NativeQuery.prototype._getPromise = function () { + if (this._promise) return this._promise + this._promise = new Promise( + function (resolve, reject) { + this._once('end', resolve) + this._once('error', reject) + }.bind(this) + ) + return this._promise +} + +NativeQuery.prototype.submit = function (client) { + this.state = 'running' + var self = this + this.native = client.native + client.native.arrayMode = this._arrayMode + + var after = function (err, rows, results) { + client.native.arrayMode = false + setImmediate(function () { + self.emit('_done') + }) + + // handle possible query error + if (err) { + return self.handleError(err) + } + + // emit row events for each row in the result + if (self._emitRowEvents) { + if (results.length > 1) { + rows.forEach((rowOfRows, i) => { + rowOfRows.forEach((row) => { + self.emit('row', row, results[i]) + }) + }) + } else { + rows.forEach(function (row) { + self.emit('row', row, results) + }) + } + } + + // handle successful result + self.state = 'end' + self.emit('end', results) + if (self.callback) { + self.callback(null, results) + } + } + + if (process.domain) { + after = process.domain.bind(after) + } + + // named query + if (this.name) { + if (this.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', this.name, this.name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + var values = (this.values || []).map(utils.prepareValue) + + // check if the client has already executed this named query + // if so...just execute it again - skip the planning phase + if (client.namedQueries[this.name]) { + if (this.text && client.namedQueries[this.name] !== this.text) { + const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + return after(err) + } + return client.native.execute(this.name, values, after) + } + // plan the named query the first time, then execute it + return client.native.prepare(this.name, this.text, values.length, function (err) { + if (err) return after(err) + client.namedQueries[self.name] = self.text + return self.native.execute(self.name, values, after) + }) + } else if (this.values) { + if (!Array.isArray(this.values)) { + const err = new Error('Query values must be an array') + return after(err) + } + var vals = this.values.map(utils.prepareValue) + client.native.query(this.text, vals, after) + } else if (this.queryMode === 'extended') { + client.native.query(this.text, [], after) + } else { + client.native.query(this.text, after) + } +} diff --git a/node_modules/pg/lib/query.js b/node_modules/pg/lib/query.js new file mode 100644 index 0000000..0925960 --- /dev/null +++ b/node_modules/pg/lib/query.js @@ -0,0 +1,239 @@ +'use strict' + +const { EventEmitter } = require('events') + +const Result = require('./result') +const utils = require('./utils') + +class Query extends EventEmitter { + constructor(config, values, callback) { + super() + + config = utils.normalizeQueryConfig(config, values, callback) + + this.text = config.text + this.values = config.values + this.rows = config.rows + this.types = config.types + this.name = config.name + this.queryMode = config.queryMode + this.binary = config.binary + // use unique portal name each time + this.portal = config.portal || '' + this.callback = config.callback + this._rowMode = config.rowMode + if (process.domain && config.callback) { + this.callback = process.domain.bind(config.callback) + } + this._result = new Result(this._rowMode, this.types) + + // potential for multiple results + this._results = this._result + this._canceledDueToError = false + } + + requiresPreparation() { + if (this.queryMode === 'extended') { + return true + } + + // named queries must always be prepared + if (this.name) { + return true + } + // always prepare if there are max number of rows expected per + // portal execution + if (this.rows) { + return true + } + // don't prepare empty text queries + if (!this.text) { + return false + } + // prepare if there are values + if (!this.values) { + return false + } + return this.values.length > 0 + } + + _checkForMultirow() { + // if we already have a result with a command property + // then we've already executed one query in a multi-statement simple query + // turn our results into an array of results + if (this._result.command) { + if (!Array.isArray(this._results)) { + this._results = [this._result] + } + this._result = new Result(this._rowMode, this.types) + this._results.push(this._result) + } + } + + // associates row metadata from the supplied + // message with this query object + // metadata used when parsing row results + handleRowDescription(msg) { + 
this._checkForMultirow() + this._result.addFields(msg.fields) + this._accumulateRows = this.callback || !this.listeners('row').length + } + + handleDataRow(msg) { + let row + + if (this._canceledDueToError) { + return + } + + try { + row = this._result.parseRow(msg.fields) + } catch (err) { + this._canceledDueToError = err + return + } + + this.emit('row', row, this._result) + if (this._accumulateRows) { + this._result.addRow(row) + } + } + + handleCommandComplete(msg, connection) { + this._checkForMultirow() + this._result.addCommandComplete(msg) + // need to sync after each command complete of a prepared statement + // if we were using a row count which results in multiple calls to _getRows + if (this.rows) { + connection.sync() + } + } + + // if a named prepared statement is created with empty query text + // the backend will send an emptyQuery message but *not* a command complete message + // since we pipeline sync immediately after execute we don't need to do anything here + // unless we have rows specified, in which case we did not pipeline the intial sync call + handleEmptyQuery(connection) { + if (this.rows) { + connection.sync() + } + } + + handleError(err, connection) { + // need to sync after error during a prepared statement + if (this._canceledDueToError) { + err = this._canceledDueToError + this._canceledDueToError = false + } + // if callback supplied do not emit error event as uncaught error + // events will bubble up to node process + if (this.callback) { + return this.callback(err) + } + this.emit('error', err) + } + + handleReadyForQuery(con) { + if (this._canceledDueToError) { + return this.handleError(this._canceledDueToError, con) + } + if (this.callback) { + try { + this.callback(null, this._results) + } catch (err) { + process.nextTick(() => { + throw err + }) + } + } + this.emit('end', this._results) + } + + submit(connection) { + if (typeof this.text !== 'string' && typeof this.name !== 'string') { + return new Error('A query must have either text or a name. 
Supplying neither is unsupported.') + } + const previous = connection.parsedStatements[this.name] + if (this.text && previous && this.text !== previous) { + return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + } + if (this.values && !Array.isArray(this.values)) { + return new Error('Query values must be an array') + } + if (this.requiresPreparation()) { + this.prepare(connection) + } else { + connection.query(this.text) + } + return null + } + + hasBeenParsed(connection) { + return this.name && connection.parsedStatements[this.name] + } + + handlePortalSuspended(connection) { + this._getRows(connection, this.rows) + } + + _getRows(connection, rows) { + connection.execute({ + portal: this.portal, + rows: rows, + }) + // if we're not reading pages of rows send the sync command + // to indicate the pipeline is finished + if (!rows) { + connection.sync() + } else { + // otherwise flush the call out to read more rows + connection.flush() + } + } + + // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY + prepare(connection) { + // TODO refactor this poor encapsulation + if (!this.hasBeenParsed(connection)) { + connection.parse({ + text: this.text, + name: this.name, + types: this.types, + }) + } + + // because we're mapping user supplied values to + // postgres wire protocol compatible values it could + // throw an exception, so try/catch this section + try { + connection.bind({ + portal: this.portal, + statement: this.name, + values: this.values, + binary: this.binary, + valueMapper: utils.prepareValue, + }) + } catch (err) { + this.handleError(err, connection) + return + } + + connection.describe({ + type: 'P', + name: this.portal || '', + }) + + this._getRows(connection, this.rows) + } + + handleCopyInResponse(connection) { + connection.sendCopyFail('No source stream defined') + } + + // eslint-disable-next-line no-unused-vars + handleCopyData(msg, connection) { + // noop + } +} + +module.exports = Query diff --git a/node_modules/pg/lib/result.js b/node_modules/pg/lib/result.js new file mode 100644 index 0000000..98018a7 --- /dev/null +++ b/node_modules/pg/lib/result.js @@ -0,0 +1,107 @@ +'use strict' + +var types = require('pg-types') + +var matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/ + +// result object returned from query +// in the 'end' event and also +// passed as second argument to provided callback +class Result { + constructor(rowMode, types) { + this.command = null + this.rowCount = null + this.oid = null + this.rows = [] + this.fields = [] + this._parsers = undefined + this._types = types + this.RowCtor = null + this.rowAsArray = rowMode === 'array' + if (this.rowAsArray) { + this.parseRow = this._parseRowAsArray + } + this._prebuiltEmptyResultObject = null + } + + // adds a command complete message + addCommandComplete(msg) { + var match + if (msg.text) { + // pure javascript + match = matchRegexp.exec(msg.text) + } else { + // native bindings + match = matchRegexp.exec(msg.command) + } + if (match) { + this.command = match[1] + if (match[3]) { + // COMMMAND OID ROWS + this.oid = parseInt(match[2], 10) + this.rowCount = parseInt(match[3], 10) + } else if (match[2]) { + // COMMAND ROWS + this.rowCount = parseInt(match[2], 10) + } + } + } + + _parseRowAsArray(rowData) { + var row = new Array(rowData.length) + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + if (rawValue !== null) { + row[i] = this._parsers[i](rawValue) + } else { + row[i] = null 
+ } + } + return row + } + + parseRow(rowData) { + var row = { ...this._prebuiltEmptyResultObject } + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + var field = this.fields[i].name + if (rawValue !== null) { + row[field] = this._parsers[i](rawValue) + } else { + row[field] = null + } + } + return row + } + + addRow(row) { + this.rows.push(row) + } + + addFields(fieldDescriptions) { + // clears field definitions + // multiple query statements in 1 action can result in multiple sets + // of rowDescriptions...eg: 'select NOW(); select 1::int;' + // you need to reset the fields + this.fields = fieldDescriptions + if (this.fields.length) { + this._parsers = new Array(fieldDescriptions.length) + } + + var row = {} + + for (var i = 0; i < fieldDescriptions.length; i++) { + var desc = fieldDescriptions[i] + row[desc.name] = null + + if (this._types) { + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } else { + this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } + } + this._prebuiltEmptyResultObject = { ...row } + } +} + +module.exports = Result diff --git a/node_modules/pg/lib/stream.js b/node_modules/pg/lib/stream.js new file mode 100644 index 0000000..67b1b3c --- /dev/null +++ b/node_modules/pg/lib/stream.js @@ -0,0 +1,28 @@ +/** + * Get a socket stream compatible with the current runtime environment. + * @returns {Duplex} + */ +module.exports.getStream = function getStream(ssl) { + const net = require('net') + if (typeof net.Socket === 'function') { + return new net.Socket() + } else { + const { CloudflareSocket } = require('pg-cloudflare') + return new CloudflareSocket(ssl) + } +} + +/** + * Get a TLS secured socket, compatible with the current environment, + * using the socket and other settings given in `options`. 
+ * @returns {Duplex} + */ +module.exports.getSecureStream = function getSecureStream(options) { + var tls = require('tls') + if (tls.connect) { + return tls.connect(options) + } else { + options.socket.startTls(options) + return options.socket + } +} diff --git a/node_modules/pg/lib/type-overrides.js b/node_modules/pg/lib/type-overrides.js new file mode 100644 index 0000000..6669348 --- /dev/null +++ b/node_modules/pg/lib/type-overrides.js @@ -0,0 +1,35 @@ +'use strict' + +var types = require('pg-types') + +function TypeOverrides(userTypes) { + this._types = userTypes || types + this.text = {} + this.binary = {} +} + +TypeOverrides.prototype.getOverrides = function (format) { + switch (format) { + case 'text': + return this.text + case 'binary': + return this.binary + default: + return {} + } +} + +TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) { + if (typeof format === 'function') { + parseFn = format + format = 'text' + } + this.getOverrides(format)[oid] = parseFn +} + +TypeOverrides.prototype.getTypeParser = function (oid, format) { + format = format || 'text' + return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format) +} + +module.exports = TypeOverrides diff --git a/node_modules/pg/lib/utils.js b/node_modules/pg/lib/utils.js new file mode 100644 index 0000000..09b8d3d --- /dev/null +++ b/node_modules/pg/lib/utils.js @@ -0,0 +1,213 @@ +'use strict' + +const defaults = require('./defaults') + +function escapeElement(elementRepresentation) { + var escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"') + + return '"' + escaped + '"' +} + +// convert a JS array to a postgres array literal +// uses comma separator so won't work for types like box that use +// a different array separator. +function arrayString(val) { + var result = '{' + for (var i = 0; i < val.length; i++) { + if (i > 0) { + result = result + ',' + } + if (val[i] === null || typeof val[i] === 'undefined') { + result = result + 'NULL' + } else if (Array.isArray(val[i])) { + result = result + arrayString(val[i]) + } else if (ArrayBuffer.isView(val[i])) { + var item = val[i] + if (!(item instanceof Buffer)) { + var buf = Buffer.from(item.buffer, item.byteOffset, item.byteLength) + if (buf.length === item.byteLength) { + item = buf + } else { + item = buf.slice(item.byteOffset, item.byteOffset + item.byteLength) + } + } + result += '\\\\x' + item.toString('hex') + } else { + result += escapeElement(prepareValue(val[i])) + } + } + result = result + '}' + return result +} + +// converts values from javascript types +// to their 'raw' counterparts for use as a postgres parameter +// note: you can override this function to provide your own conversion mechanism +// for complex types, etc... 
+var prepareValue = function (val, seen) { + // null and undefined are both null for postgres + if (val == null) { + return null + } + if (val instanceof Buffer) { + return val + } + if (ArrayBuffer.isView(val)) { + var buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength) + if (buf.length === val.byteLength) { + return buf + } + return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params + } + if (val instanceof Date) { + if (defaults.parseInputDatesAsUTC) { + return dateToStringUTC(val) + } else { + return dateToString(val) + } + } + if (Array.isArray(val)) { + return arrayString(val) + } + if (typeof val === 'object') { + return prepareObject(val, seen) + } + return val.toString() +} + +function prepareObject(val, seen) { + if (val && typeof val.toPostgres === 'function') { + seen = seen || [] + if (seen.indexOf(val) !== -1) { + throw new Error('circular reference detected while preparing "' + val + '" for query') + } + seen.push(val) + + return prepareValue(val.toPostgres(prepareValue), seen) + } + return JSON.stringify(val) +} + +function pad(number, digits) { + number = '' + number + while (number.length < digits) { + number = '0' + number + } + return number +} + +function dateToString(date) { + var offset = -date.getTimezoneOffset() + + var year = date.getFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getMonth() + 1, 2) + + '-' + + pad(date.getDate(), 2) + + 'T' + + pad(date.getHours(), 2) + + ':' + + pad(date.getMinutes(), 2) + + ':' + + pad(date.getSeconds(), 2) + + '.' + + pad(date.getMilliseconds(), 3) + + if (offset < 0) { + ret += '-' + offset *= -1 + } else { + ret += '+' + } + + ret += pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2) + if (isBCYear) ret += ' BC' + return ret +} + +function dateToStringUTC(date) { + var year = date.getUTCFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getUTCMonth() + 1, 2) + + '-' + + pad(date.getUTCDate(), 2) + + 'T' + + pad(date.getUTCHours(), 2) + + ':' + + pad(date.getUTCMinutes(), 2) + + ':' + + pad(date.getUTCSeconds(), 2) + + '.' + + pad(date.getUTCMilliseconds(), 3) + + ret += '+00:00' + if (isBCYear) ret += ' BC' + return ret +} + +function normalizeQueryConfig(config, values, callback) { + // can take in strings or config objects + config = typeof config === 'string' ? 
{ text: config } : config + if (values) { + if (typeof values === 'function') { + config.callback = values + } else { + config.values = values + } + } + if (callback) { + config.callback = callback + } + return config +} + +// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c +const escapeIdentifier = function (str) { + return '"' + str.replace(/"/g, '""') + '"' +} + +const escapeLiteral = function (str) { + var hasBackslash = false + var escaped = "'" + + for (var i = 0; i < str.length; i++) { + var c = str[i] + if (c === "'") { + escaped += c + c + } else if (c === '\\') { + escaped += c + c + hasBackslash = true + } else { + escaped += c + } + } + + escaped += "'" + + if (hasBackslash === true) { + escaped = ' E' + escaped + } + + return escaped +} + +module.exports = { + prepareValue: function prepareValueWrapper(value) { + // this ensures that extra arguments do not get passed into prepareValue + // by accident, eg: from calling values.map(utils.prepareValue) + return prepareValue(value) + }, + normalizeQueryConfig, + escapeIdentifier, + escapeLiteral, +} diff --git a/node_modules/pg/package.json b/node_modules/pg/package.json new file mode 100644 index 0000000..e24d1ee --- /dev/null +++ b/node_modules/pg/package.json @@ -0,0 +1,62 @@ +{ + "name": "pg", + "version": "8.12.0", + "description": "PostgreSQL client - pure javascript & libpq with the same API", + "keywords": [ + "database", + "libpq", + "pg", + "postgre", + "postgres", + "postgresql", + "rdbms" + ], + "homepage": "https://github.com/brianc/node-postgres", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg" + }, + "author": "Brian Carlson ", + "main": "./lib", + "dependencies": { + "pg-connection-string": "^2.6.4", + "pg-pool": "^3.6.2", + "pg-protocol": "^1.6.1", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20230404.0", + "async": "2.6.4", + "bluebird": "3.5.2", + "co": "4.6.0", + "pg-copy-streams": "0.3.0", + "typescript": "^4.0.3", + "workerd": "^1.20230419.0", + "wrangler": "3.58.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.1.1" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + }, + "scripts": { + "test": "make test-all" + }, + "files": [ + "lib", + "SPONSORS.md" + ], + "license": "MIT", + "engines": { + "node": ">= 8.0.0" + }, + "gitHead": "0f42880861951970e193d31359508d460a67d25a" +} diff --git a/node_modules/pgpass/README.md b/node_modules/pgpass/README.md new file mode 100644 index 0000000..bbc5193 --- /dev/null +++ b/node_modules/pgpass/README.md @@ -0,0 +1,74 @@ +# pgpass + +[![Build Status](https://github.com/hoegaarden/pgpass/workflows/CI/badge.svg?branch=master)](https://github.com/hoegaarden/pgpass/actions?query=workflow%3ACI+branch%3Amaster) + +## Install + +```sh +npm install pgpass +``` + +## Usage +```js +var pgPass = require('pgpass'); + +var connInfo = { + 'host' : 'pgserver' , + 'user' : 'the_user_name' , +}; + +pgPass(connInfo, function(pass){ + conn_info.password = pass; + // connect to postgresql server +}); +``` + +## Description + +This module tries to read the `~/.pgpass` file (or the equivalent for windows systems). If the environment variable `PGPASSFILE` is set, this file is used instead. If everything goes right, the password from said file is passed to the callback; if the password cannot be read `undefined` is passed to the callback. 
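+
+For example, a minimal sketch of this lookup (the password file path and connection details below are illustrative only, not defaults of the module):
+
+```js
+var pgPass = require('pgpass');
+
+// Point the module at a specific password file instead of ~/.pgpass
+process.env.PGPASSFILE = '/srv/app/.pgpass';
+
+var connInfo = {
+  'host'     : 'pgserver',
+  'port'     : 5432,
+  'database' : 'the_db',
+  'user'     : 'the_user_name'
+};
+
+pgPass(connInfo, function(pass){
+  // pass is the matching password, or undefined if no usable entry was found
+  console.log(typeof pass === 'string' ? 'password found' : 'no password');
+});
+```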
+ +Cases where `undefined` is returned: + +- the environment variable `PGPASSWORD` is set +- the file cannot be read (wrong permissions, no such file, ...) +- for non windows systems: the file is write-/readable by the group or by other users +- there is no matching line for the given connection info + +There should be no need to use this module directly; it is already included in `node-postgres`. + +## Configuration + +The module reads the environment variable `PGPASS_NO_DEESCAPE` to decide if the the read tokens from the password file should be de-escaped or not. Default is to do de-escaping. For further information on this see [this commit](https://github.com/postgres/postgres/commit/8d15e3ec4fcb735875a8a70a09ec0c62153c3329). + + +## Tests + +There are tests in `./test/`; including linting and coverage testing. Running `npm test` runs: + +- `jshint` +- `mocha` tests +- `jscoverage` and `mocha -R html-cov` + +You can see the coverage report in `coverage.html`. + + +## Development, Patches, Bugs, ... + +If you find Bugs or have improvements, please feel free to open a issue on GitHub. If you provide a pull request, I'm more than happy to merge them, just make sure to add tests for your changes. + +## Links + +- https://github.com/hoegaarden/node-pgpass +- http://www.postgresql.org/docs/current/static/libpq-pgpass.html +- https://wiki.postgresql.org/wiki/Pgpass +- https://github.com/postgres/postgres/blob/master/src/interfaces/libpq/fe-connect.c + +## License + +Copyright (c) 2013-2016 Hannes Hörl + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/pgpass/lib/helper.js b/node_modules/pgpass/lib/helper.js new file mode 100644 index 0000000..f988460 --- /dev/null +++ b/node_modules/pgpass/lib/helper.js @@ -0,0 +1,233 @@ +'use strict'; + +var path = require('path') + , Stream = require('stream').Stream + , split = require('split2') + , util = require('util') + , defaultPort = 5432 + , isWin = (process.platform === 'win32') + , warnStream = process.stderr +; + + +var S_IRWXG = 56 // 00070(8) + , S_IRWXO = 7 // 00007(8) + , S_IFMT = 61440 // 00170000(8) + , S_IFREG = 32768 // 0100000(8) +; +function isRegFile(mode) { + return ((mode & S_IFMT) == S_IFREG); +} + +var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ]; +var nrOfFields = fieldNames.length; +var passKey = fieldNames[ nrOfFields -1 ]; + + +function warn() { + var isWritable = ( + warnStream instanceof Stream && + true === warnStream.writable + ); + + if (isWritable) { + var args = Array.prototype.slice.call(arguments).concat("\n"); + warnStream.write( util.format.apply(util, args) ); + } +} + + +Object.defineProperty(module.exports, 'isWin', { + get : function() { + return isWin; + } , + set : function(val) { + isWin = val; + } +}); + + +module.exports.warnTo = function(stream) { + var old = warnStream; + warnStream = stream; + return old; +}; + +module.exports.getFileName = function(rawEnv){ + var env = rawEnv || process.env; + var file = env.PGPASSFILE || ( + isWin ? + path.join( env.APPDATA || './' , 'postgresql', 'pgpass.conf' ) : + path.join( env.HOME || './', '.pgpass' ) + ); + return file; +}; + +module.exports.usePgPass = function(stats, fname) { + if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) { + return false; + } + + if (isWin) { + return true; + } + + fname = fname || ''; + + if (! isRegFile(stats.mode)) { + warn('WARNING: password file "%s" is not a plain file', fname); + return false; + } + + if (stats.mode & (S_IRWXG | S_IRWXO)) { + /* If password file is insecure, alert the user and ignore it. 
*/ + warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname); + return false; + } + + return true; +}; + + +var matcher = module.exports.match = function(connInfo, entry) { + return fieldNames.slice(0, -1).reduce(function(prev, field, idx){ + if (idx == 1) { + // the port + if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) { + return prev && true; + } + } + return prev && ( + entry[field] === '*' || + entry[field] === connInfo[field] + ); + }, true); +}; + + +module.exports.getPassword = function(connInfo, stream, cb) { + var pass; + var lineStream = stream.pipe(split()); + + function onLine(line) { + var entry = parseLine(line); + if (entry && isValidEntry(entry) && matcher(connInfo, entry)) { + pass = entry[passKey]; + lineStream.end(); // -> calls onEnd(), but pass is set now + } + } + + var onEnd = function() { + stream.destroy(); + cb(pass); + }; + + var onErr = function(err) { + stream.destroy(); + warn('WARNING: error on reading file: %s', err); + cb(undefined); + }; + + stream.on('error', onErr); + lineStream + .on('data', onLine) + .on('end', onEnd) + .on('error', onErr) + ; + +}; + + +var parseLine = module.exports.parseLine = function(line) { + if (line.length < 11 || line.match(/^\s+#/)) { + return null; + } + + var curChar = ''; + var prevChar = ''; + var fieldIdx = 0; + var startIdx = 0; + var endIdx = 0; + var obj = {}; + var isLastField = false; + var addToObj = function(idx, i0, i1) { + var field = line.substring(i0, i1); + + if (! Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) { + field = field.replace(/\\([:\\])/g, '$1'); + } + + obj[ fieldNames[idx] ] = field; + }; + + for (var i = 0 ; i < line.length-1 ; i += 1) { + curChar = line.charAt(i+1); + prevChar = line.charAt(i); + + isLastField = (fieldIdx == nrOfFields-1); + + if (isLastField) { + addToObj(fieldIdx, startIdx); + break; + } + + if (i >= 0 && curChar == ':' && prevChar !== '\\') { + addToObj(fieldIdx, startIdx, i+1); + + startIdx = i+2; + fieldIdx += 1; + } + } + + obj = ( Object.keys(obj).length === nrOfFields ) ? 
obj : null; + + return obj; +}; + + +var isValidEntry = module.exports.isValidEntry = function(entry){ + var rules = { + // host + 0 : function(x){ + return x.length > 0; + } , + // port + 1 : function(x){ + if (x === '*') { + return true; + } + x = Number(x); + return ( + isFinite(x) && + x > 0 && + x < 9007199254740992 && + Math.floor(x) === x + ); + } , + // database + 2 : function(x){ + return x.length > 0; + } , + // username + 3 : function(x){ + return x.length > 0; + } , + // password + 4 : function(x){ + return x.length > 0; + } + }; + + for (var idx = 0 ; idx < fieldNames.length ; idx += 1) { + var rule = rules[idx]; + var value = entry[ fieldNames[idx] ] || ''; + + var res = rule(value); + if (!res) { + return false; + } + } + + return true; +}; + diff --git a/node_modules/pgpass/lib/index.js b/node_modules/pgpass/lib/index.js new file mode 100644 index 0000000..ecfcf30 --- /dev/null +++ b/node_modules/pgpass/lib/index.js @@ -0,0 +1,23 @@ +'use strict'; + +var path = require('path') + , fs = require('fs') + , helper = require('./helper.js') +; + + +module.exports = function(connInfo, cb) { + var file = helper.getFileName(); + + fs.stat(file, function(err, stat){ + if (err || !helper.usePgPass(stat, file)) { + return cb(undefined); + } + + var st = fs.createReadStream(file); + + helper.getPassword(connInfo, st, cb); + }); +}; + +module.exports.warnTo = helper.warnTo; diff --git a/node_modules/pgpass/package.json b/node_modules/pgpass/package.json new file mode 100644 index 0000000..22bfe84 --- /dev/null +++ b/node_modules/pgpass/package.json @@ -0,0 +1,41 @@ +{ + "name": "pgpass", + "version": "1.0.5", + "description": "Module for reading .pgpass", + "main": "lib/index", + "scripts": { + "pretest": "chmod 600 ./test/_pgpass", + "_hint": "jshint --exclude node_modules --verbose lib test", + "_test": "mocha --recursive -R list", + "_covered_test": "nyc --reporter html --reporter text \"$npm_execpath\" run _test", + "test": "\"$npm_execpath\" run _hint && \"$npm_execpath\" run _covered_test" + }, + "author": "Hannes Hörl ", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + }, + "devDependencies": { + "jshint": "^2.12.0", + "mocha": "^8.2.0", + "nyc": "^15.1.0", + "pg": "^8.4.1", + "pg-escape": "^0.2.0", + "pg-native": "3.0.0", + "resumer": "0.0.0", + "tmp": "^0.2.1", + "which": "^2.0.2" + }, + "keywords": [ + "postgres", + "pg", + "pgpass", + "password", + "postgresql" + ], + "bugs": "https://github.com/hoegaarden/pgpass/issues", + "repository": { + "type": "git", + "url": "https://github.com/hoegaarden/pgpass.git" + } +} diff --git a/node_modules/picomatch/CHANGELOG.md b/node_modules/picomatch/CHANGELOG.md new file mode 100644 index 0000000..8ccc6c1 --- /dev/null +++ b/node_modules/picomatch/CHANGELOG.md @@ -0,0 +1,136 @@ +# Release history + +**All notable changes to this project will be documented in this file.** + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+
+**Guiding Principles**
+
+- Changelogs are for humans, not machines.
+- There should be an entry for every single version.
+- The same types of changes should be grouped.
+- Versions and sections should be linkable.
+- The latest version comes first.
+- The release date of each version is displayed.
+- Mention whether you follow Semantic Versioning.
+
+ +
+
+**Types of changes**
+
+Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_:
+
+- `Added` for new features.
+- `Changed` for changes in existing functionality.
+- `Deprecated` for soon-to-be removed features.
+- `Removed` for now removed features.
+- `Fixed` for any bug fixes.
+- `Security` in case of vulnerabilities.
+
+ +## 2.3.1 (2022-01-02) + +### Fixed + +* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). + +### Changed + +* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). + +## 2.3.0 (2021-05-21) + +### Fixed + +* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) + +## 2.2.3 (2021-04-10) + +### Fixed + +* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). +* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). + +## 2.2.2 (2020-03-21) + +### Fixed + +* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). + +## 2.2.1 (2020-01-04) + +* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. + +## 2.2.0 (2020-01-04) + +* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) +* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. + +## 2.1.0 (2019-10-31) + +* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) +* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) +* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) +* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) +* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) +* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) +* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) +* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) +* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) +* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) +* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) +* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) +* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) +* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) +* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) +* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) +* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) +* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) +* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) +* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) +* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) +* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) + +## 2.0.7 (2019-05-14) + +* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) +* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) +* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) +* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) + +## 2.0.4 (2019-04-10) + +### Fixed + +- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. +- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. + +## 2.0.0 (2019-04-10) + +### Added + +- Adds support for `options.onIgnore`. See the readme for details +- Adds support for `options.onResult`. See the readme for details + +### Breaking changes + +- The unixify option was renamed to `windows` +- caching and all related options and methods have been removed + +## 1.0.0 (2018-11-05) + +- adds `.onMatch` option +- improvements to `.scan` method +- numerous improvements and optimizations for matching and parsing +- better windows path handling + +## 0.1.0 - 2017-04-13 + +First release. + + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE new file mode 100644 index 0000000..3608dca --- /dev/null +++ b/node_modules/picomatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md new file mode 100644 index 0000000..b0526e2 --- /dev/null +++ b/node_modules/picomatch/README.md @@ -0,0 +1,708 @@ +

+# Picomatch
+
+> Blazing fast and accurate glob matcher written in JavaScript.
+> No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.
+
+[npm version, test status, coverage status, and downloads badges omitted]
+
+ +## Why picomatch? + +* **Lightweight** - No dependencies +* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. +* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) +* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) +* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. +* **Well tested** - Thousands of unit tests + +See the [library comparison](#library-comparisons) to other libraries. + +
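+
+As the "Performant" point above suggests, the intended usage pattern is to create the matcher function once and reuse it. A small illustrative sketch (the pattern and paths are made up):
+
+```js
+const picomatch = require('picomatch');
+
+// Create the matcher function once...
+const isSourceFile = picomatch('src/**/*.js');
+
+// ...then reuse it for every path that needs checking.
+const paths = ['src/index.js', 'src/lib/util.js', 'test/fixture.txt'];
+console.log(paths.filter(isSourceFile)); //=> ['src/index.js', 'src/lib/util.js']
+```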
+
+ +## Table of Contents + +
+
+- [Install](#install)
+- [Usage](#usage)
+- [API](#api)
+  * [picomatch](#picomatch)
+  * [.test](#test)
+  * [.matchBase](#matchbase)
+  * [.isMatch](#ismatch)
+  * [.parse](#parse)
+  * [.scan](#scan)
+  * [.compileRe](#compilere)
+  * [.makeRe](#makere)
+  * [.toRegex](#toregex)
+- [Options](#options)
+  * [Picomatch options](#picomatch-options)
+  * [Scan Options](#scan-options)
+  * [Options Examples](#options-examples)
+- [Globbing features](#globbing-features)
+  * [Basic globbing](#basic-globbing)
+  * [Advanced globbing](#advanced-globbing)
+  * [Braces](#braces)
+  * [Matching special characters as literals](#matching-special-characters-as-literals)
+- [Library Comparisons](#library-comparisons)
+- [Benchmarks](#benchmarks)
+- [Philosophies](#philosophies)
+- [About](#about)
+  * [Author](#author)
+  * [License](#license)
+
+_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_
+
+ +
+
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +npm install --save picomatch +``` + +
+ +## Usage + +The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. + +```js +const pm = require('picomatch'); +const isMatch = pm('*.js'); + +console.log(isMatch('abcd')); //=> false +console.log(isMatch('a.js')); //=> true +console.log(isMatch('a.md')); //=> false +console.log(isMatch('a/b.js')); //=> false +``` + +
+ +## API + +### [picomatch](lib/picomatch.js#L32) + +Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. + +**Params** + +* `globs` **{String|Array}**: One or more glob patterns. +* `options` **{Object=}** +* `returns` **{Function=}**: Returns a matcher function. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch(glob[, options]); + +const isMatch = picomatch('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +### [.test](lib/picomatch.js#L117) + +Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. + +**Params** + +* `input` **{String}**: String to test. +* `regex` **{RegExp}** +* `returns` **{Object}**: Returns an object with matching info. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.test(input, regex[, options]); + +console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); +// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } +``` + +### [.matchBase](lib/picomatch.js#L161) + +Match the basename of a filepath. + +**Params** + +* `input` **{String}**: String to test. +* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). +* `returns` **{Boolean}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.matchBase(input, glob[, options]); +console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true +``` + +### [.isMatch](lib/picomatch.js#L183) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* **{String|Array}**: str The string to test. +* **{String|Array}**: patterns One or more glob patterns to use for matching. +* **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.isMatch(string, patterns[, options]); + +console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(picomatch.isMatch('a.a', 'b.*')); //=> false +``` + +### [.parse](lib/picomatch.js#L199) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.parse(pattern[, options]); +``` + +### [.scan](lib/picomatch.js#L231) + +Scan a glob pattern to separate the pattern into segments. + +**Params** + +* `input` **{String}**: Glob pattern to scan. +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.scan(input[, options]); + +const result = picomatch.scan('!./foo/*.js'); +console.log(result); +{ prefix: '!./', + input: '!./foo/*.js', + start: 3, + base: 'foo', + glob: '*.js', + isBrace: false, + isBracket: false, + isGlob: true, + isExtglob: false, + isGlobstar: false, + negated: true } +``` + +### [.compileRe](lib/picomatch.js#L245) + +Compile a regular expression from the `state` object returned by the +[parse()](#parse) method. 
+ +**Params** + +* `state` **{Object}** +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. +* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. +* `returns` **{RegExp}** + +### [.makeRe](lib/picomatch.js#L286) + +Create a regular expression from a parsed glob pattern. + +**Params** + +* `state` **{String}**: The object returned from the `.parse` method. +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. +* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const picomatch = require('picomatch'); +const state = picomatch.parse('*.js'); +// picomatch.compileRe(state[, options]); + +console.log(picomatch.compileRe(state)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +### [.toRegex](lib/picomatch.js#L321) + +Create a regular expression from the given regex source string. + +**Params** + +* `source` **{String}**: Regular expression source string. +* `options` **{Object}** +* `returns` **{RegExp}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.toRegex(source[, options]); + +const { output } = picomatch.parse('*.js'); +console.log(picomatch.toRegex(output)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +
+ +## Options + +### Picomatch options + +The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | +| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | +| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | +| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | +| `matchBase` | `boolean` | `false` | Alias for `basename` | +| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | +| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | +| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | +| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | +| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| +| `noext` | `boolean` | `false` | Alias for `noextglob` | +| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | +| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | +| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | +| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | +| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | +| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | +| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | +| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | +| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | +| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | +| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | +| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | +| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | +| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | +| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | + +picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. + +### Scan Options + +In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | +| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.scan('!./foo/*.js', { tokens: true }); +console.log(result); +// { +// prefix: '!./', +// input: '!./foo/*.js', +// start: 3, +// base: 'foo', +// glob: '*.js', +// isBrace: false, +// isBracket: false, +// isGlob: true, +// isExtglob: false, +// isGlobstar: false, +// negated: true, +// maxDepth: 2, +// tokens: [ +// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, +// { value: 'foo', depth: 1, isGlob: false }, +// { value: '*.js', depth: 1, isGlob: true } +// ], +// slashes: [ 2, 6 ], +// parts: [ 'foo', '*.js' ] +// } +``` + +
+ +### Options Examples + +#### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a folder + +```js +const fill = require('fill-range'); +const regex = pm.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex); +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +#### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')); //=> true +``` + +#### options.onMatch + +```js +const onMatch = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onMatch }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onIgnore + +```js +const onIgnore = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onResult + +```js +const onResult = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onResult, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +
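+
+#### options.ignore and options.nocase
+
+The `ignore` and `nocase` options from the table above can be combined with any pattern. A brief illustrative sketch (the patterns and file names are made up):
+
+```js
+const picomatch = require('picomatch');
+
+// Exclude anything matching the ignore pattern, even if the main glob matches.
+console.log(picomatch.isMatch('vendor.js', '*.js', { ignore: 'vendor*' })); //=> false
+console.log(picomatch.isMatch('app.js', '*.js', { ignore: 'vendor*' }));    //=> true
+
+// Case-insensitive matching.
+console.log(picomatch.isMatch('README.MD', '*.md'));                   //=> false
+console.log(picomatch.isMatch('README.MD', '*.md', { nocase: true })); //=> true
+```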
+
+ +## Globbing features + +* [Basic globbing](#basic-globbing) (Wildcard matching) +* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching) + +### Basic globbing + +| **Character** | **Description** | +| --- | --- | +| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. | +| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. | +| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. | +| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. | + +#### Matching behavior vs. Bash + +Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions: + +* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`. +* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` bracktracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`. + +
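+
+The table and the Bash notes above can be summarised with a few checks (an illustrative sketch using the default options):
+
+```js
+const pm = require('picomatch');
+
+// `*` stays within a single path segment; `**` crosses segments.
+console.log(pm.isMatch('foo/bar/baz', '*'));           //=> false
+console.log(pm.isMatch('foo/bar/baz', '**'));          //=> true
+console.log(pm.isMatch('foo/bar/baz', 'foo/*/baz'));   //=> true
+
+// Dotfiles are not matched unless the `dot` option is enabled.
+console.log(pm.isMatch('.npmrc', '*'));                //=> false
+console.log(pm.isMatch('.npmrc', '*', { dot: true })); //=> true
+```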
+ +### Advanced globbing + +* [extglobs](#extglobs) +* [POSIX brackets](#posix-brackets) +* [Braces](#brace-expansion) + +#### Extglobs + +| **Pattern** | **Description** | +| --- | --- | +| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | +| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | +| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | +| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | +| `!(pattern)` | Match _anything but_ `pattern` | + +**Examples** + +```js +const pm = require('picomatch'); + +// *(pattern) matches ZERO or more of "pattern" +console.log(pm.isMatch('a', 'a*(z)')); // true +console.log(pm.isMatch('az', 'a*(z)')); // true +console.log(pm.isMatch('azzz', 'a*(z)')); // true + +// +(pattern) matches ONE or more of "pattern" +console.log(pm.isMatch('a', 'a*(z)')); // true +console.log(pm.isMatch('az', 'a*(z)')); // true +console.log(pm.isMatch('azzz', 'a*(z)')); // true + +// supports multiple extglobs +console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false + +// supports nested extglobs +console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true +``` + +#### POSIX brackets + +POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. + +**Enable POSIX bracket support** + +```js +console.log(pm.makeRe('[[:word:]]+', { posix: true })); +//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ +``` + +**Supported POSIX classes** + +The following named POSIX bracket expressions are supported: + +* `[:alnum:]` - Alphanumeric characters, equ `[a-zA-Z0-9]` +* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. +* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. +* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. +* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. +* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. +* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. +* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. +* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. +* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. +* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. +* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. +* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. +* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. + +See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. + +### Braces + +Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces. + +### Matching special characters as literals + +If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes: + +**Special Characters** + +Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. 
+
+To match any of the following characters as literals: `$^*+?()[]`
+
+Examples:
+
+```js
+console.log(pm.makeRe('foo/bar \\(1\\)'));
+console.log(pm.makeRe('foo/bar \\(1\\)'));
+```
+
+
+
+## Library Comparisons
+
+The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets).
+
+| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - |
+| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - |
+| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - |
+| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - |
+| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - |
+| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ |
+| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ |
+| File system operations | - | - | - | - | - | - | - |
+
+
+
+## Benchmarks
+
+Performance comparison of picomatch and minimatch.
+
+```
+# .makeRe star
+  picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled)
+  minimatch x 627,206 ops/sec ±1.96% (87 runs sampled)
+
+# .makeRe star; dot=true
+  picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled)
+  minimatch x 525,876 ops/sec ±0.60% (88 runs sampled)
+
+# .makeRe globstar
+  picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled)
+  minimatch x 962,043 ops/sec ±1.76% (91 runs sampled)
+
+# .makeRe globstars
+  picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled)
+  minimatch x 477,179 ops/sec ±1.33% (91 runs sampled)
+
+# .makeRe with leading star
+  picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled)
+  minimatch x 453,564 ops/sec ±1.43% (94 runs sampled)
+
+# .makeRe - basic braces
+  picomatch x 392,067 ops/sec ±0.70% (90 runs sampled)
+  minimatch x 99,532 ops/sec ±2.03% (87 runs sampled)
+```
+
+
+ +## Philosophies + +The goal of this library is to be blazing fast, without compromising on accuracy. + +**Accuracy** + +The number one of goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly more complicated when combinations of different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`. + +Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to implement our best judgement and rely on feedback from users to make improvements. + +**Performance** + +Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer. + +
+
+ +## About + +
+
+**Contributing**
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
+
+ +
+
+**Running Tests**
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+npm install && npm test
+```
+
+ +
+
+**Building docs**
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js new file mode 100644 index 0000000..d2f2bc5 --- /dev/null +++ b/node_modules/picomatch/index.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/picomatch'); diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js new file mode 100644 index 0000000..a62ef38 --- /dev/null +++ b/node_modules/picomatch/lib/constants.js @@ -0,0 +1,179 @@ +'use strict'; + +const path = require('path'); +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)` +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. 
+ CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + SEP: path.sep, + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js new file mode 100644 index 0000000..58269d0 --- /dev/null +++ b/node_modules/picomatch/lib/parse.js @@ -0,0 +1,1091 @@ +'use strict'; + +const constants = require('./constants'); +const utils = require('./utils'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(win32); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current 
index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(win32); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js new file mode 100644 index 0000000..782d809 --- /dev/null +++ b/node_modules/picomatch/lib/picomatch.js @@ -0,0 +1,342 @@ +'use strict'; + +const path = require('path'); +const scan = require('./scan'); +const parse = require('./parse'); +const utils = require('./utils'); +const constants = require('./constants'); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = utils.isWindows(options); + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(path.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js new file mode 100644 index 0000000..e59cd7a --- /dev/null +++ b/node_modules/picomatch/lib/scan.js @@ -0,0 +1,391 @@ +'use strict'; + +const utils = require('./utils'); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = require('./constants'); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
+ * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js new file mode 100644 index 0000000..c3ca766 --- /dev/null +++ b/node_modules/picomatch/lib/utils.js @@ -0,0 +1,64 @@ +'use strict'; + +const path = require('path'); +const win32 = process.platform === 'win32'; +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = require('./constants'); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + return false; +}; + +exports.isWindows = options => { + if (options && typeof options.windows === 'boolean') { + return options.windows; + } + return win32 === true || path.sep === '\\'; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? 
'' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json new file mode 100644 index 0000000..3db22d4 --- /dev/null +++ b/node_modules/picomatch/package.json @@ -0,0 +1,81 @@ +{ + "name": "picomatch", + "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", + "version": "2.3.1", + "homepage": "https://github.com/micromatch/picomatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "funding": "https://github.com/sponsors/jonschlinkert", + "repository": "micromatch/picomatch", + "bugs": { + "url": "https://github.com/micromatch/picomatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8.6" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "devDependencies": { + "eslint": "^6.8.0", + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.2.2", + "nyc": "^15.0.0", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "glob", + "match", + "picomatch" + ], + "nyc": { + "reporter": [ + "html", + "lcov", + "text-summary" + ] + }, + "verb": { + "toc": { + "render": true, + "method": "preWrite", + "maxdepth": 3 + }, + "layout": "empty", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "micromatch" + ] + }, + "reflinks": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "micromatch", + "minimatch", + "nanomatch", + "picomatch" + ] + } +} diff --git a/node_modules/postgres-array/index.d.ts b/node_modules/postgres-array/index.d.ts new file mode 100644 index 0000000..88665bd --- /dev/null +++ b/node_modules/postgres-array/index.d.ts @@ -0,0 +1,4 @@ + +export function parse(source: string): string[]; +export function parse(source: string, transform: (value: string) => T): T[]; + diff --git a/node_modules/postgres-array/index.js b/node_modules/postgres-array/index.js new file mode 100644 index 0000000..18bfd16 --- /dev/null +++ b/node_modules/postgres-array/index.js @@ -0,0 +1,97 @@ +'use strict' + +exports.parse = function (source, transform) { + return new ArrayParser(source, transform).parse() +} + +class ArrayParser { + constructor (source, transform) { + this.source = source + this.transform = transform || identity + this.position = 0 + this.entries = [] + this.recorded = [] + this.dimension = 0 + } + + isEof () { + return this.position >= this.source.length + } + + nextCharacter () { + var character = this.source[this.position++] + if (character === '\\') { + return { + value: this.source[this.position++], + escaped: true + } + } + return { + value: character, + escaped: false + } + } + + record (character) { + this.recorded.push(character) + } + + newEntry (includeEmpty) { + var entry + if (this.recorded.length > 0 || includeEmpty) { + entry = this.recorded.join('') + if (entry === 'NULL' && !includeEmpty) { + entry = null + } + if (entry !== 
null) entry = this.transform(entry) + this.entries.push(entry) + this.recorded = [] + } + } + + consumeDimensions () { + if (this.source[0] === '[') { + while (!this.isEof()) { + var char = this.nextCharacter() + if (char.value === '=') break + } + } + } + + parse (nested) { + var character, parser, quote + this.consumeDimensions() + while (!this.isEof()) { + character = this.nextCharacter() + if (character.value === '{' && !quote) { + this.dimension++ + if (this.dimension > 1) { + parser = new ArrayParser(this.source.substr(this.position - 1), this.transform) + this.entries.push(parser.parse(true)) + this.position += parser.position - 2 + } + } else if (character.value === '}' && !quote) { + this.dimension-- + if (!this.dimension) { + this.newEntry() + if (nested) return this.entries + } + } else if (character.value === '"' && !character.escaped) { + if (quote) this.newEntry(true) + quote = !quote + } else if (character.value === ',' && !quote) { + this.newEntry() + } else { + this.record(character.value) + } + } + if (this.dimension !== 0) { + throw new Error('array dimension not balanced') + } + return this.entries + } +} + +function identity (value) { + return value +} diff --git a/node_modules/postgres-array/license b/node_modules/postgres-array/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/node_modules/postgres-array/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/postgres-array/package.json b/node_modules/postgres-array/package.json new file mode 100644 index 0000000..d6aa94e --- /dev/null +++ b/node_modules/postgres-array/package.json @@ -0,0 +1,35 @@ +{ + "name": "postgres-array", + "main": "index.js", + "version": "2.0.0", + "description": "Parse postgres array columns", + "license": "MIT", + "repository": "bendrucker/postgres-array", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "types": "index.d.ts", + "keywords": [ + "postgres", + "array", + "parser" + ], + "dependencies": {}, + "devDependencies": { + "standard": "^12.0.1", + "tape": "^4.0.0" + }, + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ] +} diff --git a/node_modules/postgres-array/readme.md b/node_modules/postgres-array/readme.md new file mode 100644 index 0000000..b74b369 --- /dev/null +++ b/node_modules/postgres-array/readme.md @@ -0,0 +1,43 @@ +# postgres-array [![Build Status](https://travis-ci.org/bendrucker/postgres-array.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-array) + +> Parse postgres array columns + + +## Install + +``` +$ npm install --save postgres-array +``` + + +## Usage + +```js +var postgresArray = require('postgres-array') + +postgresArray.parse('{1,2,3}', (value) => parseInt(value, 10)) +//=> [1, 2, 3] +``` + +## API + +#### `parse(input, [transform])` -> `array` + +##### input + +*Required* +Type: `string` + +A Postgres array string. + +##### transform + +Type: `function` +Default: `identity` + +A function that transforms non-null values inserted into the array. + + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-bytea/index.js b/node_modules/postgres-bytea/index.js new file mode 100644 index 0000000..d1107a0 --- /dev/null +++ b/node_modules/postgres-bytea/index.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports = function parseBytea (input) { + if (/^\\x/.test(input)) { + // new 'hex' style response (pg >9.0) + return new Buffer(input.substr(2), 'hex') + } + var output = '' + var i = 0 + while (i < input.length) { + if (input[i] !== '\\') { + output += input[i] + ++i + } else { + if (/[0-7]{3}/.test(input.substr(i + 1, 3))) { + output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8)) + i += 4 + } else { + var backslashes = 1 + while (i + backslashes < input.length && input[i + backslashes] === '\\') { + backslashes++ + } + for (var k = 0; k < Math.floor(backslashes / 2); ++k) { + output += '\\' + } + i += Math.floor(backslashes / 2) * 2 + } + } + } + return new Buffer(output, 'binary') +} diff --git a/node_modules/postgres-bytea/license b/node_modules/postgres-bytea/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/node_modules/postgres-bytea/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of 
the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/postgres-bytea/package.json b/node_modules/postgres-bytea/package.json new file mode 100644 index 0000000..cac1741 --- /dev/null +++ b/node_modules/postgres-bytea/package.json @@ -0,0 +1,34 @@ +{ + "name": "postgres-bytea", + "main": "index.js", + "version": "1.0.0", + "description": "Postgres bytea parser", + "license": "MIT", + "repository": "bendrucker/postgres-bytea", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "keywords": [ + "bytea", + "postgres", + "binary", + "parser" + ], + "dependencies": {}, + "devDependencies": { + "tape": "^4.0.0", + "standard": "^4.0.0" + }, + "files": [ + "index.js", + "readme.md" + ] +} diff --git a/node_modules/postgres-bytea/readme.md b/node_modules/postgres-bytea/readme.md new file mode 100644 index 0000000..4939c3b --- /dev/null +++ b/node_modules/postgres-bytea/readme.md @@ -0,0 +1,34 @@ +# postgres-bytea [![Build Status](https://travis-ci.org/bendrucker/postgres-bytea.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-bytea) + +> Postgres bytea parser + + +## Install + +``` +$ npm install --save postgres-bytea +``` + + +## Usage + +```js +var bytea = require('postgres-bytea'); +bytea('\\000\\100\\200') +//=> buffer +``` + +## API + +#### `bytea(input)` -> `buffer` + +##### input + +*Required* +Type: `string` + +A Postgres bytea binary string. + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-date/index.js b/node_modules/postgres-date/index.js new file mode 100644 index 0000000..5dc73fb --- /dev/null +++ b/node_modules/postgres-date/index.js @@ -0,0 +1,116 @@ +'use strict' + +var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/ +var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/ +var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/ +var INFINITY = /^-?infinity$/ + +module.exports = function parseDate (isoDate) { + if (INFINITY.test(isoDate)) { + // Capitalize to Infinity before passing to Number + return Number(isoDate.replace('i', 'I')) + } + var matches = DATE_TIME.exec(isoDate) + + if (!matches) { + // Force YYYY-MM-DD dates to be parsed as local time + return getDate(isoDate) || null + } + + var isBC = !!matches[8] + var year = parseInt(matches[1], 10) + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + var hour = parseInt(matches[4], 10) + var minute = parseInt(matches[5], 10) + var second = parseInt(matches[6], 10) + + var ms = matches[7] + ms = ms ? 
1000 * parseFloat(ms) : 0 + + var date + var offset = timeZoneOffset(isoDate) + if (offset != null) { + date = new Date(Date.UTC(year, month, day, hour, minute, second, ms)) + + // Account for years from 0 to 99 being interpreted as 1900-1999 + // by Date.UTC / the multi-argument form of the Date constructor + if (is0To99(year)) { + date.setUTCFullYear(year) + } + + if (offset !== 0) { + date.setTime(date.getTime() - offset) + } + } else { + date = new Date(year, month, day, hour, minute, second, ms) + + if (is0To99(year)) { + date.setFullYear(year) + } + } + + return date +} + +function getDate (isoDate) { + var matches = DATE.exec(isoDate) + if (!matches) { + return + } + + var year = parseInt(matches[1], 10) + var isBC = !!matches[4] + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + // YYYY-MM-DD will be parsed as local time + var date = new Date(year, month, day) + + if (is0To99(year)) { + date.setFullYear(year) + } + + return date +} + +// match timezones: +// Z (UTC) +// -05 +// +06:30 +function timeZoneOffset (isoDate) { + if (isoDate.endsWith('+00')) { + return 0 + } + + var zone = TIME_ZONE.exec(isoDate.split(' ')[1]) + if (!zone) return + var type = zone[1] + + if (type === 'Z') { + return 0 + } + var sign = type === '-' ? -1 : 1 + var offset = parseInt(zone[2], 10) * 3600 + + parseInt(zone[3] || 0, 10) * 60 + + parseInt(zone[4] || 0, 10) + + return offset * sign * 1000 +} + +function bcYearToNegativeYear (year) { + // Account for numerical difference between representations of BC years + // See: https://github.com/bendrucker/postgres-date/issues/5 + return -(year - 1) +} + +function is0To99 (num) { + return num >= 0 && num < 100 +} diff --git a/node_modules/postgres-date/license b/node_modules/postgres-date/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/node_modules/postgres-date/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/postgres-date/package.json b/node_modules/postgres-date/package.json new file mode 100644 index 0000000..6fddec7 --- /dev/null +++ b/node_modules/postgres-date/package.json @@ -0,0 +1,33 @@ +{ + "name": "postgres-date", + "main": "index.js", + "version": "1.0.7", + "description": "Postgres date column parser", + "license": "MIT", + "repository": "bendrucker/postgres-date", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "keywords": [ + "postgres", + "date", + "parser" + ], + "dependencies": {}, + "devDependencies": { + "standard": "^14.0.0", + "tape": "^5.0.0" + }, + "files": [ + "index.js", + "readme.md" + ] +} diff --git a/node_modules/postgres-date/readme.md b/node_modules/postgres-date/readme.md new file mode 100644 index 0000000..095431a --- /dev/null +++ b/node_modules/postgres-date/readme.md @@ -0,0 +1,49 @@ +# postgres-date [![Build Status](https://travis-ci.org/bendrucker/postgres-date.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-date) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-date.svg)](https://greenkeeper.io/) + +> Postgres date output parser + +This package parses [date/time outputs](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT) from Postgres into Javascript `Date` objects. Its goal is to match Postgres behavior and preserve data accuracy. + +If you find a case where a valid Postgres output results in incorrect parsing (including loss of precision), please [create a pull request](https://github.com/bendrucker/postgres-date/compare) and provide a failing test. + +**Supported Postgres Versions:** `>= 9.6` + +All prior versions of Postgres are likely compatible but not officially supported. + +## Install + +``` +$ npm install --save postgres-date +``` + + +## Usage + +```js +var parse = require('postgres-date') +parse('2011-01-23 22:15:51Z') +// => 2011-01-23T22:15:51.000Z +``` + +## API + +#### `parse(isoDate)` -> `date` + +##### isoDate + +*Required* +Type: `string` + +A date string from Postgres. 
+ +## Releases + +The following semantic versioning increments will be used for changes: + +* **Major**: Removal of support for Node.js versions or Postgres versions (not expected) +* **Minor**: Unused, since Postgres returns dates in standard ISO 8601 format +* **Patch**: Any fix for parsing behavior + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-interval/index.d.ts b/node_modules/postgres-interval/index.d.ts new file mode 100644 index 0000000..f82b4c3 --- /dev/null +++ b/node_modules/postgres-interval/index.d.ts @@ -0,0 +1,20 @@ +declare namespace PostgresInterval { + export interface IPostgresInterval { + years?: number; + months?: number; + days?: number; + hours?: number; + minutes?: number; + seconds?: number; + milliseconds?: number; + + toPostgres(): string; + + toISO(): string; + toISOString(): string; + } +} + +declare function PostgresInterval(raw: string): PostgresInterval.IPostgresInterval; + +export = PostgresInterval; diff --git a/node_modules/postgres-interval/index.js b/node_modules/postgres-interval/index.js new file mode 100644 index 0000000..8ecca80 --- /dev/null +++ b/node_modules/postgres-interval/index.js @@ -0,0 +1,125 @@ +'use strict' + +var extend = require('xtend/mutable') + +module.exports = PostgresInterval + +function PostgresInterval (raw) { + if (!(this instanceof PostgresInterval)) { + return new PostgresInterval(raw) + } + extend(this, parse(raw)) +} +var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years'] +PostgresInterval.prototype.toPostgres = function () { + var filtered = properties.filter(this.hasOwnProperty, this) + + // In addition to `properties`, we need to account for fractions of seconds. + if (this.milliseconds && filtered.indexOf('seconds') < 0) { + filtered.push('seconds') + } + + if (filtered.length === 0) return '0' + return filtered + .map(function (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '') + } + + return value + ' ' + property + }, this) + .join(' ') +} + +var propertiesISOEquivalent = { + years: 'Y', + months: 'M', + days: 'D', + hours: 'H', + minutes: 'M', + seconds: 'S' +} +var dateProperties = ['years', 'months', 'days'] +var timeProperties = ['hours', 'minutes', 'seconds'] +// according to ISO 8601 +PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () { + var datePart = dateProperties + .map(buildProperty, this) + .join('') + + var timePart = timeProperties + .map(buildProperty, this) + .join('') + + return 'P' + datePart + 'T' + timePart + + function buildProperty (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/0+$/, '') + } + + return value + propertiesISOEquivalent[property] + } +} + +var NUMBER = '([+-]?\\d+)' +var YEAR = NUMBER + '\\s+years?' +var MONTH = NUMBER + '\\s+mons?' +var DAY = NUMBER + '\\s+days?' +var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?' +var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) { + return '(' + regexString + ')?' 
+}) + .join('\\s*')) + +// Positions of values in regex match +var positions = { + years: 2, + months: 4, + days: 6, + hours: 9, + minutes: 10, + seconds: 11, + milliseconds: 12 +} +// We can use negative time +var negatives = ['hours', 'minutes', 'seconds', 'milliseconds'] + +function parseMilliseconds (fraction) { + // add omitted zeroes + var microseconds = fraction + '000000'.slice(fraction.length) + return parseInt(microseconds, 10) / 1000 +} + +function parse (interval) { + if (!interval) return {} + var matches = INTERVAL.exec(interval) + var isNegative = matches[8] === '-' + return Object.keys(positions) + .reduce(function (parsed, property) { + var position = positions[property] + var value = matches[position] + // no empty string + if (!value) return parsed + // milliseconds are actually microseconds (up to 6 digits) + // with omitted trailing zeroes. + value = property === 'milliseconds' + ? parseMilliseconds(value) + : parseInt(value, 10) + // no zeros + if (!value) return parsed + if (isNegative && ~negatives.indexOf(property)) { + value *= -1 + } + parsed[property] = value + return parsed + }, {}) +} diff --git a/node_modules/postgres-interval/license b/node_modules/postgres-interval/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/node_modules/postgres-interval/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
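+
+For illustration, tracing the `parse` and `toISOString` implementations from index.js above on a typical Postgres interval output (the commented values are what that code computes):
+
+```js
+var parse = require('postgres-interval')
+
+var interval = parse('1 day 01:02:03.456')
+// { days: 1, hours: 1, minutes: 2, seconds: 3, milliseconds: 456 }
+
+interval.toISOString()
+// 'P0Y0M1DT1H2M3.456S'
+```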
diff --git a/node_modules/postgres-interval/package.json b/node_modules/postgres-interval/package.json new file mode 100644 index 0000000..95520a0 --- /dev/null +++ b/node_modules/postgres-interval/package.json @@ -0,0 +1,36 @@ +{ + "name": "postgres-interval", + "main": "index.js", + "version": "1.2.0", + "description": "Parse Postgres interval columns", + "license": "MIT", + "repository": "bendrucker/postgres-interval", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "keywords": [ + "postgres", + "interval", + "parser" + ], + "dependencies": { + "xtend": "^4.0.0" + }, + "devDependencies": { + "tape": "^4.0.0", + "standard": "^12.0.1" + }, + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ] +} diff --git a/node_modules/postgres-interval/readme.md b/node_modules/postgres-interval/readme.md new file mode 100644 index 0000000..53cda4a --- /dev/null +++ b/node_modules/postgres-interval/readme.md @@ -0,0 +1,48 @@ +# postgres-interval [![Build Status](https://travis-ci.org/bendrucker/postgres-interval.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-interval) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-interval.svg)](https://greenkeeper.io/) + +> Parse Postgres interval columns + + +## Install + +``` +$ npm install --save postgres-interval +``` + + +## Usage + +```js +var parse = require('postgres-interval') +var interval = parse('01:02:03') +//=> {hours: 1, minutes: 2, seconds: 3} +interval.toPostgres() +// 3 seconds 2 minutes 1 hours +interval.toISO() +// P0Y0M0DT1H2M3S +``` + +## API + +#### `parse(pgInterval)` -> `interval` + +##### pgInterval + +*Required* +Type: `string` + +A Postgres interval string. + +#### `interval.toPostgres()` -> `string` + +Returns an interval string. This allows the interval object to be passed into prepared statements. + +#### `interval.toISOString()` -> `string` + +Returns an [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) compliant string. + +Also available as `interval.toISO()` for backwards compatibility. + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/pstree.remy/.travis.yml b/node_modules/pstree.remy/.travis.yml new file mode 100644 index 0000000..5bf093e --- /dev/null +++ b/node_modules/pstree.remy/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +cache: + directories: + - ~/.npm +notifications: + email: false +node_js: + - '8' diff --git a/node_modules/pstree.remy/LICENSE b/node_modules/pstree.remy/LICENSE new file mode 100644 index 0000000..e83bea6 --- /dev/null +++ b/node_modules/pstree.remy/LICENSE @@ -0,0 +1,7 @@ +The MIT License (MIT) +Copyright © 2019 Remy Sharp, https://remysharp.com +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pstree.remy/README.md b/node_modules/pstree.remy/README.md new file mode 100644 index 0000000..5f44c62 --- /dev/null +++ b/node_modules/pstree.remy/README.md @@ -0,0 +1,26 @@ +# pstree.remy + +> Cross platform ps-tree (including unix flavours without ps) + +## Installation + +```shell +npm install pstree.remy +``` + +## Usage + +```js +const psTree = require('pstree.remy'); + +psTree(PID, (err, pids) => { + if (err) { + console.error(err); + } + console.log(pids); +}); + +console.log(psTree.hasPS + ? "This platform has the ps shell command" + : "This platform does not have the ps shell command"); +``` diff --git a/node_modules/pstree.remy/lib/index.js b/node_modules/pstree.remy/lib/index.js new file mode 100644 index 0000000..743e997 --- /dev/null +++ b/node_modules/pstree.remy/lib/index.js @@ -0,0 +1,37 @@ +const exec = require('child_process').exec; +const tree = require('./tree'); +const utils = require('./utils'); +var hasPS = true; + +// discover if the OS has `ps`, and therefore can use psTree +exec('ps', (error) => { + module.exports.hasPS = hasPS = !error; +}); + +module.exports = function main(pid, callback) { + if (typeof pid === 'number') { + pid = pid.toString(); + } + + if (hasPS && !process.env.NO_PS) { + return tree(pid, callback); + } + + utils + .getStat() + .then(utils.tree) + .then((tree) => utils.pidsForTree(tree, pid)) + .then((res) => + callback( + null, + res.map((p) => p.PID) + ) + ) + .catch((error) => callback(error)); +}; + +if (!module.parent) { + module.exports(process.argv[2], (e, pids) => console.log(pids)); +} + +module.exports.hasPS = hasPS; diff --git a/node_modules/pstree.remy/lib/tree.js b/node_modules/pstree.remy/lib/tree.js new file mode 100644 index 0000000..bac7cce --- /dev/null +++ b/node_modules/pstree.remy/lib/tree.js @@ -0,0 +1,37 @@ +const spawn = require('child_process').spawn; + +module.exports = function (rootPid, callback) { + const pidsOfInterest = new Set([parseInt(rootPid, 10)]); + var output = ''; + + // *nix + const ps = spawn('ps', ['-A', '-o', 'ppid,pid']); + ps.stdout.on('data', (data) => { + output += data.toString('ascii'); + }); + + ps.on('close', () => { + try { + const res = output + .split('\n') + .slice(1) + .map((_) => _.trim()) + .reduce((acc, line) => { + const pids = line.split(/\s+/); + const ppid = parseInt(pids[0], 10); + + if (pidsOfInterest.has(ppid)) { + const pid = parseInt(pids[1], 10); + acc.push(pid); + pidsOfInterest.add(pid); + } + + return acc; + }, []); + + callback(null, res); + } catch (e) { + callback(e, null); + } + }); +}; diff --git a/node_modules/pstree.remy/lib/utils.js b/node_modules/pstree.remy/lib/utils.js new file mode 100644 index 0000000..8fa5719 --- /dev/null +++ b/node_modules/pstree.remy/lib/utils.js @@ -0,0 +1,53 @@ +const spawn = require('child_process').spawn; + +module.exports = { tree, pidsForTree, getStat }; + +function getStat() { + return new Promise((resolve) => { + const command = `ls /proc | grep -E '^[0-9]+$' | xargs -I{} cat /proc/{}/stat`; + const spawned = spawn('sh', ['-c', command], {
stdio: ['pipe', 'pipe', 'pipe'], + }); + + var res = ''; + spawned.stdout.on('data', (data) => (res += data)); + spawned.on('close', () => resolve(res)); + }); +} + +function template(s) { + var stat = null; + // 'pid', 'comm', 'state', 'ppid', 'pgrp' + // %d (%s) %c %d %d + s.replace( + /(\d+) \((.*?)\)\s(.+?)\s(\d+)\s/g, + (all, PID, COMMAND, STAT, PPID) => { + stat = { PID, COMMAND, PPID, STAT }; + } + ); + + return stat; +} + +function tree(stats) { + const processes = stats.split('\n').map(template).filter(Boolean); + + return processes; +} + +function pidsForTree(tree, pid) { + if (typeof pid === 'number') { + pid = pid.toString(); + } + const parents = [pid]; + const pids = []; + + tree.forEach((proc) => { + if (parents.indexOf(proc.PPID) !== -1) { + parents.push(proc.PID); + pids.push(proc); + } + }); + + return pids; +} diff --git a/node_modules/pstree.remy/package.json b/node_modules/pstree.remy/package.json new file mode 100644 index 0000000..35c7068 --- /dev/null +++ b/node_modules/pstree.remy/package.json @@ -0,0 +1,33 @@ +{ + "name": "pstree.remy", + "version": "1.1.8", + "main": "lib/index.js", + "prettier": { + "trailingComma": "es5", + "semi": true, + "singleQuote": true + }, + "scripts": { + "test": "tap tests/*.test.js", + "_prepublish": "npm test" + }, + "keywords": [ + "ps", + "pstree", + "ps tree" + ], + "author": "Remy Sharp", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/remy/pstree.git" + }, + "devDependencies": { + "tap": "^11.0.0" + }, + "directories": { + "test": "tests" + }, + "dependencies": {}, + "description": "Collects the full tree of processes from /proc" +} diff --git a/node_modules/pstree.remy/tests/fixtures/index.js b/node_modules/pstree.remy/tests/fixtures/index.js new file mode 100644 index 0000000..4cdbcb1 --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/index.js @@ -0,0 +1,13 @@ +const spawn = require('child_process').spawn; +function run() { + spawn( + 'sh', + ['-c', 'node -e "setInterval(() => console.log(`running`), 200)"'], + { + stdio: 'pipe', + } + ); +} + +var runCallCount = process.argv[2] || 1; +for (var i = 0; i < runCallCount; i++) run(); diff --git a/node_modules/pstree.remy/tests/fixtures/out1 b/node_modules/pstree.remy/tests/fixtures/out1 new file mode 100644 index 0000000..abfe581 --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/out1 @@ -0,0 +1,10 @@ +1 (npm) S 0 1 1 34816 1 4210944 11112 0 0 0 45 8 0 0 20 0 10 0 330296 1089871872 11809 18446744073709551615 4194304 29343848 140726436642896 0 0 0 0 4096 2072112895 0 0 0 17 0 0 0 0 0 0 31441000 31537208 37314560 140726436650815 140726436650847 140726436650847 140726436650986 0 +15 (sh) S 1 1 1 34816 1 4210688 115 0 0 0 0 0 0 0 20 0 1 0 330372 4399104 187 18446744073709551615 94374393548800 94374393655428 140722913272992 0 0 0 0 0 65538 0 0 0 17 0 0 0 0 0 0 94374395756424 94374395761184 94374404673536 140722913278928 140722913278959 140722913278959 140722913284080 0 +16 (node) S 15 1 1 34816 1 4210688 6930 103 0 0 32 2 0 0 20 0 10 0 330373 1068478464 8412 18446744073709551615 4194304 29343848 140727228046064 0 0 0 0 4096 134300162 0 0 0 17 1 0 0 1 0 0 31441000 31537208 52584448 140727228050313 140727228050383 140727228050383 140727228055530 0 +27 (sh) S 16 1 1 34816 1 4210688 111 0 0 0 0 0 0 0 20 0 1 0 330410 4399104 193 18446744073709551615 94848235986944 94848236093572 140727019991184 0 0 0 0 0 65538 0 0 0 17 1 0 0 0 0 0 94848238194568 94848238199328 94848261660672 140727019998122 140727019998165 140727019998165 
140727020003312 0 +28 (node) S 27 1 1 34816 1 4210688 3576 268 0 0 12 2 0 0 20 0 10 0 330411 930213888 6760 18446744073709551615 4194304 29343848 140726559664992 0 0 0 0 4096 134300162 0 0 0 17 1 0 0 0 0 0 31441000 31537208 32591872 140726559669117 140726559669199 140726559669199 140726559674346 0 +39 (node) S 28 1 1 34816 1 4210688 47517 0 0 0 151 9 0 0 20 0 6 0 330427 985739264 31859 18446744073709551615 4194304 29343848 140737324503920 0 0 0 0 4096 134234626 0 0 0 17 0 0 0 0 0 0 31441000 31537208 51585024 140737324510060 140737324510159 140737324510159 140737324515306 0 +45 (bash) S 0 45 45 34817 50 4210944 752 256 0 0 2 0 0 0 20 0 1 0 331039 18628608 789 18446744073709551615 4194304 5242124 140724425887696 0 0 0 65536 3670020 1266777851 0 0 0 17 1 0 0 0 0 0 7341384 7388228 30310400 140724425891678 140724425891683 140724425891683 140724425891822 0 +cat: /proc/50/stat: No such file or directory +cat: /proc/51/stat: No such file or directory +52 (xargs) S 45 50 45 34817 50 4210688 179 661 0 0 0 0 0 0 20 0 1 0 331544 4608000 346 18446744073709551615 94587588550656 94587588614028 140735223856048 0 0 0 0 0 2560 0 0 0 17 1 0 0 0 0 0 94587590711464 94587590713504 94587603169280 140735223861006 140735223861035 140735223861035 140735223861225 0 diff --git a/node_modules/pstree.remy/tests/fixtures/out2 b/node_modules/pstree.remy/tests/fixtures/out2 new file mode 100644 index 0000000..3b31137 --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/out2 @@ -0,0 +1,29 @@ +cat: /proc/4087/stat: No such file or directory +cat: /proc/4088/stat: No such file or directory +1 (init) S 0 1 1 0 -1 4210944 9227 55994 29 319 7 5 68 16 20 0 1 0 1286281 33660928 855 18446744073709551615 1 1 0 0 0 0 0 4096 536962595 0 0 0 17 4 0 0 3 0 0 0 0 0 0 0 0 0 0 +1032 (ntpd) S 1 1032 1032 0 -1 4211008 178 0 1 0 0 0 0 0 20 0 1 0 1287033 25743360 1058 18446744073709551615 1 1 0 0 0 0 0 4096 27207 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +126 (irqbalance) S 1 126 126 0 -1 1077952832 1217 0 0 0 1 6 0 0 20 0 1 0 1286749 20189184 647 18446744073709551615 1 1 0 0 0 0 0 0 3 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +181 (mysqld) S 1 181 181 0 -1 4210944 6399 0 46 0 8 6 0 0 20 0 22 0 1286761 748453888 14476 18446744073709551615 1 1 0 0 0 0 552967 4096 26345 0 0 0 17 4 0 0 10 0 0 0 0 0 0 0 0 0 0 +194 (memcached) S 1 187 187 0 -1 4210944 252 0 4 0 0 0 0 0 20 0 6 0 1286766 333221888 648 18446744073709551615 1 1 0 0 0 0 0 4096 2 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +243 (dbus-daemon) S 1 243 243 0 -1 4211008 67 0 0 0 0 0 0 0 20 0 1 0 1286779 40087552 598 18446744073709551615 1 1 0 0 0 0 0 0 16385 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +254 (rsyslogd) S 1 254 254 0 -1 4211008 107 0 0 0 2 2 0 0 20 0 3 0 1286782 186601472 696 18446744073709551615 1 1 0 0 0 0 0 16781830 1133601 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +265 (systemd-logind) S 1 265 265 0 -1 4210944 276 0 2 0 0 0 0 0 20 0 1 0 1286786 35880960 720 18446744073709551615 1 1 0 0 0 0 0 0 0 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +333 (postgres) S 1 303 303 0 -1 4210688 3169 3466 15 18 0 1 1 1 20 0 1 0 1286817 156073984 5002 18446744073709551615 1 1 0 0 0 0 0 19935232 84487 0 0 0 17 5 0 0 1 0 0 0 0 0 0 0 0 0 0 +359 (postgres) S 333 359 359 0 -1 4210752 90 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16805888 2567 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +360 (postgres) S 333 360 360 0 -1 4210752 119 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16791554 16901 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +361 (postgres) S 333 361 361 0 -1 
4210752 87 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16791552 16903 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +362 (postgres) S 333 362 362 0 -1 4210752 292 0 3 0 0 0 0 0 20 0 1 0 1286822 156930048 1373 18446744073709551615 1 1 0 0 0 0 0 19927040 27271 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +363 (postgres) S 333 363 363 0 -1 4210752 82 0 0 0 0 0 0 0 20 0 1 0 1286822 115924992 887 18446744073709551615 1 1 0 0 0 0 0 16808450 5 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +4050 (npm) S 50 50 50 34817 50 4210688 5109 0 0 0 36 3 0 0 20 0 10 0 1292968 738025472 10051 18446744073709551615 4194304 33165900 140723623956256 0 0 0 0 4096 134300162 0 0 0 17 4 0 0 0 0 0 35263056 35370992 48369664 140723623964237 140723623964294 140723623964294 140723623968712 0 +4060 (sh) S 4050 50 50 34817 50 4210688 121 0 0 0 0 0 0 0 20 0 1 0 1293007 4579328 174 18446744073709551615 94347643936768 94347644049516 140735136055088 0 0 0 0 0 65538 1 0 0 17 5 0 0 0 0 0 94347646148008 94347646153216 94347660038144 140735136063095 140735136063129 140735136063129 140735136071664 0 +4061 (node) S 4060 50 50 34817 50 4210688 6501 0 0 0 42 2 0 0 20 0 6 0 1293008 705769472 10211 18446744073709551615 4194304 33165900 140730532686288 0 0 0 0 4096 2072111671 0 0 0 17 5 0 0 0 0 0 35263056 35370992 45867008 140730532695579 140730532695657 140730532695657 140730532704200 0 +4067 (node) S 4061 50 50 34817 50 4210688 6746 221 0 0 38 3 0 0 20 0 10 0 1293051 738910208 10527 18446744073709551615 4194304 33165900 140724824971632 0 0 0 0 4096 2072111671 0 0 0 17 4 0 0 0 0 0 35263056 35370992 68595712 140724824980995 140724824981063 140724824981063 140724824989640 0 +4079 (sh) S 4067 50 50 34817 50 4210688 118 0 0 0 0 0 0 0 20 0 1 0 1293092 4579328 194 18446744073709551615 94573702131712 94573702244460 140724712357120 0 0 0 0 0 65538 1 0 0 17 4 0 0 0 0 0 94573704342952 94573704348160 94573718511616 140724712361487 140724712361583 140724712361583 140724712370160 0 +4080 (node) S 4079 50 50 34817 50 4210688 2428 0 0 0 8 1 0 0 20 0 6 0 1293093 693059584 7251 18446744073709551615 4194304 33165900 140726023392816 0 0 0 0 4096 134234626 0 0 0 17 5 0 0 0 0 0 35263056 35370992 55226368 140726023396847 140726023396935 140726023396935 140726023405512 0 +4086 (sh) S 4067 50 50 34817 50 4210688 131 244 0 0 0 0 0 0 20 0 1 0 1293143 4579328 200 18446744073709551615 94347550273536 94347550386284 140737219399136 0 0 0 0 0 65538 1 0 0 17 5 0 0 0 0 0 94347552484776 94347552489984 94347554299904 140737219403308 140737219403375 140737219403375 140737219411952 0 +4089 (xargs) S 4086 50 50 34817 50 4210688 333 1924 0 0 0 0 0 0 20 0 1 0 1293143 17600512 477 18446744073709551615 4194304 4232732 140721633759248 0 0 0 0 0 0 1 0 0 17 5 0 0 0 0 0 6331920 6332980 32182272 140721633762891 140721633762920 140721633762920 140721633771497 0 +50 (bash) S 0 50 50 34817 50 4210944 43914 1032463 9 705 44 21 4213 818 20 0 1 0 1286336 42266624 3599 18446744073709551615 4194304 5173404 140732749083280 0 0 0 65536 4 1132560123 1 0 0 17 4 0 0 410 0 0 7273968 7310504 21196800 140732749086490 140732749086517 140732749086517 140732749086702 0 +79 (acpid) S 1 79 79 0 -1 4210752 46 0 0 0 0 0 0 0 20 0 1 0 1286717 4493312 407 18446744073709551615 1 1 0 0 0 0 0 4096 16391 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +83 (sshd) S 1 83 83 0 -1 4210944 354 0 27 0 0 0 0 0 20 0 1 0 1286718 62873600 1290 18446744073709551615 1 1 0 0 0 0 0 4096 81925 0 0 0 17 4 0 0 30 0 0 0 0 0 0 0 0 0 0 +94 (cron) S 1 94 94 0 -1 1077952576 103 449 0 1 0 0 0 0 20 0 1 0 1286743 24240128 559 
18446744073709551615 1 1 0 0 0 0 0 0 65537 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +95 (atd) S 1 95 95 0 -1 1077952576 28 0 0 0 0 0 0 0 20 0 1 0 1286743 19615744 41 18446744073709551615 1 1 0 0 0 0 0 0 81923 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/node_modules/pstree.remy/tests/index.test.js b/node_modules/pstree.remy/tests/index.test.js new file mode 100644 index 0000000..50096b9 --- /dev/null +++ b/node_modules/pstree.remy/tests/index.test.js @@ -0,0 +1,51 @@ +const tap = require('tap'); +const test = tap.test; +const readFile = require('fs').readFileSync; +const spawn = require('child_process').spawn; +const pstree = require('../'); +const { tree, pidsForTree, getStat } = require('../lib/utils'); + +if (process.platform !== 'darwin') { + test('reads from /proc', async (t) => { + const ps = await getStat(); + t.ok(ps.split('\n').length > 1); + }); +} + +test('tree for live env', async (t) => { + const pid = 4079; + const fixture = readFile(__dirname + '/fixtures/out2', 'utf8'); + const ps = await tree(fixture); + t.deepEqual( + pidsForTree(ps, pid).map((_) => _.PID), + ['4080'] + ); +}); + +function testTree(t, runCallCount) { + const sub = spawn('node', [`${__dirname}/fixtures/index.js`, runCallCount], { + stdio: 'pipe', + }); + setTimeout(() => { + const pid = sub.pid; + + pstree(pid, (error, pids) => { + pids.concat([pid]).forEach((p) => { + spawn('kill', ['-s', 'SIGTERM', p]); + }); + + // the fixture launches `sh` which launches node which is why we + // are looking for two processes. + // Important: IDKW but MacOS seems to skip the `sh` process. no idea. + t.equal(pids.length, runCallCount * 2); + t.end(); + }); + }, 1000); +} + +test('can read full process tree', (t) => { + testTree(t, 1); +}); +test('can read full process tree with multiple processes', (t) => { + testTree(t, 2); +}); diff --git a/node_modules/readdirp/LICENSE b/node_modules/readdirp/LICENSE new file mode 100644 index 0000000..037cbb4 --- /dev/null +++ b/node_modules/readdirp/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/readdirp/README.md b/node_modules/readdirp/README.md new file mode 100644 index 0000000..465593c --- /dev/null +++ b/node_modules/readdirp/README.md @@ -0,0 +1,122 @@ +# readdirp [![Weekly downloads](https://img.shields.io/npm/dw/readdirp.svg)](https://github.com/paulmillr/readdirp) + +Recursive version of [fs.readdir](https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback). Exposes a **stream API** and a **promise API**. + + +```sh +npm install readdirp +``` + +```javascript +const readdirp = require('readdirp'); + +// Use streams to achieve small RAM & CPU footprint. +// 1) Streams example with for-await. +for await (const entry of readdirp('.')) { + const {path} = entry; + console.log(`${JSON.stringify({path})}`); +} + +// 2) Streams example, non for-await. +// Print out all JS files along with their size within the current folder & subfolders. +readdirp('.', {fileFilter: '*.js', alwaysStat: true}) + .on('data', (entry) => { + const {path, stats: {size}} = entry; + console.log(`${JSON.stringify({path, size})}`); + }) + // Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted + .on('warn', error => console.error('non-fatal error', error)) + .on('error', error => console.error('fatal error', error)) + .on('end', () => console.log('done')); + +// 3) Promise example. More RAM and CPU than streams / for-await. +const files = await readdirp.promise('.'); +console.log(files.map(file => file.path)); + +// Other options. +readdirp('test', { + fileFilter: '*.js', + directoryFilter: ['!.git', '!*modules'], + // directoryFilter: (di) => di.basename.length === 9 + type: 'files_directories', + depth: 1 +}); +``` + +For more examples, check out the `examples` directory. + +## API + +`const stream = readdirp(root[, options])` — **Stream API** + +- Reads given root recursively and returns a `stream` of [entry infos](#entryinfo) +- Optionally can be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`). +- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir. +- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user. +- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options were passed. +- `on('end')` — we are done. Called when all entries were found and no more will be emitted. +- `on('close')` — stream is destroyed via `stream.destroy()`. + Could be useful if you want to manually abort even on a non-fatal error. + At that point the stream is no longer `readable` and no more entries, warnings or errors are emitted +- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html) + or the [stream-handbook](https://github.com/substack/stream-handbook) + +`const entries = await readdirp.promise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo). + +First argument is always `root`, path in which to start reading and recursing into subdirectories. + +### options + +- `fileFilter: ["*.js"]`: filter to include or exclude files. A `Function`, Glob string or Array of glob strings. + - **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry + - **Glob string**: a string (e.g., `*.js`) which is matched using [picomatch](https://github.com/micromatch/picomatch), so go there for more + information.
Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense. Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files. + - **Array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns, otherwise an error is thrown. + `['*.json', '*.js']` includes all JavaScript and JSON files. + `['!.git', '!node_modules']` includes all directories except '.git' and 'node_modules'. + - Directories that do not pass a filter will not be recursed into. +- `directoryFilter: ['!.git']`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into. +- `depth: 5`: depth at which to stop recursing even if more subdirectories are found +- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. +- `alwaysStat: false`: always return `stats` property for every file. Default is `false`, readdirp will return `Dirent` entries. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime` etc. Cannot be enabled on node <10.10.0. +- `lstat: false`: include symlink entries in the stream along with files. When `true`, `fs.lstat` would be used instead of `fs.stat` + +### `EntryInfo` + +Has the following properties: + +- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root) +- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found +- `basename: 'react.js'`: name of the file/directory +- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false` +- `stats: fs.Stats`: built-in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true` + +## Changelog + +- 3.5 (Oct 13, 2020) disallows recursive directory-based symlinks. + Before, it could have entered an infinite loop. +- 3.4 (Mar 19, 2020) adds support for directory-based symlinks. +- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping. +- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic. +- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. If you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions". +- 3.0 brings huge performance improvements and stream backpressure support. +- Upgrading 2.x to 3.x: + - Signature changed from `readdirp(options)` to `readdirp(root, options)` + - Replaced callback API with promise API.
+ - Renamed `entryType` option to `type` + - Renamed `entryType: 'both'` to `'files_directories'` + - `EntryInfo` + - Renamed `stat` to `stats` + - Emitted only when `alwaysStat: true` + - `dirent` is emitted instead of `stats` by default with `alwaysStat: false` + - Renamed `name` to `basename` + - Removed `parentDir` and `fullParentDir` properties +- Supported node.js versions: + - 3.x: node 8+ + - 2.x: node 0.6+ + +## License + +Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) + +MIT License, see [LICENSE](LICENSE) file. diff --git a/node_modules/readdirp/index.d.ts b/node_modules/readdirp/index.d.ts new file mode 100644 index 0000000..cbbd76c --- /dev/null +++ b/node_modules/readdirp/index.d.ts @@ -0,0 +1,43 @@ +// TypeScript Version: 3.2 + +/// <reference types="node" lib="esnext.asynciterable" /> + +import * as fs from 'fs'; +import { Readable } from 'stream'; + +declare namespace readdir { + interface EntryInfo { + path: string; + fullPath: string; + basename: string; + stats?: fs.Stats; + dirent?: fs.Dirent; + } + + interface ReaddirpOptions { + root?: string; + fileFilter?: string | string[] | ((entry: EntryInfo) => boolean); + directoryFilter?: string | string[] | ((entry: EntryInfo) => boolean); + type?: 'files' | 'directories' | 'files_directories' | 'all'; + lstat?: boolean; + depth?: number; + alwaysStat?: boolean; + } + + interface ReaddirpStream extends Readable, AsyncIterable<EntryInfo> { + read(): EntryInfo; + [Symbol.asyncIterator](): AsyncIterableIterator<EntryInfo>; + } + + function promise( + root: string, + options?: ReaddirpOptions + ): Promise<EntryInfo[]>; +} + +declare function readdir( + root: string, + options?: readdir.ReaddirpOptions +): readdir.ReaddirpStream; + +export = readdir; diff --git a/node_modules/readdirp/index.js b/node_modules/readdirp/index.js new file mode 100644 index 0000000..cf739b2 --- /dev/null +++ b/node_modules/readdirp/index.js @@ -0,0 +1,287 @@ +'use strict'; + +const fs = require('fs'); +const { Readable } = require('stream'); +const sysPath = require('path'); +const { promisify } = require('util'); +const picomatch = require('picomatch'); + +const readdir = promisify(fs.readdir); +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const realpath = promisify(fs.realpath); + +/** + * @typedef {Object} EntryInfo + * @property {String} path + * @property {String} fullPath + * @property {fs.Stats=} stats + * @property {fs.Dirent=} dirent + * @property {String} basename + */ + +const BANG = '!'; +const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR'; +const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]); +const FILE_TYPE = 'files'; +const DIR_TYPE = 'directories'; +const FILE_DIR_TYPE = 'files_directories'; +const EVERYTHING_TYPE = 'all'; +const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE]; + +const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code); +const [maj, min] = process.versions.node.split('.').slice(0, 2).map(n => Number.parseInt(n, 10)); +const wantBigintFsStats = process.platform === 'win32' && (maj > 10 || (maj === 10 && min >= 5)); + +const normalizeFilter = filter => { + if (filter === undefined) return; + if (typeof filter === 'function') return filter; + + if (typeof filter === 'string') { + const glob = picomatch(filter.trim()); + return entry => glob(entry.basename); + } + + if (Array.isArray(filter)) { + const positive = []; + const negative = []; + for (const item of filter) { + const trimmed = item.trim(); + if (trimmed.charAt(0) === BANG) { + negative.push(picomatch(trimmed.slice(1))); + } else {
positive.push(picomatch(trimmed)); + } + } + + if (negative.length > 0) { + if (positive.length > 0) { + return entry => + positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename)); + } + return entry => !negative.some(f => f(entry.basename)); + } + return entry => positive.some(f => f(entry.basename)); + } +}; + +class ReaddirpStream extends Readable { + static get defaultOptions() { + return { + root: '.', + /* eslint-disable no-unused-vars */ + fileFilter: (path) => true, + directoryFilter: (path) => true, + /* eslint-enable no-unused-vars */ + type: FILE_TYPE, + lstat: false, + depth: 2147483648, + alwaysStat: false + }; + } + + constructor(options = {}) { + super({ + objectMode: true, + autoDestroy: true, + highWaterMark: options.highWaterMark || 4096 + }); + const opts = { ...ReaddirpStream.defaultOptions, ...options }; + const { root, type } = opts; + + this._fileFilter = normalizeFilter(opts.fileFilter); + this._directoryFilter = normalizeFilter(opts.directoryFilter); + + const statMethod = opts.lstat ? lstat : stat; + // Use bigint stats if it's windows and stat() supports options (node 10+). + if (wantBigintFsStats) { + this._stat = path => statMethod(path, { bigint: true }); + } else { + this._stat = statMethod; + } + + this._maxDepth = opts.depth; + this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); + this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); + this._wantsEverything = type === EVERYTHING_TYPE; + this._root = sysPath.resolve(root); + this._isDirent = ('Dirent' in fs) && !opts.alwaysStat; + this._statsProp = this._isDirent ? 'dirent' : 'stats'; + this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent }; + + // Launch stream with one parent, the root dir. + this.parents = [this._exploreDir(root, 1)]; + this.reading = false; + this.parent = undefined; + } + + async _read(batch) { + if (this.reading) return; + this.reading = true; + + try { + while (!this.destroyed && batch > 0) { + const { path, depth, files = [] } = this.parent || {}; + + if (files.length > 0) { + const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path)); + for (const entry of await Promise.all(slice)) { + if (this.destroyed) return; + + const entryType = await this._getEntryType(entry); + if (entryType === 'directory' && this._directoryFilter(entry)) { + if (depth <= this._maxDepth) { + this.parents.push(this._exploreDir(entry.fullPath, depth + 1)); + } + + if (this._wantsDir) { + this.push(entry); + batch--; + } + } else if ((entryType === 'file' || this._includeAsFile(entry)) && this._fileFilter(entry)) { + if (this._wantsFile) { + this.push(entry); + batch--; + } + } + } + } else { + const parent = this.parents.pop(); + if (!parent) { + this.push(null); + break; + } + this.parent = await parent; + if (this.destroyed) return; + } + } + } catch (error) { + this.destroy(error); + } finally { + this.reading = false; + } + } + + async _exploreDir(path, depth) { + let files; + try { + files = await readdir(path, this._rdOptions); + } catch (error) { + this._onError(error); + } + return { files, depth, path }; + } + + async _formatEntry(dirent, path) { + let entry; + try { + const basename = this._isDirent ? dirent.name : dirent; + const fullPath = sysPath.resolve(sysPath.join(path, basename)); + entry = { path: sysPath.relative(this._root, fullPath), fullPath, basename }; + entry[this._statsProp] = this._isDirent ? 
dirent : await this._stat(fullPath); + } catch (err) { + this._onError(err); + } + return entry; + } + + _onError(err) { + if (isNormalFlowError(err) && !this.destroyed) { + this.emit('warn', err); + } else { + this.destroy(err); + } + } + + async _getEntryType(entry) { + // entry may be undefined, because a warning or an error were emitted + // and the statsProp is undefined + const stats = entry && entry[this._statsProp]; + if (!stats) { + return; + } + if (stats.isFile()) { + return 'file'; + } + if (stats.isDirectory()) { + return 'directory'; + } + if (stats && stats.isSymbolicLink()) { + const full = entry.fullPath; + try { + const entryRealPath = await realpath(full); + const entryRealPathStats = await lstat(entryRealPath); + if (entryRealPathStats.isFile()) { + return 'file'; + } + if (entryRealPathStats.isDirectory()) { + const len = entryRealPath.length; + if (full.startsWith(entryRealPath) && full.substr(len, 1) === sysPath.sep) { + const recursiveError = new Error( + `Circular symlink detected: "${full}" points to "${entryRealPath}"` + ); + recursiveError.code = RECURSIVE_ERROR_CODE; + return this._onError(recursiveError); + } + return 'directory'; + } + } catch (error) { + this._onError(error); + } + } + } + + _includeAsFile(entry) { + const stats = entry && entry[this._statsProp]; + + return stats && this._wantsEverything && !stats.isDirectory(); + } +} + +/** + * @typedef {Object} ReaddirpArguments + * @property {Function=} fileFilter + * @property {Function=} directoryFilter + * @property {String=} type + * @property {Number=} depth + * @property {String=} root + * @property {Boolean=} lstat + * @property {Boolean=} bigint + */ + +/** + * Main function which ends up calling readdirRec and reads all files and directories in given root recursively. + * @param {String} root Root directory + * @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth + */ +const readdirp = (root, options = {}) => { + let type = options.entryType || options.type; + if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility + if (type) options.type = type; + if (!root) { + throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)'); + } else if (typeof root !== 'string') { + throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)'); + } else if (type && !ALL_TYPES.includes(type)) { + throw new Error(`readdirp: Invalid type passed. 
Use one of ${ALL_TYPES.join(', ')}`); + } + + options.root = root; + return new ReaddirpStream(options); +}; + +const readdirpPromise = (root, options = {}) => { + return new Promise((resolve, reject) => { + const files = []; + readdirp(root, options) + .on('data', entry => files.push(entry)) + .on('end', () => resolve(files)) + .on('error', error => reject(error)); + }); +}; + +readdirp.promise = readdirpPromise; +readdirp.ReaddirpStream = ReaddirpStream; +readdirp.default = readdirp; + +module.exports = readdirp; diff --git a/node_modules/readdirp/package.json b/node_modules/readdirp/package.json new file mode 100644 index 0000000..dba5388 --- /dev/null +++ b/node_modules/readdirp/package.json @@ -0,0 +1,122 @@ +{ + "name": "readdirp", + "description": "Recursive version of fs.readdir with streaming API.", + "version": "3.6.0", + "homepage": "https://github.com/paulmillr/readdirp", + "repository": { + "type": "git", + "url": "git://github.com/paulmillr/readdirp.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/paulmillr/readdirp/issues" + }, + "author": "Thorsten Lorenz (thlorenz.com)", + "contributors": [ + "Thorsten Lorenz (thlorenz.com)", + "Paul Miller (https://paulmillr.com)" + ], + "main": "index.js", + "engines": { + "node": ">=8.10.0" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "recursive", + "fs", + "stream", + "streams", + "readdir", + "filesystem", + "find", + "filter" + ], + "scripts": { + "dtslint": "dtslint", + "nyc": "nyc", + "mocha": "mocha --exit", + "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", + "test": "npm run lint && nyc npm run mocha" + }, + "dependencies": { + "picomatch": "^2.2.1" + }, + "devDependencies": { + "@types/node": "^14", + "chai": "^4.2", + "chai-subset": "^1.6", + "dtslint": "^3.3.0", + "eslint": "^7.0.0", + "mocha": "^7.1.1", + "nyc": "^15.0.0", + "rimraf": "^3.0.0", + "typescript": "^4.0.3" + }, + "nyc": { + "reporter": [ + "html", + "text" + ] + }, + "eslintConfig": { + "root": true, + "extends": "eslint:recommended", + "parserOptions": { + "ecmaVersion": 9, + "sourceType": "script" + }, + "env": { + "node": true, + "es6": true + }, + "rules": { + "array-callback-return": "error", + "no-empty": [ + "error", + { + "allowEmptyCatch": true + } + ], + "no-else-return": [ + "error", + { + "allowElseIf": false + } + ], + "no-lonely-if": "error", + "no-var": "error", + "object-shorthand": "error", + "prefer-arrow-callback": [ + "error", + { + "allowNamedFunctions": true + } + ], + "prefer-const": [ + "error", + { + "ignoreReadBeforeAssign": true + } + ], + "prefer-destructuring": [ + "error", + { + "object": true, + "array": false + } + ], + "prefer-spread": "error", + "prefer-template": "error", + "radix": "error", + "semi": "error", + "strict": "error", + "quotes": [ + "error", + "single" + ] + } + } +} diff --git a/node_modules/semver/LICENSE b/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md new file mode 100644 index 0000000..ede7b7d --- /dev/null +++ b/node_modules/semver/README.md @@ -0,0 +1,654 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about if +you'd like to minimize your footprint. + +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = require('semver/ranges/ltr') +const 
semverIntersects = require('semver/ranges/intersects') +const semverSimplifyRange = require('semver/ranges/simplify') +const semverRangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] <version> [<version> [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range <range> + Print versions that match the specified range. + +-i --increment [<level>] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid <identifier> + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-n <0|1> + This is the base to be used for the prerelease identifier. + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +<https://semver.org/>. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` that specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and +would match the versions `2.0.0` and `3.1.0`, but not the versions +`1.0.1` or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag.
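+
+The behavior described in the next paragraph, expressed as a short sketch with the `satisfies` function and the `includePrerelease` option documented later in this file:
+
+```js
+const semver = require('semver')
+
+semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true (same [major, minor, patch] tuple)
+semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false (prerelease of a different tuple)
+semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true (not a prerelease at all)
+
+// opt in to matching prereleases across tuples
+semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true }) // true
+```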
+ +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. +Version `3.4.5` *would* satisfy the range because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose of this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range-matching +semantics. + +Second, a user who has opted into using a prerelease version has +indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for range-matching) +by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +#### Prerelease Identifier Base + +The method `.inc` takes an optional parameter 'identifierBase' string +that will let you let your prerelease number as zero-based or one-based. +Set to `false` to omit the prerelease number altogether. +If you do not specify this parameter, it will default to zero-based. + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', '1') +// '1.2.4-beta.1' +``` + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', false) +// '1.2.4-beta' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n 1 +1.2.4-beta.1 +``` + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n false +1.2.4-beta +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. 
+ +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. 
+ +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose`: Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease`: Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release, options, identifier, identifierBase)`: + Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, `prerelease` will work the + same as `prepatch`. It increments the patch version and then makes a + prerelease. If the input version is already a prerelease it simply + increments it. + * `identifier` can be used to prefix `premajor`, `preminor`, + `prepatch`, or `prerelease` version increments. `identifierBase` + is the base to be used for the `prerelease` identifier. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. 
+* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. +* `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`. +* `diff(v1, v2)`: Returns the difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Sorting + +* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild` + function. +* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on + the `compareBuild` function in descending order. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can match + the given range. +* `gtr(version, range)`: Return `true` if the version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if the version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the range comparators intersect. +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in the `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. 
It looks for the first digit in a string and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +If the `options.includePrerelease` flag is set, then the `coerce` result will contain +prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2` +will preserve prerelease `rc.1` and build `rev.2` in the result. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `'2.1.5'` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Constants + +As a convenience, helper constants are exported to provide information about what `node-semver` supports: + +### `RELEASE_TYPES` + +- major +- premajor +- minor +- preminor +- patch +- prepatch +- prerelease + +``` +const semver = require('semver'); + +if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { + console.log('This is a valid release type!'); +} else { + console.warn('This is NOT a valid release type!'); +} +``` + +### `SEMVER_SPEC_VERSION` + +2.0.0 + +``` +const semver = require('semver'); + +console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); +``` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. 
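+
+For instance, a build that only needs range matching and coercion might load
+just those two entry points (a minimal sketch; both paths appear in the list
+that follows):
+
+```js
+const satisfies = require('semver/functions/satisfies')
+const coerce = require('semver/functions/coerce')
+
+// coerce() pulls `3.4.0` out of the surrounding text (see Coercion above)
+const version = coerce('v3.4 replaces v3.3.1').version
+satisfies(version, '>=3.0.0 <4.0.0') // true
+```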
+ +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/simplify')` +* `require('semver/ranges/subset')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` + diff --git a/node_modules/semver/bin/semver.js b/node_modules/semver/bin/semver.js new file mode 100755 index 0000000..f62b566 --- /dev/null +++ b/node_modules/semver/bin/semver.js @@ -0,0 +1,188 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
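+//
+// Illustrative invocations (the exact behavior follows from the option
+// handling in main() below):
+//   semver 1.2.3 0.8.1 -r ">=1.0.0"    prints only the satisfying version, 1.2.3
+//   semver 1.2.3 -i minor              prints the incremented version, 1.3.0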
+
+const argv = process.argv.slice(2)
+
+let versions = []
+
+const range = []
+
+let inc = null
+
+const version = require('../package.json').version
+
+let loose = false
+
+let includePrerelease = false
+
+let coerce = false
+
+let rtl = false
+
+let identifier
+
+let identifierBase
+
+const semver = require('../')
+const parseOptions = require('../internal/parse-options')
+
+let reverse = false
+
+let options = {}
+
+const main = () => {
+  if (!argv.length) {
+    return help()
+  }
+  while (argv.length) {
+    let a = argv.shift()
+    const indexOfEqualSign = a.indexOf('=')
+    if (indexOfEqualSign !== -1) {
+      const value = a.slice(indexOfEqualSign + 1)
+      a = a.slice(0, indexOfEqualSign)
+      argv.unshift(value)
+    }
+    switch (a) {
+      case '-rv': case '-rev': case '--rev': case '--reverse':
+        reverse = true
+        break
+      case '-l': case '--loose':
+        loose = true
+        break
+      case '-p': case '--include-prerelease':
+        includePrerelease = true
+        break
+      case '-v': case '--version':
+        versions.push(argv.shift())
+        break
+      case '-i': case '--inc': case '--increment':
+        switch (argv[0]) {
+          case 'major': case 'minor': case 'patch': case 'prerelease':
+          case 'premajor': case 'preminor': case 'prepatch':
+            inc = argv.shift()
+            break
+          default:
+            inc = 'patch'
+            break
+        }
+        break
+      case '--preid':
+        identifier = argv.shift()
+        break
+      case '-r': case '--range':
+        range.push(argv.shift())
+        break
+      case '-n':
+        identifierBase = argv.shift()
+        if (identifierBase === 'false') {
+          identifierBase = false
+        }
+        break
+      case '-c': case '--coerce':
+        coerce = true
+        break
+      case '--rtl':
+        rtl = true
+        break
+      case '--ltr':
+        rtl = false
+        break
+      case '-h': case '--help': case '-?':
+        return help()
+      default:
+        versions.push(a)
+        break
+    }
+  }
+
+  options = parseOptions({ loose, includePrerelease, rtl })
+
+  versions = versions.map((v) => {
+    return coerce ? (semver.coerce(v, options) || { version: v }).version : v
+  }).filter((v) => {
+    return semver.valid(v)
+  })
+  if (!versions.length) {
+    return fail()
+  }
+  if (inc && (versions.length !== 1 || range.length)) {
+    return failInc()
+  }
+
+  for (let i = 0, l = range.length; i < l; i++) {
+    versions = versions.filter((v) => {
+      return semver.satisfies(v, range[i], options)
+    })
+    if (!versions.length) {
+      return fail()
+    }
+  }
+  versions
+    .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options))
+    .map(v => semver.clean(v, options))
+    .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v)
+    .forEach(v => console.log(v))
+}
+
+const failInc = () => {
+  console.error('--inc can only be used on a single version with no range')
+  fail()
+}
+
+const fail = () => process.exit(1)
+
+const help = () => console.log(
+`SemVer ${version}
+
+A JavaScript implementation of the https://semver.org/ specification
+Copyright Isaac Z. Schlueter
+
+Usage: semver [options] <version> [<version> [...]]
+Prints valid versions sorted by SemVer precedence
+
+Options:
+-r --range <range>
+        Print versions that match the specified range.
+
+-i --increment [<level>]
+        Increment a version by the specified level. Level can
+        be one of: major, minor, patch, premajor, preminor,
+        prepatch, or prerelease. Default level is 'patch'.
+        Only one version may be specified.
+
+--preid <identifier>
+        Identifier to be used to prefix premajor, preminor,
+        prepatch or prerelease version increments.
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. + Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/node_modules/semver/classes/comparator.js b/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..3d39c0e --- /dev/null +++ b/node_modules/semver/classes/comparator.js @@ -0,0 +1,141 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/node_modules/semver/classes/index.js b/node_modules/semver/classes/index.js new file mode 100644 index 0000000..5e3f5c9 --- /dev/null +++ b/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js new file mode 100644 index 0000000..ceee231 --- /dev/null +++ b/node_modules/semver/classes/range.js @@ -0,0 +1,554 @@ +const SPACE_CHARACTERS = /\s+/g + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.formatted = undefined + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = 
!!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range.trim().replace(SPACE_CHARACTERS, ' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.formatted = undefined + } + + get range () { + if (this.formatted === undefined) { + this.formatted = '' + for (let i = 0; i < this.set.length; i++) { + if (i > 0) { + this.formatted += '||' + } + const comps = this.set[i] + for (let k = 0; k < comps.length; k++) { + if (k > 0) { + this.formatted += ' ' + } + this.formatted += comps[k].toString().trim() + } + } + } + return this.formatted + } + + format () { + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
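+    // Each space-separated piece is then desugared by parseComparator (defined below):
+    // carets, tildes, x-ranges, and stars become primitive (>=, <, etc.) comparators
+    // before Comparator objects are built.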
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('../internal/lrucache') +const cache = new LRU() + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +// TODO build? +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..13e66ce --- /dev/null +++ b/node_modules/semver/classes/semver.js @@ -0,0 +1,302 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('build compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/node_modules/semver/functions/clean.js b/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..811fe6b --- /dev/null +++ b/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/node_modules/semver/functions/cmp.js b/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..4011909 --- /dev/null +++ b/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/node_modules/semver/functions/coerce.js b/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..b378dce --- /dev/null +++ b/node_modules/semver/functions/coerce.js @@ -0,0 +1,60 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/node_modules/semver/functions/compare-build.js b/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..9eb881b --- /dev/null +++ b/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/node_modules/semver/functions/compare-loose.js b/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..4881fbe --- /dev/null +++ b/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/node_modules/semver/functions/compare.js b/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..748b7af --- /dev/null +++ b/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/node_modules/semver/functions/diff.js b/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..fc224e3 --- /dev/null +++ b/node_modules/semver/functions/diff.js @@ -0,0 +1,65 @@ +const parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/node_modules/semver/functions/eq.js b/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..271fed9 --- /dev/null +++ b/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/node_modules/semver/functions/gt.js b/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..d9b2156 --- /dev/null +++ b/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/node_modules/semver/functions/gte.js b/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..5aeaa63 --- /dev/null +++ b/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/node_modules/semver/functions/inc.js b/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..7670b1b --- /dev/null +++ b/node_modules/semver/functions/inc.js @@ -0,0 +1,19 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/node_modules/semver/functions/lt.js b/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..b440ab7 --- /dev/null +++ b/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/node_modules/semver/functions/lte.js b/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..6dcc956 --- /dev/null +++ b/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/node_modules/semver/functions/major.js b/node_modules/semver/functions/major.js new file mode 100644 index 0000000..4283165 --- /dev/null +++ b/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/node_modules/semver/functions/minor.js b/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..57b3455 --- /dev/null +++ b/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/node_modules/semver/functions/neq.js b/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..f944c01 --- /dev/null +++ b/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git 
a/node_modules/semver/functions/parse.js b/node_modules/semver/functions/parse.js new file mode 100644 index 0000000..459b3b1 --- /dev/null +++ b/node_modules/semver/functions/parse.js @@ -0,0 +1,16 @@ +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/node_modules/semver/functions/patch.js b/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..63afca2 --- /dev/null +++ b/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/node_modules/semver/functions/prerelease.js b/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..06aa132 --- /dev/null +++ b/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease diff --git a/node_modules/semver/functions/rcompare.js b/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..0ac509e --- /dev/null +++ b/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/node_modules/semver/functions/rsort.js b/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..82404c5 --- /dev/null +++ b/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/node_modules/semver/functions/satisfies.js b/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..50af1c1 --- /dev/null +++ b/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/node_modules/semver/functions/sort.js b/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..4d10917 --- /dev/null +++ b/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/node_modules/semver/functions/valid.js b/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..f27bae1 --- /dev/null +++ b/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/node_modules/semver/index.js b/node_modules/semver/index.js new file mode 100644 index 0000000..86d42ac --- /dev/null +++ b/node_modules/semver/index.js @@ -0,0 +1,89 @@ +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/constants.js b/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..94be1c5 --- /dev/null +++ b/node_modules/semver/internal/constants.js @@ -0,0 +1,35 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. 
The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/node_modules/semver/internal/debug.js b/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..1c00e13 --- /dev/null +++ b/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/node_modules/semver/internal/identifiers.js b/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..e612d0a --- /dev/null +++ b/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/lrucache.js b/node_modules/semver/internal/lrucache.js new file mode 100644 index 0000000..6d89ec9 --- /dev/null +++ b/node_modules/semver/internal/lrucache.js @@ -0,0 +1,40 @@ +class LRUCache { + constructor () { + this.max = 1000 + this.map = new Map() + } + + get (key) { + const value = this.map.get(key) + if (value === undefined) { + return undefined + } else { + // Remove the key from the map and add it to the end + this.map.delete(key) + this.map.set(key, value) + return value + } + } + + delete (key) { + return this.map.delete(key) + } + + set (key, value) { + const deleted = this.delete(key) + + if (!deleted && value !== undefined) { + // If cache is full, delete the least recently used item + if (this.map.size >= this.max) { + const firstKey = this.map.keys().next().value + this.delete(firstKey) + } + + this.map.set(key, value) + } + + return this + } +} + +module.exports = LRUCache diff --git a/node_modules/semver/internal/parse-options.js b/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..10d64ce --- /dev/null +++ b/node_modules/semver/internal/parse-options.js @@ -0,0 +1,15 @@ +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js new file mode 100644 index 0000000..fd8920e --- /dev/null +++ b/node_modules/semver/internal/re.js @@ -0,0 +1,217 @@ +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = 
[] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. 
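// Illustrative examples (not from the upstream file) of what the strict and
// loose tokens defined just below accept:
//   re[t.FULL].test('1.2.3-alpha.1+build.5')  // true
//   re[t.FULL].test('v1.2.3')                 // true  (optional leading "v")
//   re[t.FULL].test('01.2.3')                 // false (leading zeros rejected)
//   re[t.LOOSE].test('=1.2.3')                // true  (loose form tolerates "=")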
+ +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
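// Illustrative: a hyphen range such as "1.2.3 - 1.5.0" is captured by the token
// below and later rewritten by the Range class into the equivalent comparator
// pair ">=1.2.3 <=1.5.0" (standard semver hyphen-range semantics).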
+createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 0000000..663d370 --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,77 @@ +{ + "name": "semver", + "version": "7.6.3", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "benchmark": "^2.1.4", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf", + "/benchmarks" + ], + "publish": "true" + } +} diff --git a/node_modules/semver/preload.js b/node_modules/semver/preload.js new file mode 100644 index 0000000..947cd4f --- /dev/null +++ b/node_modules/semver/preload.js @@ -0,0 +1,2 @@ +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/node_modules/semver/range.bnf b/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/semver/ranges/gtr.js b/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..db7e355 --- /dev/null +++ b/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. 
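// Illustrative usage, assuming the documented API:
//   gtr('2.0.0', '^1.2.3')  // true  (2.0.0 is above everything ^1.2.3 allows)
//   gtr('1.5.0', '^1.2.3')  // false (1.5.0 satisfies the range)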
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/node_modules/semver/ranges/intersects.js b/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..e0e9b7c --- /dev/null +++ b/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/node_modules/semver/ranges/ltr.js b/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..528a885 --- /dev/null +++ b/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/node_modules/semver/ranges/max-satisfying.js b/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..6e3d993 --- /dev/null +++ b/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/node_modules/semver/ranges/min-satisfying.js b/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..9b60974 --- /dev/null +++ b/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/node_modules/semver/ranges/min-version.js b/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..350e1f7 --- /dev/null +++ b/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
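      // Illustrative: for a '>' comparator the smallest satisfiable version is
      // one step above it ('>1.2.3' yields 1.2.4, '>1.2.3-alpha' yields
      // 1.2.3-alpha.0); the bump below computes that candidate before it is
      // compared with gt().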
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/node_modules/semver/ranges/outside.js b/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ae99b10 --- /dev/null +++ b/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/node_modules/semver/ranges/simplify.js b/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..618d5b6 --- /dev/null +++ b/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
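// Illustrative: if the versions ['1.0.0', '1.0.1', '1.0.2'] are all covered by
// the range '1.0.0 || 1.0.1 || 1.0.2', the sorted list forms one contiguous run
// starting at the lowest known version, so the simplified form is just '*';
// whenever the simplified string would be longer than the original, the
// original range is returned unchanged.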
+const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? simplified : range +} diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..1e5c268 --- /dev/null +++ b/node_modules/semver/ranges/subset.js @@ -0,0 +1,247 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, 
simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? 
gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/node_modules/semver/ranges/to-comparators.js b/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..6c8bc7e --- /dev/null +++ b/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/node_modules/semver/ranges/valid.js b/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..365f356 --- /dev/null +++ b/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/node_modules/simple-update-notifier/LICENSE b/node_modules/simple-update-notifier/LICENSE new file mode 100644 index 0000000..1e0b0c1 --- /dev/null +++ b/node_modules/simple-update-notifier/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Alex Brazier + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/simple-update-notifier/README.md b/node_modules/simple-update-notifier/README.md new file mode 100644 index 0000000..ec17794 --- /dev/null +++ b/node_modules/simple-update-notifier/README.md @@ -0,0 +1,82 @@ +# simple-update-notifier [![GitHub stars](https://img.shields.io/github/stars/alexbrazier/simple-update-notifier?label=Star%20Project&style=social)](https://github.com/alexbrazier/simple-update-notifier/stargazers) + +[![CI](https://github.com/alexbrazier/simple-update-notifier/workflows/Build%20and%20Deploy/badge.svg)](https://github.com/alexbrazier/simple-update-notifier/actions) +[![Dependencies](https://img.shields.io/librariesio/release/npm/simple-update-notifier)](https://www.npmjs.com/package/simple-update-notifier?activeTab=dependencies) +[![npm](https://img.shields.io/npm/v/simple-update-notifier)](https://www.npmjs.com/package/simple-update-notifier) +[![npm bundle size](https://img.shields.io/bundlephobia/min/simple-update-notifier)](https://bundlephobia.com/result?p=simple-update-notifier) +[![npm downloads](https://img.shields.io/npm/dw/simple-update-notifier)](https://www.npmjs.com/package/simple-update-notifier) +[![License](https://img.shields.io/npm/l/simple-update-notifier)](./LICENSE) + +Simple update notifier to check for npm updates for cli applications. + +Demo in terminal showing an update is required + +Checks for updates for an npm module and outputs to the command line if there is one available. The result is cached for the specified time so it doesn't check every time the app runs. 
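For example, a minimal CommonJS sketch (the option values here are illustrative; the full option list is documented under Options below) that lowers the cache interval so the registry is queried at most once an hour:

```js
const simpleUpdateNotifier = require('simple-update-notifier');
const packageJson = require('./package.json');

// Check the npm registry for a newer version at most once per hour and print a
// boxed notice when one is found; `debug` explains any decision to stay quiet.
simpleUpdateNotifier({
  pkg: packageJson,
  updateCheckInterval: 1000 * 60 * 60, // 1 hour
  debug: true,
});
```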
+ +## Install + +```bash +npm install simple-update-notifier +OR +yarn add simple-update-notifier +``` + +## Usage + +```js +import updateNotifier from 'simple-update-notifier'; +import packageJson from './package.json' assert { type: 'json' }; + +updateNotifier({ pkg: packageJson }); +``` + +### Options + +#### pkg + +Type: `object` + +##### name + +_Required_\ +Type: `string` + +##### version + +_Required_\ +Type: `string` + +#### updateCheckInterval + +Type: `number`\ +Default: `1000 * 60 * 60 * 24` _(1 day)_ + +How often to check for updates. + +#### shouldNotifyInNpmScript + +Type: `boolean`\ +Default: `false` + +Allows notification to be shown when running as an npm script. + +#### distTag + +Type: `string`\ +Default: `'latest'` + +Which [dist-tag](https://docs.npmjs.com/adding-dist-tags-to-packages) to use to find the latest version. + +#### alwaysRun + +Type: `boolean`\ +Default: `false` + +When set, `updateCheckInterval` will not be respected and a check for an update will always be performed. + +#### debug + +Type: `boolean`\ +Default: `false` + +When set, logs explaining the decision will be output to `stderr` whenever the module opts to not print an update notification diff --git a/node_modules/simple-update-notifier/build/index.d.ts b/node_modules/simple-update-notifier/build/index.d.ts new file mode 100644 index 0000000..60f53e0 --- /dev/null +++ b/node_modules/simple-update-notifier/build/index.d.ts @@ -0,0 +1,13 @@ +interface IUpdate { + pkg: { + name: string; + version: string; + }; + updateCheckInterval?: number; + shouldNotifyInNpmScript?: boolean; + distTag?: string; + alwaysRun?: boolean; + debug?: boolean; +} +declare const simpleUpdateNotifier: (args: IUpdate) => Promise; +export { simpleUpdateNotifier as default }; diff --git a/node_modules/simple-update-notifier/build/index.js b/node_modules/simple-update-notifier/build/index.js new file mode 100644 index 0000000..d7c3cde --- /dev/null +++ b/node_modules/simple-update-notifier/build/index.js @@ -0,0 +1,210 @@ +'use strict'; + +var process$1 = require('process'); +var semver = require('semver'); +var os = require('os'); +var path = require('path'); +var fs = require('fs'); +var https = require('https'); + +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + + +function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +var packageJson = process$1.env.npm_package_json; +var userAgent = process$1.env.npm_config_user_agent; +var isNpm6 = Boolean(userAgent && userAgent.startsWith('npm')); +var isNpm7 = Boolean(packageJson && packageJson.endsWith('package.json')); +var isNpm = isNpm6 || isNpm7; +var isYarn = Boolean(userAgent && userAgent.startsWith('yarn')); +var isNpmOrYarn = isNpm || isYarn; + +var homeDirectory = os.homedir(); +var configDir = process.env.XDG_CONFIG_HOME || + path.join(homeDirectory, '.config', 'simple-update-notifier'); +var getConfigFile = function (packageName) { + return path.join(configDir, "".concat(packageName.replace('@', '').replace('/', '__'), ".json")); +}; +var createConfigDir = function () { + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }); + } +}; +var getLastUpdate = function (packageName) { + var configFile = getConfigFile(packageName); + try { + if (!fs.existsSync(configFile)) { + return undefined; + } + var file = JSON.parse(fs.readFileSync(configFile, 'utf8')); + return file.lastUpdateCheck; + } + catch (_a) { + return undefined; + } +}; +var saveLastUpdate = function (packageName) { + var configFile = getConfigFile(packageName); + fs.writeFileSync(configFile, JSON.stringify({ lastUpdateCheck: new Date().getTime() })); +}; + +var getDistVersion = function (packageName, distTag) { return __awaiter(void 0, void 0, void 0, function () { + var url; + return __generator(this, function (_a) { + url = "https://registry.npmjs.org/-/package/".concat(packageName, "/dist-tags"); + return [2 /*return*/, new Promise(function (resolve, reject) { + https + .get(url, function (res) { + var body = ''; + res.on('data', function (chunk) { return (body += chunk); }); + res.on('end', function () { + try { + var json = JSON.parse(body); + var version = json[distTag]; + if (!version) { + reject(new 
Error('Error getting version')); + } + resolve(version); + } + catch (_a) { + reject(new Error('Could not parse version response')); + } + }); + }) + .on('error', function (err) { return reject(err); }); + })]; + }); +}); }; + +var hasNewVersion = function (_a) { + var pkg = _a.pkg, _b = _a.updateCheckInterval, updateCheckInterval = _b === void 0 ? 1000 * 60 * 60 * 24 : _b, _c = _a.distTag, distTag = _c === void 0 ? 'latest' : _c, alwaysRun = _a.alwaysRun, debug = _a.debug; + return __awaiter(void 0, void 0, void 0, function () { + var lastUpdateCheck, latestVersion; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + createConfigDir(); + lastUpdateCheck = getLastUpdate(pkg.name); + if (!(alwaysRun || + !lastUpdateCheck || + lastUpdateCheck < new Date().getTime() - updateCheckInterval)) return [3 /*break*/, 2]; + return [4 /*yield*/, getDistVersion(pkg.name, distTag)]; + case 1: + latestVersion = _d.sent(); + saveLastUpdate(pkg.name); + if (semver.gt(latestVersion, pkg.version)) { + return [2 /*return*/, latestVersion]; + } + else if (debug) { + console.error("Latest version (".concat(latestVersion, ") not newer than current version (").concat(pkg.version, ")")); + } + return [3 /*break*/, 3]; + case 2: + if (debug) { + console.error("Too recent to check for a new update. simpleUpdateNotifier() interval set to ".concat(updateCheckInterval, "ms but only ").concat(new Date().getTime() - lastUpdateCheck, "ms since last check.")); + } + _d.label = 3; + case 3: return [2 /*return*/, false]; + } + }); + }); +}; + +var borderedText = function (text) { + var lines = text.split('\n'); + var width = Math.max.apply(Math, lines.map(function (l) { return l.length; })); + var res = ["\u250C".concat('─'.repeat(width + 2), "\u2510")]; + for (var _i = 0, lines_1 = lines; _i < lines_1.length; _i++) { + var line = lines_1[_i]; + res.push("\u2502 ".concat(line.padEnd(width), " \u2502")); + } + res.push("\u2514".concat('─'.repeat(width + 2), "\u2518")); + return res.join('\n'); +}; + +var simpleUpdateNotifier = function (args) { return __awaiter(void 0, void 0, void 0, function () { + var latestVersion, err_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!args.alwaysRun && + (!process.stdout.isTTY || (isNpmOrYarn && !args.shouldNotifyInNpmScript))) { + if (args.debug) { + console.error('Opting out of running simpleUpdateNotifier()'); + } + return [2 /*return*/]; + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, hasNewVersion(args)]; + case 2: + latestVersion = _a.sent(); + if (latestVersion) { + console.error(borderedText("New version of ".concat(args.pkg.name, " available!\nCurrent Version: ").concat(args.pkg.version, "\nLatest Version: ").concat(latestVersion))); + } + return [3 /*break*/, 4]; + case 3: + err_1 = _a.sent(); + // Catch any network errors or cache writing errors so module doesn't cause a crash + if (args.debug && err_1 instanceof Error) { + console.error('Unexpected error in simpleUpdateNotifier():', err_1); + } + return [3 /*break*/, 4]; + case 4: return [2 /*return*/]; + } + }); +}); }; + +module.exports = simpleUpdateNotifier; diff --git a/node_modules/simple-update-notifier/package.json b/node_modules/simple-update-notifier/package.json new file mode 100644 index 0000000..4d710a7 --- /dev/null +++ b/node_modules/simple-update-notifier/package.json @@ -0,0 +1,100 @@ +{ + "name": "simple-update-notifier", + "version": "2.0.0", + "description": "Simple update notifier to check for npm updates for cli 
applications", + "main": "build/index.js", + "types": "build/index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/alexbrazier/simple-update-notifier.git" + }, + "homepage": "https://github.com/alexbrazier/simple-update-notifier.git", + "author": "alexbrazier", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "jest src --noStackTrace", + "build": "rollup -c rollup.config.js --bundleConfigAsCjs", + "prettier:check": "prettier --check src/**/*.ts", + "prettier": "prettier --write src/**/*.ts", + "eslint": "eslint src/**/*.ts", + "lint": "yarn prettier:check && yarn eslint", + "prepare": "yarn lint && yarn build", + "release": "release-it" + }, + "dependencies": { + "semver": "^7.5.3" + }, + "devDependencies": { + "@babel/preset-env": "^7.22.5", + "@babel/preset-typescript": "^7.22.5", + "@release-it/conventional-changelog": "^5.1.1", + "@types/jest": "^29.5.2", + "@types/node": "^20.3.1", + "@typescript-eslint/eslint-plugin": "^5.60.0", + "@typescript-eslint/parser": "^5.60.0", + "eslint": "^8.43.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-prettier": "^4.0.0", + "jest": "^29.5.0", + "prettier": "^2.8.8", + "release-it": "^15.11.0", + "rollup": "^3.25.2", + "rollup-plugin-ts": "^3.2.0", + "typescript": "^5.1.3" + }, + "resolutions": { + "semver": "^7.5.3" + }, + "publishConfig": { + "registry": "https://registry.npmjs.org/" + }, + "files": [ + "build", + "src" + ], + "release-it": { + "git": { + "commitMessage": "chore: release ${version}", + "tagName": "v${version}" + }, + "npm": { + "publish": true + }, + "github": { + "release": true + }, + "plugins": { + "@release-it/conventional-changelog": { + "preset": "angular", + "infile": "CHANGELOG.md" + } + } + }, + "eslintConfig": { + "plugins": [ + "@typescript-eslint", + "prettier" + ], + "extends": [ + "prettier", + "eslint:recommended", + "plugin:@typescript-eslint/recommended" + ], + "parser": "@typescript-eslint/parser", + "rules": { + "prettier/prettier": [ + "error", + { + "quoteProps": "consistent", + "singleQuote": true, + "tabWidth": 2, + "trailingComma": "es5", + "useTabs": false + } + ] + } + } +} diff --git a/node_modules/simple-update-notifier/src/borderedText.ts b/node_modules/simple-update-notifier/src/borderedText.ts new file mode 100644 index 0000000..7145ac2 --- /dev/null +++ b/node_modules/simple-update-notifier/src/borderedText.ts @@ -0,0 +1,12 @@ +const borderedText = (text: string) => { + const lines = text.split('\n'); + const width = Math.max(...lines.map((l) => l.length)); + const res = [`┌${'─'.repeat(width + 2)}┐`]; + for (const line of lines) { + res.push(`│ ${line.padEnd(width)} │`); + } + res.push(`└${'─'.repeat(width + 2)}┘`); + return res.join('\n'); +}; + +export default borderedText; diff --git a/node_modules/simple-update-notifier/src/cache.spec.ts b/node_modules/simple-update-notifier/src/cache.spec.ts new file mode 100644 index 0000000..49e1cb2 --- /dev/null +++ b/node_modules/simple-update-notifier/src/cache.spec.ts @@ -0,0 +1,17 @@ +import { createConfigDir, getLastUpdate, saveLastUpdate } from './cache'; + +createConfigDir(); + +jest.useFakeTimers().setSystemTime(new Date('2022-01-01')); + +const fakeTime = new Date('2022-01-01').getTime(); + +test('can save update then get the update details', () => { + saveLastUpdate('test'); + expect(getLastUpdate('test')).toBe(fakeTime); +}); + +test('prefixed module can save update then get the update details', () => { + saveLastUpdate('@alexbrazier/test'); + 
expect(getLastUpdate('@alexbrazier/test')).toBe(fakeTime); +}); diff --git a/node_modules/simple-update-notifier/src/cache.ts b/node_modules/simple-update-notifier/src/cache.ts new file mode 100644 index 0000000..e11deba --- /dev/null +++ b/node_modules/simple-update-notifier/src/cache.ts @@ -0,0 +1,44 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs'; + +const homeDirectory = os.homedir(); +const configDir = + process.env.XDG_CONFIG_HOME || + path.join(homeDirectory, '.config', 'simple-update-notifier'); + +const getConfigFile = (packageName: string) => { + return path.join( + configDir, + `${packageName.replace('@', '').replace('/', '__')}.json` + ); +}; + +export const createConfigDir = () => { + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }); + } +}; + +export const getLastUpdate = (packageName: string) => { + const configFile = getConfigFile(packageName); + + try { + if (!fs.existsSync(configFile)) { + return undefined; + } + const file = JSON.parse(fs.readFileSync(configFile, 'utf8')); + return file.lastUpdateCheck as number; + } catch { + return undefined; + } +}; + +export const saveLastUpdate = (packageName: string) => { + const configFile = getConfigFile(packageName); + + fs.writeFileSync( + configFile, + JSON.stringify({ lastUpdateCheck: new Date().getTime() }) + ); +}; diff --git a/node_modules/simple-update-notifier/src/getDistVersion.spec.ts b/node_modules/simple-update-notifier/src/getDistVersion.spec.ts new file mode 100644 index 0000000..b78a42e --- /dev/null +++ b/node_modules/simple-update-notifier/src/getDistVersion.spec.ts @@ -0,0 +1,35 @@ +import Stream from 'stream'; +import https from 'https'; +import getDistVersion from './getDistVersion'; + +jest.mock('https', () => ({ + get: jest.fn(), +})); + +test('Valid response returns version', async () => { + const st = new Stream(); + (https.get as jest.Mock).mockImplementation((url, cb) => { + cb(st); + + st.emit('data', '{"latest":"1.0.0"}'); + st.emit('end'); + }); + + const version = await getDistVersion('test', 'latest'); + + expect(version).toEqual('1.0.0'); +}); + +test('Invalid response throws error', async () => { + const st = new Stream(); + (https.get as jest.Mock).mockImplementation((url, cb) => { + cb(st); + + st.emit('data', 'some invalid json'); + st.emit('end'); + }); + + expect(getDistVersion('test', 'latest')).rejects.toThrow( + 'Could not parse version response' + ); +}); diff --git a/node_modules/simple-update-notifier/src/getDistVersion.ts b/node_modules/simple-update-notifier/src/getDistVersion.ts new file mode 100644 index 0000000..d474e1f --- /dev/null +++ b/node_modules/simple-update-notifier/src/getDistVersion.ts @@ -0,0 +1,29 @@ +import https from 'https'; + +const getDistVersion = async (packageName: string, distTag: string) => { + const url = `https://registry.npmjs.org/-/package/${packageName}/dist-tags`; + + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let body = ''; + + res.on('data', (chunk) => (body += chunk)); + res.on('end', () => { + try { + const json = JSON.parse(body); + const version = json[distTag]; + if (!version) { + reject(new Error('Error getting version')); + } + resolve(version); + } catch { + reject(new Error('Could not parse version response')); + } + }); + }) + .on('error', (err) => reject(err)); + }); +}; + +export default getDistVersion; diff --git a/node_modules/simple-update-notifier/src/hasNewVersion.spec.ts b/node_modules/simple-update-notifier/src/hasNewVersion.spec.ts new file mode 
100644 index 0000000..af7ab22 --- /dev/null +++ b/node_modules/simple-update-notifier/src/hasNewVersion.spec.ts @@ -0,0 +1,82 @@ +import hasNewVersion from './hasNewVersion'; +import { getLastUpdate } from './cache'; +import getDistVersion from './getDistVersion'; + +jest.mock('./getDistVersion', () => jest.fn().mockReturnValue('1.0.0')); +jest.mock('./cache', () => ({ + getLastUpdate: jest.fn().mockReturnValue(undefined), + createConfigDir: jest.fn(), + saveLastUpdate: jest.fn(), +})); + +const pkg = { name: 'test', version: '1.0.0' }; + +afterEach(() => jest.clearAllMocks()); + +const defaultArgs = { + pkg, + shouldNotifyInNpmScript: true, + alwaysRun: true, +}; + +test('it should not trigger update for same version', async () => { + const newVersion = await hasNewVersion(defaultArgs); + + expect(newVersion).toBe(false); +}); + +test('it should trigger update for patch version bump', async () => { + (getDistVersion as jest.Mock).mockReturnValue('1.0.1'); + + const newVersion = await hasNewVersion(defaultArgs); + + expect(newVersion).toBe('1.0.1'); +}); + +test('it should trigger update for minor version bump', async () => { + (getDistVersion as jest.Mock).mockReturnValue('1.1.0'); + + const newVersion = await hasNewVersion(defaultArgs); + + expect(newVersion).toBe('1.1.0'); +}); + +test('it should trigger update for major version bump', async () => { + (getDistVersion as jest.Mock).mockReturnValue('2.0.0'); + + const newVersion = await hasNewVersion(defaultArgs); + + expect(newVersion).toBe('2.0.0'); +}); + +test('it should not trigger update if version is lower', async () => { + (getDistVersion as jest.Mock).mockReturnValue('0.0.9'); + + const newVersion = await hasNewVersion(defaultArgs); + + expect(newVersion).toBe(false); +}); + +it('should trigger update check if last update older than config', async () => { + const TWO_WEEKS = new Date().getTime() - 1000 * 60 * 60 * 24 * 14; + (getLastUpdate as jest.Mock).mockReturnValue(TWO_WEEKS); + const newVersion = await hasNewVersion({ + pkg, + shouldNotifyInNpmScript: true, + }); + + expect(newVersion).toBe(false); + expect(getDistVersion).toHaveBeenCalled(); +}); + +it('should not trigger update check if last update is too recent', async () => { + const TWELVE_HOURS = new Date().getTime() - 1000 * 60 * 60 * 12; + (getLastUpdate as jest.Mock).mockReturnValue(TWELVE_HOURS); + const newVersion = await hasNewVersion({ + pkg, + shouldNotifyInNpmScript: true, + }); + + expect(newVersion).toBe(false); + expect(getDistVersion).not.toHaveBeenCalled(); +}); diff --git a/node_modules/simple-update-notifier/src/hasNewVersion.ts b/node_modules/simple-update-notifier/src/hasNewVersion.ts new file mode 100644 index 0000000..31d5069 --- /dev/null +++ b/node_modules/simple-update-notifier/src/hasNewVersion.ts @@ -0,0 +1,40 @@ +import semver from 'semver'; +import { createConfigDir, getLastUpdate, saveLastUpdate } from './cache'; +import getDistVersion from './getDistVersion'; +import { IUpdate } from './types'; + +const hasNewVersion = async ({ + pkg, + updateCheckInterval = 1000 * 60 * 60 * 24, + distTag = 'latest', + alwaysRun, + debug, +}: IUpdate) => { + createConfigDir(); + const lastUpdateCheck = getLastUpdate(pkg.name); + if ( + alwaysRun || + !lastUpdateCheck || + lastUpdateCheck < new Date().getTime() - updateCheckInterval + ) { + const latestVersion = await getDistVersion(pkg.name, distTag); + saveLastUpdate(pkg.name); + if (semver.gt(latestVersion, pkg.version)) { + return latestVersion; + } else if (debug) { + console.error( + `Latest version 
(${latestVersion}) not newer than current version (${pkg.version})` + ); + } + } else if (debug) { + console.error( + `Too recent to check for a new update. simpleUpdateNotifier() interval set to ${updateCheckInterval}ms but only ${ + new Date().getTime() - lastUpdateCheck + }ms since last check.` + ); + } + + return false; +}; + +export default hasNewVersion; diff --git a/node_modules/simple-update-notifier/src/index.spec.ts b/node_modules/simple-update-notifier/src/index.spec.ts new file mode 100644 index 0000000..98ffb5a --- /dev/null +++ b/node_modules/simple-update-notifier/src/index.spec.ts @@ -0,0 +1,27 @@ +import simpleUpdateNotifier from '.'; +import hasNewVersion from './hasNewVersion'; + +const consoleSpy = jest.spyOn(console, 'error'); + +jest.mock('./hasNewVersion', () => jest.fn().mockResolvedValue('2.0.0')); + +beforeEach(jest.clearAllMocks); + +test('it logs message if update is available', async () => { + await simpleUpdateNotifier({ + pkg: { name: 'test', version: '1.0.0' }, + alwaysRun: true, + }); + + expect(consoleSpy).toHaveBeenCalledTimes(1); +}); + +test('it does not log message if update is not available', async () => { + (hasNewVersion as jest.Mock).mockResolvedValue(false); + await simpleUpdateNotifier({ + pkg: { name: 'test', version: '2.0.0' }, + alwaysRun: true, + }); + + expect(consoleSpy).toHaveBeenCalledTimes(0); +}); diff --git a/node_modules/simple-update-notifier/src/index.ts b/node_modules/simple-update-notifier/src/index.ts new file mode 100644 index 0000000..2b0d2cf --- /dev/null +++ b/node_modules/simple-update-notifier/src/index.ts @@ -0,0 +1,34 @@ +import isNpmOrYarn from './isNpmOrYarn'; +import hasNewVersion from './hasNewVersion'; +import { IUpdate } from './types'; +import borderedText from './borderedText'; + +const simpleUpdateNotifier = async (args: IUpdate) => { + if ( + !args.alwaysRun && + (!process.stdout.isTTY || (isNpmOrYarn && !args.shouldNotifyInNpmScript)) + ) { + if (args.debug) { + console.error('Opting out of running simpleUpdateNotifier()'); + } + return; + } + + try { + const latestVersion = await hasNewVersion(args); + if (latestVersion) { + console.error( + borderedText(`New version of ${args.pkg.name} available! 
+Current Version: ${args.pkg.version} +Latest Version: ${latestVersion}`) + ); + } + } catch (err) { + // Catch any network errors or cache writing errors so module doesn't cause a crash + if (args.debug && err instanceof Error) { + console.error('Unexpected error in simpleUpdateNotifier():', err); + } + } +}; + +export default simpleUpdateNotifier; diff --git a/node_modules/simple-update-notifier/src/isNpmOrYarn.ts b/node_modules/simple-update-notifier/src/isNpmOrYarn.ts new file mode 100644 index 0000000..ee4c837 --- /dev/null +++ b/node_modules/simple-update-notifier/src/isNpmOrYarn.ts @@ -0,0 +1,12 @@ +import process from 'process'; + +const packageJson = process.env.npm_package_json; +const userAgent = process.env.npm_config_user_agent; +const isNpm6 = Boolean(userAgent && userAgent.startsWith('npm')); +const isNpm7 = Boolean(packageJson && packageJson.endsWith('package.json')); + +const isNpm = isNpm6 || isNpm7; +const isYarn = Boolean(userAgent && userAgent.startsWith('yarn')); +const isNpmOrYarn = isNpm || isYarn; + +export default isNpmOrYarn; diff --git a/node_modules/simple-update-notifier/src/types.ts b/node_modules/simple-update-notifier/src/types.ts new file mode 100644 index 0000000..c395eb0 --- /dev/null +++ b/node_modules/simple-update-notifier/src/types.ts @@ -0,0 +1,8 @@ +export interface IUpdate { + pkg: { name: string; version: string }; + updateCheckInterval?: number; + shouldNotifyInNpmScript?: boolean; + distTag?: string; + alwaysRun?: boolean; + debug?: boolean; +} diff --git a/node_modules/split2/LICENSE b/node_modules/split2/LICENSE new file mode 100644 index 0000000..a91afe5 --- /dev/null +++ b/node_modules/split2/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014-2018, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/split2/README.md b/node_modules/split2/README.md new file mode 100644 index 0000000..36f03ab --- /dev/null +++ b/node_modules/split2/README.md @@ -0,0 +1,85 @@ +# Split2(matcher, mapper, options) + +![ci](https://github.com/mcollina/split2/workflows/ci/badge.svg) + +Break up a stream and reassemble it so that each line is a chunk. +`split2` is inspired by [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) module, +and it is totally API compatible with it. +However, it is based on Node.js core [`Transform`](https://nodejs.org/api/stream.html#stream_new_stream_transform_options). + +`matcher` may be a `String`, or a `RegExp`. Example, read every line in a file ... + +``` js + fs.createReadStream(file) + .pipe(split2()) + .on('data', function (line) { + //each chunk now is a separate line! + }) + +``` + +`split` takes the same arguments as `string.split` except it defaults to '/\r?\n/', and the optional `limit` paremeter is ignored. 
+[String#split](https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/String/split) + +`split` takes an optional options object on it's third argument, which +is directly passed as a +[Transform](https://nodejs.org/api/stream.html#stream_new_stream_transform_options) +option. + +Additionally, the `.maxLength` and `.skipOverflow` options are implemented, which set limits on the internal +buffer size and the stream's behavior when the limit is exceeded. There is no limit unless `maxLength` is set. When +the internal buffer size exceeds `maxLength`, the stream emits an error by default. You may also set `skipOverflow` to +true to suppress the error and instead skip past any lines that cause the internal buffer to exceed `maxLength`. + +Calling `.destroy` will make the stream emit `close`. Use this to perform cleanup logic + +``` js +var splitFile = function(filename) { + var file = fs.createReadStream(filename) + + return file + .pipe(split2()) + .on('close', function() { + // destroy the file stream in case the split stream was destroyed + file.destroy() + }) +} + +var stream = splitFile('my-file.txt') + +stream.destroy() // will destroy the input file stream +``` + +# NDJ - Newline Delimited Json + +`split2` accepts a function which transforms each line. + +``` js +fs.createReadStream(file) + .pipe(split2(JSON.parse)) + .on('data', function (obj) { + //each chunk now is a js object + }) + .on("error", function(error) { + //handling parsing errors + }) +``` + +However, in [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) the mapper +is wrapped in a try-catch, while here it is not: if your parsing logic can throw, wrap it yourself. Otherwise, you can also use the stream error handling when mapper function throw. + +# License + +Copyright (c) 2014-2021, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
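One more sketch tying the pieces above together (the file name and size limit are illustrative): the NDJSON mapper combined with the `maxLength`/`skipOverflow` options.

``` js
const fs = require('fs')
const split2 = require('split2')

// Parse newline-delimited JSON, silently skipping any line longer than 64 KiB
// instead of failing with a "maximum buffer reached" error.
fs.createReadStream('events.ndjson')
  .pipe(split2(JSON.parse, { maxLength: 64 * 1024, skipOverflow: true }))
  .on('data', function (obj) {
    // each chunk is the object parsed from one line
    console.log(obj)
  })
  .on('error', function (err) {
    // JSON.parse failures (and other stream errors) surface here
    console.error(err)
  })
```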
diff --git a/node_modules/split2/bench.js b/node_modules/split2/bench.js new file mode 100644 index 0000000..15ec5df --- /dev/null +++ b/node_modules/split2/bench.js @@ -0,0 +1,27 @@ +'use strict' + +const split = require('./') +const bench = require('fastbench') +const binarySplit = require('binary-split') +const fs = require('fs') + +function benchSplit (cb) { + fs.createReadStream('package.json') + .pipe(split()) + .on('end', cb) + .resume() +} + +function benchBinarySplit (cb) { + fs.createReadStream('package.json') + .pipe(binarySplit()) + .on('end', cb) + .resume() +} + +const run = bench([ + benchSplit, + benchBinarySplit +], 10000) + +run(run) diff --git a/node_modules/split2/index.js b/node_modules/split2/index.js new file mode 100644 index 0000000..9b59f6c --- /dev/null +++ b/node_modules/split2/index.js @@ -0,0 +1,141 @@ +/* +Copyright (c) 2014-2021, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +'use strict' + +const { Transform } = require('stream') +const { StringDecoder } = require('string_decoder') +const kLast = Symbol('last') +const kDecoder = Symbol('decoder') + +function transform (chunk, enc, cb) { + let list + if (this.overflow) { // Line buffer is full. Skip to start of next line. + const buf = this[kDecoder].write(chunk) + list = buf.split(this.matcher) + + if (list.length === 1) return cb() // Line ending not found. Discard entire chunk. + + // Line ending found. Discard trailing fragment of previous line and reset overflow state. + list.shift() + this.overflow = false + } else { + this[kLast] += this[kDecoder].write(chunk) + list = this[kLast].split(this.matcher) + } + + this[kLast] = list.pop() + + for (let i = 0; i < list.length; i++) { + try { + push(this, this.mapper(list[i])) + } catch (error) { + return cb(error) + } + } + + this.overflow = this[kLast].length > this.maxLength + if (this.overflow && !this.skipOverflow) { + cb(new Error('maximum buffer reached')) + return + } + + cb() +} + +function flush (cb) { + // forward any gibberish left in there + this[kLast] += this[kDecoder].end() + + if (this[kLast]) { + try { + push(this, this.mapper(this[kLast])) + } catch (error) { + return cb(error) + } + } + + cb() +} + +function push (self, val) { + if (val !== undefined) { + self.push(val) + } +} + +function noop (incoming) { + return incoming +} + +function split (matcher, mapper, options) { + // Set defaults for any arguments not supplied. + matcher = matcher || /\r?\n/ + mapper = mapper || noop + options = options || {} + + // Test arguments explicitly. + switch (arguments.length) { + case 1: + // If mapper is only argument. + if (typeof matcher === 'function') { + mapper = matcher + matcher = /\r?\n/ + // If options is only argument. 
+ } else if (typeof matcher === 'object' && !(matcher instanceof RegExp) && !matcher[Symbol.split]) { + options = matcher + matcher = /\r?\n/ + } + break + + case 2: + // If mapper and options are arguments. + if (typeof matcher === 'function') { + options = mapper + mapper = matcher + matcher = /\r?\n/ + // If matcher and options are arguments. + } else if (typeof mapper === 'object') { + options = mapper + mapper = noop + } + } + + options = Object.assign({}, options) + options.autoDestroy = true + options.transform = transform + options.flush = flush + options.readableObjectMode = true + + const stream = new Transform(options) + + stream[kLast] = '' + stream[kDecoder] = new StringDecoder('utf8') + stream.matcher = matcher + stream.mapper = mapper + stream.maxLength = options.maxLength + stream.skipOverflow = options.skipOverflow || false + stream.overflow = false + stream._destroy = function (err, cb) { + // Weird Node v12 bug that we need to work around + this._writableState.errorEmitted = false + cb(err) + } + + return stream +} + +module.exports = split diff --git a/node_modules/split2/package.json b/node_modules/split2/package.json new file mode 100644 index 0000000..e04bcc8 --- /dev/null +++ b/node_modules/split2/package.json @@ -0,0 +1,39 @@ +{ + "name": "split2", + "version": "4.2.0", + "description": "split a Text Stream into a Line Stream, using Stream 3", + "main": "index.js", + "scripts": { + "lint": "standard --verbose", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit", + "legacy": "tape test.js" + }, + "pre-commit": [ + "test" + ], + "website": "https://github.com/mcollina/split2", + "repository": { + "type": "git", + "url": "https://github.com/mcollina/split2.git" + }, + "bugs": { + "url": "http://github.com/mcollina/split2/issues" + }, + "engines": { + "node": ">= 10.x" + }, + "author": "Matteo Collina ", + "license": "ISC", + "devDependencies": { + "binary-split": "^1.0.3", + "callback-stream": "^1.1.0", + "fastbench": "^1.0.0", + "nyc": "^15.0.1", + "pre-commit": "^1.1.2", + "standard": "^17.0.0", + "tape": "^5.0.0" + } +} diff --git a/node_modules/split2/test.js b/node_modules/split2/test.js new file mode 100644 index 0000000..a7f9838 --- /dev/null +++ b/node_modules/split2/test.js @@ -0,0 +1,409 @@ +'use strict' + +const test = require('tape') +const split = require('./') +const callback = require('callback-stream') +const strcb = callback.bind(null, { decodeStrings: false }) +const objcb = callback.bind(null, { objectMode: true }) + +test('split two lines on end', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('split two lines on two writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello') + input.write('\nworld') + input.end() +}) + +test('split four lines on three writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world', 'bye', 'world']) + })) + + input.write('hello\nwor') + input.write('ld\nbye\nwo') + input.write('rld') + input.end() +}) + 
+test('accumulate multiple writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['helloworld']) + })) + + input.write('hello') + input.write('world') + input.end() +}) + +test('split using a custom string matcher', function (t) { + t.plan(2) + + const input = split('~') + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('split using a custom regexp matcher', function (t) { + t.plan(2) + + const input = split(/~/) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support an option argument', function (t) { + t.plan(2) + + const input = split({ highWaterMark: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('support a mapper function', function (t) { + t.plan(2) + + const a = { a: '42' } + const b = { b: '24' } + + const input = split(JSON.parse) + + input.pipe(objcb(function (err, list) { + t.error(err) + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split lines windows-style', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\r\nworld') +}) + +test('splits a buffer', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('do not end on undefined', function (t) { + t.plan(2) + + const input = split(function (line) { }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, []) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('has destroy method', function (t) { + t.plan(1) + + const input = split(function (line) { }) + + input.on('close', function () { + t.ok(true, 'close emitted') + t.end() + }) + + input.destroy() +}) + +test('support custom matcher and mapper', function (t) { + t.plan(4) + + const a = { a: '42' } + const b = { b: '24' } + const input = split('~', JSON.parse) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + + input.pipe(objcb(function (err, list) { + t.notOk(err, 'no errors') + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('~') + input.end(JSON.stringify(b)) +}) + +test('support custom matcher and options', function (t) { + t.plan(6) + + const input = split('~', { highWaterMark: 1024 }) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support mapper and options', function (t) { + t.plan(6) + + const a = { a: '42' } + const b = { b: '24' } + const input = split(JSON.parse, { highWaterMark: 1024 }) + + t.ok(input.matcher instanceof RegExp, 'matcher is RegExp') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(objcb(function (err, list) { + t.error(err) + 
t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split utf8 chars', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '锟斤拷']) + })) + + const buf = Buffer.from('烫烫烫\r\n锟斤拷', 'utf8') + for (let i = 0; i < buf.length; ++i) { + input.write(buf.slice(i, i + 1)) + } + input.end() +}) + +test('split utf8 chars 2by2', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '烫烫烫']) + })) + + const str = '烫烫烫\r\n烫烫烫' + const buf = Buffer.from(str, 'utf8') + for (let i = 0; i < buf.length; i += 2) { + input.write(buf.slice(i, i + 2)) + } + input.end() +}) + +test('split lines when the \n comes at the end of a chunk', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello\n') + input.end('world') +}) + +test('truncated utf-8 char', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫' + Buffer.from('e7', 'hex').toString()]) + })) + + const str = '烫烫' + const buf = Buffer.from(str, 'utf8') + + input.write(buf.slice(0, 3)) + input.end(buf.slice(3, 4)) +}) + +test('maximum buffer limit', function (t) { + t.plan(1) + + const input = split({ maxLength: 2 }) + input.on('error', function (err) { + t.ok(err) + }) + + input.resume() + + input.write('hey') +}) + +test('readable highWaterMark', function (t) { + const input = split() + t.equal(input._readableState.highWaterMark, 16) + t.end() +}) + +test('maxLength < chunk size', function (t) { + t.plan(2) + + const input = split({ maxLength: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b']) + })) + + input.end('a\nb') +}) + +test('maximum buffer limit w/skip', function (t) { + t.plan(2) + + const input = split({ maxLength: 2, skipOverflow: true }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b', 'c']) + })) + + input.write('a\n123') + input.write('456') + input.write('789\nb\nc') + input.end() +}) + +test("don't modify the options object", function (t) { + t.plan(2) + + const options = {} + const input = split(options) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.same(options, {}) + })) + + input.end() +}) + +test('mapper throws flush', function (t) { + t.plan(1) + const error = new Error() + const input = split(function () { + throw error + }) + + input.on('error', (err, list) => { + t.same(err, error) + }) + input.end('hello') +}) + +test('mapper throws on transform', function (t) { + t.plan(1) + + const error = new Error() + const input = split(function (l) { + throw error + }) + + input.on('error', (err) => { + t.same(err, error) + }) + input.write('a') + input.write('\n') + input.end('b') +}) + +test('supports Symbol.split', function (t) { + t.plan(2) + + const input = split({ + [Symbol.split] (str) { + return str.split('~') + } + }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) diff --git a/node_modules/supports-color/browser.js b/node_modules/supports-color/browser.js new file mode 100644 index 0000000..62afa3a --- /dev/null +++ b/node_modules/supports-color/browser.js @@ -0,0 +1,5 @@ +'use strict'; 
+module.exports = { + stdout: false, + stderr: false +}; diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js new file mode 100644 index 0000000..1704131 --- /dev/null +++ b/node_modules/supports-color/index.js @@ -0,0 +1,131 @@ +'use strict'; +const os = require('os'); +const hasFlag = require('has-flag'); + +const env = process.env; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; + } + + const min = forceColor ? 1 : 0; + + if (process.platform === 'win32') { + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first Windows + // release that supports 256 colors. Windows 10 build 14931 is the first release + // that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(process.versions.node.split('.')[0]) >= 8 && + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr) +}; diff --git a/node_modules/supports-color/license b/node_modules/supports-color/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/supports-color/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
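As a quick illustration of the API exported by the index.js above, here is a minimal sketch that reads the detected level for the current process and for an arbitrary non-TTY stream; the `build.log` path is hypothetical:

```js
'use strict';
const fs = require('fs');
const { supportsColor, stdout } = require('supports-color');

// Level detected for process.stdout: false, or { level, hasBasic, has256, has16m }
console.log(stdout);

// The same check can be run against any stream. A plain file stream is not a TTY,
// so this is false unless color is forced (e.g. FORCE_COLOR=1 or --color).
const logStream = fs.createWriteStream('build.log'); // hypothetical path
console.log(supportsColor(logStream));
```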
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json new file mode 100644 index 0000000..ad199f5 --- /dev/null +++ b/node_modules/supports-color/package.json @@ -0,0 +1,53 @@ +{ + "name": "supports-color", + "version": "5.5.0", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m" + ], + "dependencies": { + "has-flag": "^3.0.0" + }, + "devDependencies": { + "ava": "^0.25.0", + "import-fresh": "^2.0.0", + "xo": "^0.20.0" + }, + "browser": "browser.js" +} diff --git a/node_modules/supports-color/readme.md b/node_modules/supports-color/readme.md new file mode 100644 index 0000000..f6e4019 --- /dev/null +++ b/node_modules/supports-color/readme.md @@ -0,0 +1,66 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install supports-color +``` + + +## Usage + +```js +const supportsColor = require('supports-color'); + +if (supportsColor.stdout) { + console.log('Terminal stdout supports color'); +} + +if (supportsColor.stdout.has256) { + console.log('Terminal stdout supports 256 colors'); +} + +if (supportsColor.stderr.has16m) { + console.log('Terminal stderr supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `Object` with a `stdout` and `stderr` property for testing either streams. Each property is an `Object`, or `false` if color is not supported. + +The `stdout`/`stderr` objects specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors) + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add the environment variable `FORCE_COLOR=1` to forcefully enable color or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. + + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE new file mode 100644 index 0000000..7cccaf9 --- /dev/null +++ b/node_modules/to-regex-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md new file mode 100644 index 0000000..38887da --- /dev/null +++ b/node_modules/to-regex-range/README.md @@ -0,0 +1,305 @@ +# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) + +> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex-range +``` + +
+**What does this do?** +
+ +This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. + +**Example** + +```js +const toRegexRange = require('to-regex-range'); +const regex = new RegExp(toRegexRange('15', '95')); +``` + +A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). + +
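For instance, combining two generated sources into one larger pattern might look like the following sketch (the clock-time use case is purely illustrative and assumes the default `relaxZeros` behaviour):

```js
const toRegexRange = require('to-regex-range');

// Match "HH:MM" clock times by composing two ranges: hours 00-23 and minutes 00-59
const hours = toRegexRange('00', '23');
const minutes = toRegexRange('00', '59');
const time = new RegExp(`^(?:${hours}):(?:${minutes})$`);

console.log(time.test('09:30')); //=> true
console.log(time.test('24:00')); //=> false
```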
+ +
+ +
+**Why use this library?** +
+ +### Convenience + +Creating regular expressions for matching numbers gets deceptively complicated pretty fast. + +For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: + +* regex for matching `1` => `/1/` (easy enough) +* regex for matching `1` through `5` => `/[1-5]/` (not bad...) +* regex for matching `1` or `5` => `/(1|5)/` (still easy...) +* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) +* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) +* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) +* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) + +The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. + +**Learn more** + +If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. + +### Heavily tested + +As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. + +Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. + +### Optimized + +Generated regular expressions are optimized: + +* duplicate sequences and character classes are reduced using quantifiers +* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative +* uses fragment caching to avoid processing the same exact string more than once + +
+ +
+ +## Usage + +Add this library to your javascript application with the following line of code + +```js +const toRegexRange = require('to-regex-range'); +``` + +The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). + +```js +const source = toRegexRange('15', '95'); +//=> 1[5-9]|[2-8][0-9]|9[0-5] + +const regex = new RegExp(`^${source}$`); +console.log(regex.test('14')); //=> false +console.log(regex.test('50')); //=> true +console.log(regex.test('94')); //=> true +console.log(regex.test('96')); //=> false +``` + +## Options + +### options.capture + +**Type**: `boolean` + +**Deafault**: `undefined` + +Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. + +```js +console.log(toRegexRange('-10', '10')); +//=> -[1-9]|-?10|[0-9] + +console.log(toRegexRange('-10', '10', { capture: true })); +//=> (-[1-9]|-?10|[0-9]) +``` + +### options.shorthand + +**Type**: `boolean` + +**Deafault**: `undefined` + +Use the regex shorthand for `[0-9]`: + +```js +console.log(toRegexRange('0', '999999')); +//=> [0-9]|[1-9][0-9]{1,5} + +console.log(toRegexRange('0', '999999', { shorthand: true })); +//=> \d|[1-9]\d{1,5} +``` + +### options.relaxZeros + +**Type**: `boolean` + +**Default**: `true` + +This option relaxes matching for leading zeros when when ranges are zero-padded. + +```js +const source = toRegexRange('-0010', '0010'); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> true +console.log(regex.test('-010')); //=> true +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> true +console.log(regex.test('010')); //=> true +console.log(regex.test('0010')); //=> true +``` + +When `relaxZeros` is false, matching is strict: + +```js +const source = toRegexRange('-0010', '0010', { relaxZeros: false }); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> false +console.log(regex.test('-010')); //=> false +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> false +console.log(regex.test('010')); //=> false +console.log(regex.test('0010')); //=> true +``` + +## Examples + +| **Range** | **Result** | **Compile time** | +| --- | --- | --- | +| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | +| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | +| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | +| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | +| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | +| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | +| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | +| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | +| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | +| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | +| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | +| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | +| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | +| `toRegexRange(5, 5)` | `5` | _8μs_ | +| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | +| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | +| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | +| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | +| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | +| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | +| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | +| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | +| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | +| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | + +## Heads up! + +**Order of arguments** + +When the `min` is larger than the `max`, values will be flipped to create a valid range: + +```js +toRegexRange('51', '29'); +``` + +Is effectively flipped to: + +```js +toRegexRange('29', '51'); +//=> 29|[3-4][0-9]|5[0-1] +``` + +**Steps / increments** + +This library does not support steps (increments). A pr to add support would be welcome. + +## History + +### v2.0.0 - 2017-04-21 + +**New features** + +Adds support for zero-padding! + +### v1.0.0 + +**Optimizations** + +Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. + +## Attribution + +Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") +* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 63 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [doowb](https://github.com/doowb) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js new file mode 100644 index 0000000..77fbace --- /dev/null +++ b/node_modules/to-regex-range/index.js @@ -0,0 +1,288 @@ +/*! + * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +const isNumber = require('is-number'); + +const toRegexRange = (min, max, options) => { + if (isNumber(min) === false) { + throw new TypeError('toRegexRange: expected the first argument to be a number'); + } + + if (max === void 0 || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new TypeError('toRegexRange: expected the second argument to be a number.'); + } + + let opts = { relaxZeros: true, ...options }; + if (typeof opts.strictZeros === 'boolean') { + opts.relaxZeros = opts.strictZeros === false; + } + + let relax = String(opts.relaxZeros); + let shorthand = String(opts.shorthand); + let capture = String(opts.capture); + let wrap = String(opts.wrap); + let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; + + if (toRegexRange.cache.hasOwnProperty(cacheKey)) { + return toRegexRange.cache[cacheKey].result; + } + + let a = Math.min(min, max); + let b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + let result = min + '|' + max; + if (opts.capture) { + return `(${result})`; + } + if (opts.wrap === false) { + return result; + } + return `(?:${result})`; + } + + let isPadded = hasPadding(min) || hasPadding(max); + let state = { min, max, a, b }; + let positives = []; + let negatives = []; + + if (isPadded) { + state.isPadded = isPadded; + state.maxLen = String(state.max).length; + } + + if (a < 0) { + let newMin = b < 0 ? Math.abs(b) : 1; + negatives = splitToPatterns(newMin, Math.abs(a), state, opts); + a = state.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, state, opts); + } + + state.negatives = negatives; + state.positives = positives; + state.result = collatePatterns(negatives, positives, opts); + + if (opts.capture === true) { + state.result = `(${state.result})`; + } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { + state.result = `(?:${state.result})`; + } + + toRegexRange.cache[cacheKey] = state; + return state.result; +}; + +function collatePatterns(neg, pos, options) { + let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + let intersected = filterPatterns(neg, pos, '-?', true, options) || []; + let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + let nines = 1; + let zeros = 1; + + let stop = countNines(min, nines); + let stops = new Set([max]); + + while (min <= stop && stop <= max) { + stops.add(stop); + nines += 1; + stop = countNines(min, nines); + } + + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops.add(stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops = [...stops]; + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return { pattern: start, count: [], digits: 0 }; + } + + let zipped = zip(start, stop); + let digits = zipped.length; + let pattern = ''; + let count = 0; + + for (let i = 0; i < digits; i++) { + let [startDigit, stopDigit] = zipped[i]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += toCharacterClass(startDigit, stopDigit, options); + + } else { + count++; + } + } + + if (count) { + pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; + } + + return { pattern, count: [count], digits }; +} + +function splitToPatterns(min, max, tok, options) { + let ranges = splitToRanges(min, max); + let tokens = []; + let start = min; + let prev; + + for (let i = 0; i < ranges.length; i++) { + let max = ranges[i]; + let obj = rangeToPattern(String(start), String(max), options); + let zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.count.length > 1) { + prev.count.pop(); + } + + prev.count.push(obj.count[0]); + prev.string = prev.pattern + toQuantifier(prev.count); + start = max + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(max, tok, options); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.count); + tokens.push(obj); + start = max + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + let result = []; + + for (let ele of arr) { + let { string } = ele; + + // only push if _both_ are negative... + if (!intersection && !contains(comparison, 'string', string)) { + result.push(prefix + string); + } + + // or _both_ are positive + if (intersection && contains(comparison, 'string', string)) { + result.push(prefix + string); + } + } + return result; +} + +/** + * Zip strings + */ + +function zip(a, b) { + let arr = []; + for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function contains(arr, key, val) { + return arr.some(ele => ele[key] === val); +} + +function countNines(min, len) { + return Number(String(min).slice(0, -len) + '9'.repeat(len)); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + let [start = 0, stop = ''] = digits; + if (stop || start > 1) { + return `{${start + (stop ? ',' + stop : '')}}`; + } + return ''; +} + +function toCharacterClass(a, b, options) { + return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +} + +function hasPadding(str) { + return /^-?(0+)\d/.test(str); +} + +function padZeros(value, tok, options) { + if (!tok.isPadded) { + return value; + } + + let diff = Math.abs(tok.maxLen - String(value).length); + let relax = options.relaxZeros !== false; + + switch (diff) { + case 0: + return ''; + case 1: + return relax ? '0?' : '0'; + case 2: + return relax ? '0{0,2}' : '00'; + default: { + return relax ? `0{0,${diff}}` : `0{${diff}}`; + } + } +} + +/** + * Cache + */ + +toRegexRange.cache = {}; +toRegexRange.clearCache = () => (toRegexRange.cache = {}); + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json new file mode 100644 index 0000000..4ef194f --- /dev/null +++ b/node_modules/to-regex-range/package.json @@ -0,0 +1,88 @@ +{ + "name": "to-regex-range", + "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", + "version": "5.0.1", + "homepage": "https://github.com/micromatch/to-regex-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "micromatch/to-regex-range", + "bugs": { + "url": "https://github.com/micromatch/to-regex-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^7.0.0" + }, + "devDependencies": { + "fill-range": "^6.0.0", + "gulp-format-md": "^2.0.0", + "mocha": "^6.0.2", + "text-table": "^0.2.0", + "time-diff": "^0.3.1" + }, + "keywords": [ + "bash", + "date", + "expand", + "expansion", + "expression", + "glob", + "match", + "match date", + "match number", + "match numbers", + "match year", + "matches", + "matching", + "number", + "numbers", + "numerical", + "range", + "ranges", + "regex", + "regexp", + "regular", + "regular expression", + "sequence" + ], + "verb": { + "layout": "default", + "toc": false, + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "helpers": { + "examples": { + "displayName": "examples" + } + }, + "related": { + "list": [ + "expand-range", + "fill-range", + "micromatch", + "repeat-element", + "repeat-string" + ] + } + } +} diff --git a/node_modules/touch/LICENSE b/node_modules/touch/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/touch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/touch/README.md b/node_modules/touch/README.md new file mode 100644 index 0000000..b5a361e --- /dev/null +++ b/node_modules/touch/README.md @@ -0,0 +1,52 @@ +# node-touch + +For all your node touching needs. + +## Installing + +```bash +npm install touch +``` + +## CLI Usage: + +See `man touch` + +This package exports a binary called `nodetouch` that works mostly +like the unix builtin `touch(1)`. + +## API Usage: + +```javascript +var touch = require("touch") +``` + +Gives you the following functions: + +* `touch(filename, options, cb)` +* `touch.sync(filename, options)` +* `touch.ftouch(fd, options, cb)` +* `touch.ftouchSync(fd, options)` + +All the `options` objects are optional. + +All the async functions return a Promise. If a callback function is +provided, then it's attached to the Promise. + +## Options + +* `force` like `touch -f` Boolean +* `time` like `touch -t ` Can be a Date object, or any parseable + Date string, or epoch ms number. +* `atime` like `touch -a` Can be either a Boolean, or a Date. +* `mtime` like `touch -m` Can be either a Boolean, or a Date. 
+* `ref` like `touch -r ` Must be path to a file. +* `nocreate` like `touch -c` Boolean + +If neither `atime` nor `mtime` are set, then both values are set. If +one of them is set, then the other is not. + +## cli + +This package creates a `nodetouch` command line executable that works +very much like the unix builtin `touch(1)` diff --git a/node_modules/touch/bin/nodetouch.js b/node_modules/touch/bin/nodetouch.js new file mode 100755 index 0000000..f78f082 --- /dev/null +++ b/node_modules/touch/bin/nodetouch.js @@ -0,0 +1,112 @@ +#!/usr/bin/env node +const touch = require("../index.js") + +const usage = code => { + console[code ? 'error' : 'log']( + 'usage:\n' + + 'touch [-acfm] [-r file] [-t [[CC]YY]MMDDhhmm[.SS]] file ...' + ) + process.exit(code) +} + +const singleFlags = { + a: 'atime', + m: 'mtime', + c: 'nocreate', + f: 'force' +} + +const singleOpts = { + r: 'ref', + t: 'time' +} + +const files = [] +const args = process.argv.slice(2) +const options = {} +for (let i = 0; i < args.length; i++) { + const arg = args[i] + if (!arg.match(/^-/)) { + files.push(arg) + continue + } + + // expand shorthands + if (arg.charAt(1) !== '-') { + const expand = [] + for (let f = 1; f < arg.length; f++) { + const fc = arg.charAt(f) + const sf = singleFlags[fc] + const so = singleOpts[fc] + if (sf) + expand.push('--' + sf) + else if (so) { + const soslice = arg.slice(f + 1) + const soval = soslice.charAt(0) === '=' ? soslice : '=' + soslice + expand.push('--' + so + soval) + f = arg.length + } else if (arg !== '-' + fc) + expand.push('-' + fc) + } + if (expand.length) { + args.splice.apply(args, [i, 1].concat(expand)) + i-- + continue + } + } + + const argsplit = arg.split('=') + const key = argsplit.shift().replace(/^\-\-/, '') + const val = argsplit.length ? argsplit.join('=') : null + + switch (key) { + case 'time': + const timestr = val || args[++i] + // [-t [[CC]YY]MMDDhhmm[.SS]] + const parsedtime = timestr.match( + /^(([0-9]{2})?([0-9]{2}))?([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})(\.([0-9]{2}))?$/ + ) + if (!parsedtime) { + console.error('touch: out of range or illegal ' + + 'time specification: ' + + '[[CC]YY]MMDDhhmm[.SS]') + process.exit(1) + } else { + const y = +parsedtime[1] + const year = parsedtime[2] ? y + : y <= 68 ? 2000 + y + : 1900 + y + + const MM = +parsedtime[4] - 1 + const dd = +parsedtime[5] + const hh = +parsedtime[6] + const mm = +parsedtime[7] + const ss = +parsedtime[8] + + options.time = new Date(Date.UTC(year, MM, dd, hh, mm, ss)) + } + continue + + case 'ref': + options.ref = val || args[++i] + continue + + case 'mtime': + case 'nocreate': + case 'atime': + case 'force': + options[key] = true + continue + + default: + console.error('touch: illegal option -- ' + arg) + usage(1) + } +} + +if (!files.length) + usage() + +process.exitCode = 0 +Promise.all(files.map(f => touch(f, options))) + .catch(er => process.exitCode = 1) diff --git a/node_modules/touch/index.js b/node_modules/touch/index.js new file mode 100644 index 0000000..fa6a8d7 --- /dev/null +++ b/node_modules/touch/index.js @@ -0,0 +1,224 @@ +'use strict' + +const EE = require('events').EventEmitter +const cons = require('constants') +const fs = require('fs') + +module.exports = (f, options, cb) => { + if (typeof options === 'function') + cb = options, options = {} + + const p = new Promise((res, rej) => { + new Touch(validOpts(options, f, null)) + .on('done', res).on('error', rej) + }) + + return cb ? 
p.then(res => cb(null, res), cb) : p +} + +module.exports.sync = module.exports.touchSync = (f, options) => + (new TouchSync(validOpts(options, f, null)), undefined) + +module.exports.ftouch = (fd, options, cb) => { + if (typeof options === 'function') + cb = options, options = {} + + const p = new Promise((res, rej) => { + new Touch(validOpts(options, null, fd)) + .on('done', res).on('error', rej) + }) + + return cb ? p.then(res => cb(null, res), cb) : p +} + +module.exports.ftouchSync = (fd, opt) => + (new TouchSync(validOpts(opt, null, fd)), undefined) + +const validOpts = (options, path, fd) => { + options = Object.create(options || {}) + options.fd = fd + options.path = path + + // {mtime: true}, {ctime: true} + // If set to something else, then treat as epoch ms value + const now = new Date(options.time || Date.now()).getTime() / 1000 + if (!options.atime && !options.mtime) + options.atime = options.mtime = now + else { + if (true === options.atime) + options.atime = now + + if (true === options.mtime) + options.mtime = now + } + + let oflags = 0 + if (!options.force) + oflags = oflags | cons.O_RDWR + + if (!options.nocreate) + oflags = oflags | cons.O_CREAT + + options.oflags = oflags + return options +} + +class Touch extends EE { + constructor (options) { + super(options) + this.fd = options.fd + this.path = options.path + this.atime = options.atime + this.mtime = options.mtime + this.ref = options.ref + this.nocreate = !!options.nocreate + this.force = !!options.force + this.closeAfter = options.closeAfter + this.oflags = options.oflags + this.options = options + + if (typeof this.fd !== 'number') { + this.closeAfter = true + this.open() + } else + this.onopen(null, this.fd) + } + + emit (ev, data) { + // we only emit when either done or erroring + // in both cases, need to close + this.close() + return super.emit(ev, data) + } + + close () { + if (typeof this.fd === 'number' && this.closeAfter) + fs.close(this.fd, () => {}) + } + + open () { + fs.open(this.path, this.oflags, (er, fd) => this.onopen(er, fd)) + } + + onopen (er, fd) { + if (er) { + if (er.code === 'EISDIR') + this.onopen(null, null) + else if (er.code === 'ENOENT' && this.nocreate) + this.emit('done') + else + this.emit('error', er) + } else { + this.fd = fd + if (this.ref) + this.statref() + else if (!this.atime || !this.mtime) + this.fstat() + else + this.futimes() + } + } + + statref () { + fs.stat(this.ref, (er, st) => { + if (er) + this.emit('error', er) + else + this.onstatref(st) + }) + } + + onstatref (st) { + this.atime = this.atime && st.atime.getTime()/1000 + this.mtime = this.mtime && st.mtime.getTime()/1000 + if (!this.atime || !this.mtime) + this.fstat() + else + this.futimes() + } + + fstat () { + const stat = this.fd ? 'fstat' : 'stat' + const target = this.fd || this.path + fs[stat](target, (er, st) => { + if (er) + this.emit('error', er) + else + this.onfstat(st) + }) + } + + onfstat (st) { + if (typeof this.atime !== 'number') + this.atime = st.atime.getTime()/1000 + + if (typeof this.mtime !== 'number') + this.mtime = st.mtime.getTime()/1000 + + this.futimes() + } + + futimes () { + const utimes = this.fd ? 
'futimes' : 'utimes' + const target = this.fd || this.path + fs[utimes](target, ''+this.atime, ''+this.mtime, er => { + if (er) + this.emit('error', er) + else + this.emit('done') + }) + } +} + +class TouchSync extends Touch { + open () { + try { + this.onopen(null, fs.openSync(this.path, this.oflags)) + } catch (er) { + this.onopen(er) + } + } + + statref () { + let threw = true + try { + this.onstatref(fs.statSync(this.ref)) + threw = false + } finally { + if (threw) + this.close() + } + } + + fstat () { + let threw = true + const stat = this.fd ? 'fstatSync' : 'statSync' + const target = this.fd || this.path + try { + this.onfstat(fs[stat](target)) + threw = false + } finally { + if (threw) + this.close() + } + } + + futimes () { + let threw = true + const utimes = this.fd ? 'futimesSync' : 'utimesSync' + const target = this.fd || this.path + try { + fs[utimes](target, this.atime, this.mtime) + threw = false + } finally { + if (threw) + this.close() + } + this.emit('done') + } + + close () { + if (typeof this.fd === 'number' && this.closeAfter) + try { fs.closeSync(this.fd) } catch (er) {} + } +} diff --git a/node_modules/touch/package.json b/node_modules/touch/package.json new file mode 100644 index 0000000..a51c29b --- /dev/null +++ b/node_modules/touch/package.json @@ -0,0 +1,25 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "touch", + "description": "like touch(1) in node", + "version": "3.1.1", + "repository": "git://github.com/isaacs/node-touch.git", + "bin": { + "nodetouch": "./bin/nodetouch.js" + }, + "license": "ISC", + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "devDependencies": { + "mutate-fs": "^1.1.0", + "tap": "^10.7.0" + }, + "files": [ + "index.js", + "bin/nodetouch.js" + ] +} diff --git a/node_modules/undefsafe/.github/workflows/release.yml b/node_modules/undefsafe/.github/workflows/release.yml new file mode 100644 index 0000000..e6ee886 --- /dev/null +++ b/node_modules/undefsafe/.github/workflows/release.yml @@ -0,0 +1,25 @@ +name: Release +on: + push: + branches: + - master +jobs: + release: + name: Release + runs-on: ubuntu-18.04 + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Setup Node.js + uses: actions/setup-node@v1 + with: + node-version: 16 + - name: Install dependencies + run: npm ci + - name: Test + run: npm run test + - name: Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npx semantic-release diff --git a/node_modules/undefsafe/.jscsrc b/node_modules/undefsafe/.jscsrc new file mode 100644 index 0000000..9e01c9b --- /dev/null +++ b/node_modules/undefsafe/.jscsrc @@ -0,0 +1,13 @@ +{ + "preset": "node-style-guide", + "requireCapitalizedComments": null, + "requireSpacesInAnonymousFunctionExpression": { + "beforeOpeningCurlyBrace": true, + "beforeOpeningRoundBrace": true + }, + "disallowSpacesInNamedFunctionExpression": { + "beforeOpeningRoundBrace": true + }, + "excludeFiles": ["node_modules/**"], + "disallowSpacesInFunction": null +} diff --git a/node_modules/undefsafe/.jshintrc b/node_modules/undefsafe/.jshintrc new file mode 100644 index 0000000..b47f672 --- /dev/null +++ b/node_modules/undefsafe/.jshintrc @@ -0,0 +1,16 @@ +{ + "browser": false, + "camelcase": true, + "curly": true, + "devel": true, + "eqeqeq": true, + "forin": true, + "indent": 2, + "noarg": true, + "node": true, + "quotmark": "single", + "undef": true, + 
"strict": false, + "unused": true +} + diff --git a/node_modules/undefsafe/.travis.yml b/node_modules/undefsafe/.travis.yml new file mode 100644 index 0000000..a1ace24 --- /dev/null +++ b/node_modules/undefsafe/.travis.yml @@ -0,0 +1,18 @@ +sudo: false +language: node_js +cache: + directories: + - node_modules +notifications: + email: false +node_js: + - '4' +before_install: + - npm i -g npm@^2.0.0 +before_script: + - npm prune +after_success: + - npm run semantic-release +branches: + except: + - "/^v\\d+\\.\\d+\\.\\d+$/" diff --git a/node_modules/undefsafe/LICENSE b/node_modules/undefsafe/LICENSE new file mode 100644 index 0000000..caaf03a --- /dev/null +++ b/node_modules/undefsafe/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright © 2016 Remy Sharp, http://remysharp.com + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/undefsafe/README.md b/node_modules/undefsafe/README.md new file mode 100644 index 0000000..46a706b --- /dev/null +++ b/node_modules/undefsafe/README.md @@ -0,0 +1,63 @@ +# undefsafe + +Simple *function* for retrieving deep object properties without getting "Cannot read property 'X' of undefined" + +Can also be used to safely set deep values. + +## Usage + +```js +var object = { + a: { + b: { + c: 1, + d: [1,2,3], + e: 'remy' + } + } +}; + +console.log(undefsafe(object, 'a.b.e')); // "remy" +console.log(undefsafe(object, 'a.b.not.found')); // undefined +``` + +Demo: [https://jsbin.com/eroqame/3/edit?js,console](https://jsbin.com/eroqame/3/edit?js,console) + +## Setting + +```js +var object = { + a: { + b: [1,2,3] + } +}; + +// modified object +var res = undefsafe(object, 'a.b.0', 10); + +console.log(object); // { a: { b: [10, 2, 3] } } +console.log(res); // 1 - previous value +``` + +## Star rules in paths + +As of 1.2.0, `undefsafe` supports a `*` in the path if you want to search all of the properties (or array elements) for a particular element. + +The function will only return a single result, either the 3rd argument validation value, or the first positive match. For example, the following github data: + +```js +const githubData = { + commits: [{ + modified: [ + "one", + "two" + ] + }, /* ... 
*/ ] + }; + +// first modified file found in the first commit +console.log(undefsafe(githubData, 'commits.*.modified.0')); + +// returns `two` or undefined if not found +console.log(undefsafe(githubData, 'commits.*.modified.*', 'two')); +``` diff --git a/node_modules/undefsafe/example.js b/node_modules/undefsafe/example.js new file mode 100644 index 0000000..ed93c23 --- /dev/null +++ b/node_modules/undefsafe/example.js @@ -0,0 +1,14 @@ +var undefsafe = require('undefsafe'); + +var object = { + a: { + b: { + c: 1, + d: [1, 2, 3], + e: 'remy' + } + } +}; + +console.log(undefsafe(object, 'a.b.e')); // "remy" +console.log(undefsafe(object, 'a.b.not.found')); // undefined diff --git a/node_modules/undefsafe/lib/undefsafe.js b/node_modules/undefsafe/lib/undefsafe.js new file mode 100644 index 0000000..7446878 --- /dev/null +++ b/node_modules/undefsafe/lib/undefsafe.js @@ -0,0 +1,125 @@ +'use strict'; + +function undefsafe(obj, path, value, __res) { + // I'm not super keen on this private function, but it's because + // it'll also be use in the browser and I wont *one* function exposed + function split(path) { + var res = []; + var level = 0; + var key = ''; + + for (var i = 0; i < path.length; i++) { + var c = path.substr(i, 1); + + if (level === 0 && (c === '.' || c === '[')) { + if (c === '[') { + level++; + i++; + c = path.substr(i, 1); + } + + if (key) { + // the first value could be a string + res.push(key); + } + key = ''; + continue; + } + + if (c === ']') { + level--; + key = key.slice(0, -1); + continue; + } + + key += c; + } + + res.push(key); + + return res; + } + + // bail if there's nothing + if (obj === undefined || obj === null) { + return undefined; + } + + var parts = split(path); + var key = null; + var type = typeof obj; + var root = obj; + var parent = obj; + + var star = + parts.filter(function(_) { + return _ === '*'; + }).length > 0; + + // we're dealing with a primitive + if (type !== 'object' && type !== 'function') { + return obj; + } else if (path.trim() === '') { + return obj; + } + + key = parts[0]; + var i = 0; + for (; i < parts.length; i++) { + key = parts[i]; + parent = obj; + + if (key === '*') { + // loop through each property + var prop = ''; + var res = __res || []; + + for (prop in parent) { + var shallowObj = undefsafe( + obj[prop], + parts.slice(i + 1).join('.'), + value, + res + ); + if (shallowObj && shallowObj !== res) { + if ((value && shallowObj === value) || value === undefined) { + if (value !== undefined) { + return shallowObj; + } + + res.push(shallowObj); + } + } + } + + if (res.length === 0) { + return undefined; + } + + return res; + } + + if (Object.getOwnPropertyNames(obj).indexOf(key) == -1) { + return undefined; + } + + obj = obj[key]; + if (obj === undefined || obj === null) { + break; + } + } + + // if we have a null object, make sure it's the one the user was after, + // if it's not (i.e. parts has a length) then give undefined back. 
+ if (obj === null && i !== parts.length - 1) { + obj = undefined; + } else if (!star && value) { + key = path.split('.').pop(); + parent[key] = value; + } + return obj; +} + +if (typeof module !== 'undefined') { + module.exports = undefsafe; +} diff --git a/node_modules/undefsafe/package.json b/node_modules/undefsafe/package.json new file mode 100644 index 0000000..a454233 --- /dev/null +++ b/node_modules/undefsafe/package.json @@ -0,0 +1,34 @@ +{ + "name": "undefsafe", + "description": "Undefined safe way of extracting object properties", + "main": "lib/undefsafe.js", + "tonicExampleFilename": "example.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "tap test/**/*.test.js -R spec", + "cover": "tap test/*.test.js --cov --coverage-report=lcov", + "semantic-release": "semantic-release" + }, + "prettier": { + "trailingComma": "none", + "singleQuote": true + }, + "repository": { + "type": "git", + "url": "https://github.com/remy/undefsafe.git" + }, + "keywords": [ + "undefined" + ], + "author": "Remy Sharp", + "license": "MIT", + "devDependencies": { + "semantic-release": "^18.0.0", + "tap": "^5.7.1", + "tap-only": "0.0.5" + }, + "dependencies": {}, + "version": "2.0.5" +} diff --git a/node_modules/xtend/.jshintrc b/node_modules/xtend/.jshintrc new file mode 100644 index 0000000..77887b5 --- /dev/null +++ b/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/node_modules/xtend/LICENSE b/node_modules/xtend/LICENSE new file mode 100644 index 0000000..0099f4f --- /dev/null +++ b/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/xtend/README.md b/node_modules/xtend/README.md new file mode 100644 index 0000000..4a2703c --- /dev/null +++ b/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/xtend/immutable.js b/node_modules/xtend/immutable.js new file mode 100644 index 0000000..94889c9 --- /dev/null +++ b/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/mutable.js b/node_modules/xtend/mutable.js new file mode 100644 index 0000000..72debed --- /dev/null +++ b/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/package.json b/node_modules/xtend/package.json new file mode 100644 index 0000000..f7a39d1 --- /dev/null +++ b/node_modules/xtend/package.json @@ -0,0 +1,55 @@ +{ + "name": "xtend", + "version": "4.0.2", + "description": "extend like a boss", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "author": "Raynos ", + "repository": "git://github.com/Raynos/xtend.git", + "main": "immutable", + "scripts": { + "test": "node test" + }, + "dependencies": {}, + "devDependencies": { + "tape": "~1.1.0" + }, + "homepage": "https://github.com/Raynos/xtend", + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "engines": { + "node": ">=0.4" + } +} diff --git a/node_modules/xtend/test.js b/node_modules/xtend/test.js new file mode 100644 index 0000000..b895b42 --- /dev/null +++ b/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + 
+test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..a968e96 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,462 @@ +{ + "name": "dh", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "dh", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "nodemon": "^3.1.4", + "pg": "^8.12.0" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": 
"sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nodemon": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.4.tgz", + "integrity": "sha512-wjPBbFhtpJwmIeY2yP7QF+UKzPfltVGtfce1g/bB15/8vCGZj8uxD62b/b9M9/WVgme0NZudpownKN+c0plXlQ==", + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^4", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pg": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz", + "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", + "dependencies": { + "pg-connection-string": "^2.6.4", + "pg-pool": "^3.6.2", + "pg-protocol": "^1.6.1", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.1.1" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz", + "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==", + "optional": true + 
}, + "node_modules/pg-connection-string": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.4.tgz", + "integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.2.tgz", + "integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.1.tgz", + "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": 
"https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/touch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", + "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..ae0bef5 --- /dev/null +++ b/package.json @@ -0,0 +1,16 @@ +{ + "name": "dh", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "dev": "nodemon src/index.js" + }, + "author": "Leca", + "license": "ISC", + "dependencies": { + "nodemon": "^3.1.4", + "pg": "^8.12.0" + } +} diff --git a/root.crt b/root.crt new file mode 100644 index 0000000..cd9e9e3 --- /dev/null +++ b/root.crt @@ -0,0 +1,59 @@ +-----BEGIN 
CERTIFICATE----- +MIIE3TCCAsWgAwIBAgIKPxb5sAAAAAAAFzANBgkqhkiG9w0BAQ0FADAfMR0wGwYD +VQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xNzA2MjAxNjQ0MzdaFw0yNzA2 +MjAxNjU0MzdaMFUxEjAQBgoJkiaJk/IsZAEZFgJydTEWMBQGCgmSJomT8ixkARkW +BnlhbmRleDESMBAGCgmSJomT8ixkARkWAmxkMRMwEQYDVQQDEwpZYW5kZXhDTENB +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqgNnjk0JKPcbsk1+KG2t +eM1AfMnEe5RkAJuBBuwVV49snhcvO1jhKBx/pCnjr6biICc1/oAFDVgU8yVYYPwp +WZ2vH3ZtscjJ/RAT/NS9OKKG7kKknhFhVYxua5xhoIQmm6usBNYYiTcWoFm1eHC8 +I9oddOLSscZYbh3unVRvt+3V+drVmUx9oSUKpqMgfysiv1MN6zB3vq9TFkbhz53E +k0tEcV+W2NnDaeFhLKy284FDKLvOdTDj1EDsSAihxl7sNEKpupNuhgyy2siOqUb+ +d5mO/CRfaAKGg3E6hDM3pEi48E506dJdjPXWfHKSvuguMLRlb2RWdVocRZuyWxOh +0QIDAQABo4HkMIHhMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBRMU5uItjx+ +TOicX1+ovC1Xq2PSnzAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8E +BAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSrucX/oe/mUx0zOSKE +0XbUN04tajBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8vY3Jscy55YW5kZXgucnUv +WWFuZGV4SW50ZXJuYWxSb290Q0EvWWFuZGV4SW50ZXJuYWxSb290Q0EuY3JsMA0G +CSqGSIb3DQEBDQUAA4ICAQAsR5Lb4Pv2FD0Kk+4oc1GEOnehxKLsQtdV81nrU+IV +l9pr2oNMdi8lwIolvHZRllLM4Ba5AcRH6YJ5fe7AjKm+5EdSkhqVWo2UOllRCbtS +wmL50+erOAkxstSlRkO6b8x1L0MOBKv54E5YcQ/Wwt27ldSb6RkEmJBGvmxObAaf +5zc51pqSqao9tnldYaCblEQ/Zmy43FliIpa2eUJoh8DqK8bVo2gcI3wbQ32tWs9u +wvKk8fo4lAdhCwhv+QHuqau1VAY9hPU106bsFIDUmijTMxjAobKBi6CkIX6EbNHU +Jv4DzYVLlDd2y0CADdn2F6I70xpCBn5cquSGuvFbqZjQDmIHwb7WQSxadkiGRWfc +zVTnmiHjJONJJIpE2t+FOV3hc+8o98OzOtNaH2QQ9j6dnKvtIGKGFeNSDp0vXPOi +QhHiIyuB7eWx+g2whktQ74UCpGDSXYnEW3s8w5wezVWIEmouq7q4rCEkTNvJ7Ico +43AgUdPzAFS2zYktw1C+cbUALM8smvXbXrXOBzMmscjIhtXvLMrpPeh23VfdJfQB +0rN2BmRCLUE8JOV+o0k98XMm83oN+lGkL1l+hyoj3ok1uI3JrsWOcDyjOds3ptcN +KimJLm27ndjcxDNo/iA6gefMJuCxFRaqI+eF4P0jSkMgnnQqZkvLGFuHCw8eRDhm +bw== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFGTCCAwGgAwIBAgIQJMM7ZIy2SYxCBgK7WcFwnjANBgkqhkiG9w0BAQ0FADAf +MR0wGwYDVQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xMzAyMTExMzQxNDNa +Fw0zMzAyMTExMzUxNDJaMB8xHTAbBgNVBAMTFFlhbmRleEludGVybmFsUm9vdENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAgb4xoQjBQ7oEFk8EHVGy +1pDEmPWw0Wgw5nX9RM7LL2xQWyUuEq+Lf9Dgh+O725aZ9+SO2oEs47DHHt81/fne +5N6xOftRrCpy8hGtUR/A3bvjnQgjs+zdXvcO9cTuuzzPTFSts/iZATZsAruiepMx +SGj9S1fGwvYws/yiXWNoNBz4Tu1Tlp0g+5fp/ADjnxc6DqNk6w01mJRDbx+6rlBO +aIH2tQmJXDVoFdrhmBK9qOfjxWlIYGy83TnrvdXwi5mKTMtpEREMgyNLX75UjpvO +NkZgBvEXPQq+g91wBGsWIE2sYlguXiBniQgAJOyRuSdTxcJoG8tZkLDPRi5RouWY +gxXr13edn1TRDGco2hkdtSUBlajBMSvAq+H0hkslzWD/R+BXkn9dh0/DFnxVt4XU +5JbFyd/sKV/rF4Vygfw9ssh1ZIWdqkfZ2QXOZ2gH4AEeoN/9vEfUPwqPVzL0XEZK +r4s2WjU9mE5tHrVsQOZ80wnvYHYi2JHbl0hr5ghs4RIyJwx6LEEnj2tzMFec4f7o +dQeSsZpgRJmpvpAfRTxhIRjZBrKxnMytedAkUPguBQwjVCn7+EaKiJfpu42JG8Mm ++/dHi+Q9Tc+0tX5pKOIpQMlMxMHw8MfPmUjC3AAd9lsmCtuybYoeN2IRdbzzchJ8 +l1ZuoI3gH7pcIeElfVSqSBkCAwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFKu5xf+h7+ZTHTM5IoTRdtQ3Ti1qMBAGCSsGAQQB +gjcVAQQDAgEAMA0GCSqGSIb3DQEBDQUAA4ICAQAVpyJ1qLjqRLC34F1UXkC3vxpO +nV6WgzpzA+DUNog4Y6RhTnh0Bsir+I+FTl0zFCm7JpT/3NP9VjfEitMkHehmHhQK +c7cIBZSF62K477OTvLz+9ku2O/bGTtYv9fAvR4BmzFfyPDoAKOjJSghD1p/7El+1 +eSjvcUBzLnBUtxO/iYXRNo7B3+1qo4F5Hz7rPRLI0UWW/0UAfVCO2fFtyF6C1iEY +/q0Ldbf3YIaMkf2WgGhnX9yH/8OiIij2r0LVNHS811apyycjep8y/NkG4q1Z9jEi +VEX3P6NEL8dWtXQlvlNGMcfDT3lmB+tS32CPEUwce/Ble646rukbERRwFfxXojpf +C6ium+LtJc7qnK6ygnYF4D6mz4H+3WaxJd1S1hGQxOb/3WVw63tZFnN62F6/nc5g +6T44Yb7ND6y3nVcygLpbQsws6HsjX65CoSjrrPn0YhKxNBscF7M7tLTW/5LK9uhk +yjRCkJ0YagpeLxfV1l1ZJZaTPZvY9+ylHnWHhzlq0FzcrooSSsp4i44DB2K7O2ID +87leymZkKUY6PMDa4GkDJx0dG4UXDhRETMf+NkYgtLJ+UIzMNskwVDcxO4kVL+Hi 
+Pj78bnC5yCw8P5YylR45LdxLzLO68unoXOyFz1etGXzszw8lJI9LNubYxk77mK8H +LpuQKbSbIERsmR+QqQ== +-----END CERTIFICATE----- diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000..550ac8a --- /dev/null +++ b/src/index.js @@ -0,0 +1,43 @@ +const fs = require('fs'); +const pg = require('pg'); + +// Did this on a local machine, so no Yandex Postgres server :) +const pg_config = { + connectionString: "postgres://work:123@localhost/work", + // ssl: { + // rejectUnauthorized: true, + // ca: fs + // .readFileSync("./root.crt") + // .toString(), + + // } +} + +const conn = new pg.Client(pg_config); + +conn.connect((err) => { + if (err) throw err; +}); + +const start = async () => { + + await conn.query(` + CREATE TABLE IF NOT EXISTS lecam ( + ID SERIAL, + name TEXT, + data JSONB + ); + `); + + let pages = (await (await fetch('https://rickandmortyapi.com/api/character')).json()).info.pages; + + for (let i = 1; i <= pages; i++) { // API pages are numbered from 1 + let data_from_page = (await (await fetch(`https://rickandmortyapi.com/api/character?page=${i}`)).json()).results; + for (const character of data_from_page) { // await each insert so "Done." prints only after all rows are stored + await conn.query(`INSERT INTO lecam (name, data) VALUES ($1::text, $2::jsonb)`, [character.name, character]); + } + } + console.log("Done."); +} + +start() \ No newline at end of file