From a8b8883b11297960768b81b3ff12fde4e268129e Mon Sep 17 00:00:00 2001 From: Aravind142857 Date: Fri, 9 Jun 2023 20:08:47 -0500 Subject: [PATCH] Node JS version --- index.js | 67 - node_modules/.bin/ejs | 1 + node_modules/.bin/jake | 1 + node_modules/.bin/mime | 1 + node_modules/.package-lock.json | 820 +- node_modules/@types/long/LICENSE | 21 + node_modules/@types/long/README.md | 16 + node_modules/@types/long/index.d.ts | 389 + node_modules/@types/long/package.json | 25 + node_modules/accepts/HISTORY.md | 243 + node_modules/accepts/LICENSE | 23 + node_modules/accepts/README.md | 140 + node_modules/accepts/index.js | 238 + node_modules/accepts/package.json | 47 + node_modules/adm-zip/LICENSE | 21 + node_modules/adm-zip/README.md | 65 + node_modules/adm-zip/adm-zip.js | 786 ++ node_modules/adm-zip/headers/entryHeader.js | 338 + node_modules/adm-zip/headers/index.js | 2 + node_modules/adm-zip/headers/mainHeader.js | 130 + node_modules/adm-zip/methods/deflater.js | 33 + node_modules/adm-zip/methods/index.js | 3 + node_modules/adm-zip/methods/inflater.js | 31 + node_modules/adm-zip/methods/zipcrypto.js | 170 + node_modules/adm-zip/package.json | 48 + node_modules/adm-zip/util/constants.js | 142 + node_modules/adm-zip/util/errors.js | 35 + node_modules/adm-zip/util/fattr.js | 79 + node_modules/adm-zip/util/fileSystem.js | 11 + node_modules/adm-zip/util/index.js | 4 + node_modules/adm-zip/util/utils.js | 247 + node_modules/adm-zip/zipEntry.js | 333 + node_modules/adm-zip/zipFile.js | 384 + node_modules/ansi-styles/index.d.ts | 345 + node_modules/ansi-styles/index.js | 163 + node_modules/ansi-styles/license | 9 + node_modules/ansi-styles/package.json | 56 + node_modules/ansi-styles/readme.md | 152 + node_modules/array-flatten/LICENSE | 21 + node_modules/array-flatten/README.md | 43 + node_modules/array-flatten/array-flatten.js | 64 + node_modules/array-flatten/package.json | 39 + node_modules/async/CHANGELOG.md | 348 + node_modules/async/LICENSE | 19 + 
node_modules/async/README.md | 59 + node_modules/async/all.js | 119 + node_modules/async/allLimit.js | 46 + node_modules/async/allSeries.js | 45 + node_modules/async/any.js | 122 + node_modules/async/anyLimit.js | 47 + node_modules/async/anySeries.js | 46 + node_modules/async/apply.js | 55 + node_modules/async/applyEach.js | 57 + node_modules/async/applyEachSeries.js | 37 + node_modules/async/asyncify.js | 118 + node_modules/async/auto.js | 333 + node_modules/async/autoInject.js | 182 + node_modules/async/bower.json | 17 + node_modules/async/cargo.js | 63 + node_modules/async/cargoQueue.js | 71 + node_modules/async/compose.js | 55 + node_modules/async/concat.js | 115 + node_modules/async/concatLimit.js | 60 + node_modules/async/concatSeries.js | 41 + node_modules/async/constant.js | 55 + node_modules/async/detect.js | 96 + node_modules/async/detectLimit.js | 48 + node_modules/async/detectSeries.js | 47 + node_modules/async/dir.js | 43 + node_modules/async/dist/async.js | 6059 ++++++++++++ node_modules/async/dist/async.min.js | 1 + node_modules/async/dist/async.mjs | 5947 ++++++++++++ node_modules/async/doDuring.js | 68 + node_modules/async/doUntil.js | 46 + node_modules/async/doWhilst.js | 68 + node_modules/async/during.js | 78 + node_modules/async/each.js | 129 + node_modules/async/eachLimit.js | 50 + node_modules/async/eachOf.js | 185 + node_modules/async/eachOfLimit.js | 47 + node_modules/async/eachOfSeries.js | 39 + node_modules/async/eachSeries.js | 44 + node_modules/async/ensureAsync.js | 67 + node_modules/async/every.js | 119 + node_modules/async/everyLimit.js | 46 + node_modules/async/everySeries.js | 45 + node_modules/async/filter.js | 93 + node_modules/async/filterLimit.js | 45 + node_modules/async/filterSeries.js | 43 + node_modules/async/find.js | 96 + node_modules/async/findLimit.js | 48 + node_modules/async/findSeries.js | 47 + node_modules/async/flatMap.js | 115 + node_modules/async/flatMapLimit.js | 60 + node_modules/async/flatMapSeries.js | 41 + 
node_modules/async/foldl.js | 153 + node_modules/async/foldr.js | 41 + node_modules/async/forEach.js | 129 + node_modules/async/forEachLimit.js | 50 + node_modules/async/forEachOf.js | 185 + node_modules/async/forEachOfLimit.js | 47 + node_modules/async/forEachOfSeries.js | 39 + node_modules/async/forEachSeries.js | 44 + node_modules/async/forever.js | 68 + node_modules/async/groupBy.js | 108 + node_modules/async/groupByLimit.js | 71 + node_modules/async/groupBySeries.js | 36 + node_modules/async/index.js | 588 ++ node_modules/async/inject.js | 153 + .../async/internal/DoublyLinkedList.js | 92 + node_modules/async/internal/Heap.js | 120 + node_modules/async/internal/applyEach.js | 29 + .../async/internal/asyncEachOfLimit.js | 75 + node_modules/async/internal/awaitify.js | 27 + node_modules/async/internal/breakLoop.js | 10 + node_modules/async/internal/consoleFunc.js | 31 + node_modules/async/internal/createTester.js | 40 + node_modules/async/internal/eachOfLimit.js | 90 + node_modules/async/internal/filter.js | 55 + node_modules/async/internal/getIterator.js | 11 + node_modules/async/internal/initialParams.js | 14 + node_modules/async/internal/isArrayLike.js | 10 + node_modules/async/internal/iterator.js | 57 + node_modules/async/internal/map.js | 30 + node_modules/async/internal/once.js | 17 + node_modules/async/internal/onlyOnce.js | 15 + node_modules/async/internal/parallel.js | 34 + .../async/internal/promiseCallback.js | 23 + node_modules/async/internal/queue.js | 294 + node_modules/async/internal/range.js | 14 + node_modules/async/internal/reject.js | 26 + node_modules/async/internal/setImmediate.js | 34 + node_modules/async/internal/withoutIndex.js | 10 + node_modules/async/internal/wrapAsync.js | 34 + node_modules/async/log.js | 41 + node_modules/async/map.js | 142 + node_modules/async/mapLimit.js | 45 + node_modules/async/mapSeries.js | 44 + node_modules/async/mapValues.js | 152 + node_modules/async/mapValuesLimit.js | 61 + 
node_modules/async/mapValuesSeries.js | 37 + node_modules/async/memoize.js | 91 + node_modules/async/nextTick.js | 52 + node_modules/async/package.json | 75 + node_modules/async/parallel.js | 180 + node_modules/async/parallelLimit.js | 41 + node_modules/async/priorityQueue.js | 86 + node_modules/async/queue.js | 167 + node_modules/async/race.js | 67 + node_modules/async/reduce.js | 153 + node_modules/async/reduceRight.js | 41 + node_modules/async/reflect.js | 78 + node_modules/async/reflectAll.js | 93 + node_modules/async/reject.js | 87 + node_modules/async/rejectLimit.js | 45 + node_modules/async/rejectSeries.js | 43 + node_modules/async/retry.js | 159 + node_modules/async/retryable.js | 77 + node_modules/async/select.js | 93 + node_modules/async/selectLimit.js | 45 + node_modules/async/selectSeries.js | 43 + node_modules/async/seq.js | 79 + node_modules/async/series.js | 186 + node_modules/async/setImmediate.js | 45 + node_modules/async/some.js | 122 + node_modules/async/someLimit.js | 47 + node_modules/async/someSeries.js | 46 + node_modules/async/sortBy.js | 190 + node_modules/async/timeout.js | 89 + node_modules/async/times.js | 50 + node_modules/async/timesLimit.js | 43 + node_modules/async/timesSeries.js | 32 + node_modules/async/transform.js | 173 + node_modules/async/tryEach.js | 78 + node_modules/async/unmemoize.js | 25 + node_modules/async/until.js | 61 + node_modules/async/waterfall.js | 105 + node_modules/async/whilst.js | 78 + node_modules/async/wrapSync.js | 118 + .../balanced-match/.github/FUNDING.yml | 2 + node_modules/balanced-match/LICENSE.md | 21 + node_modules/balanced-match/README.md | 97 + node_modules/balanced-match/index.js | 62 + node_modules/balanced-match/package.json | 48 + node_modules/body-parser/HISTORY.md | 665 ++ node_modules/body-parser/LICENSE | 23 + node_modules/body-parser/README.md | 465 + node_modules/body-parser/SECURITY.md | 25 + node_modules/body-parser/index.js | 156 + node_modules/body-parser/lib/read.js | 205 + 
node_modules/body-parser/lib/types/json.js | 247 + node_modules/body-parser/lib/types/raw.js | 101 + node_modules/body-parser/lib/types/text.js | 121 + .../body-parser/lib/types/urlencoded.js | 284 + node_modules/body-parser/package.json | 56 + node_modules/brace-expansion/LICENSE | 21 + node_modules/brace-expansion/README.md | 129 + node_modules/brace-expansion/index.js | 201 + node_modules/brace-expansion/package.json | 47 + node_modules/bytes/History.md | 97 + node_modules/bytes/LICENSE | 23 + node_modules/bytes/Readme.md | 152 + node_modules/bytes/index.js | 170 + node_modules/bytes/package.json | 42 + node_modules/call-bind/.eslintignore | 1 + node_modules/call-bind/.eslintrc | 17 + node_modules/call-bind/.github/FUNDING.yml | 12 + node_modules/call-bind/.nycrc | 13 + node_modules/call-bind/CHANGELOG.md | 42 + node_modules/call-bind/LICENSE | 21 + node_modules/call-bind/README.md | 2 + node_modules/call-bind/callBound.js | 15 + node_modules/call-bind/index.js | 47 + node_modules/call-bind/package.json | 80 + node_modules/call-bind/test/callBound.js | 55 + node_modules/call-bind/test/index.js | 66 + node_modules/cassandra-driver/.eslintignore | 8 + node_modules/cassandra-driver/.travis.yml | 44 + node_modules/cassandra-driver/CHANGELOG.md | 611 ++ node_modules/cassandra-driver/Jenkinsfile | 648 ++ node_modules/cassandra-driver/LICENSE.txt | 177 + node_modules/cassandra-driver/NOTICE.txt | 4 + node_modules/cassandra-driver/README.md | 294 + node_modules/cassandra-driver/build.yaml | 70 + node_modules/cassandra-driver/index.d.ts | 414 + node_modules/cassandra-driver/index.js | 48 + .../lib/auth/base-dse-authenticator.js | 76 + .../lib/auth/dse-gssapi-auth-provider.js | 231 + .../lib/auth/dse-plain-text-auth-provider.js | 110 + .../lib/auth/gssapi-client.js | 155 + .../cassandra-driver/lib/auth/index.d.ts | 47 + .../cassandra-driver/lib/auth/index.js | 39 + .../lib/auth/no-auth-provider.js | 70 + .../lib/auth/plain-text-auth-provider.js | 81 + 
.../cassandra-driver/lib/auth/provider.js | 79 + .../cassandra-driver/lib/client-options.js | 361 + node_modules/cassandra-driver/lib/client.js | 1180 +++ .../lib/concurrent/index.d.ts | 45 + .../cassandra-driver/lib/concurrent/index.js | 335 + .../cassandra-driver/lib/connection.js | 790 ++ .../lib/control-connection.js | 1073 +++ .../lib/datastax/cloud/index.js | 338 + .../lib/datastax/graph/complex-type-helper.js | 99 + .../datastax/graph/custom-type-serializers.js | 362 + .../lib/datastax/graph/graph-executor.js | 280 + .../lib/datastax/graph/graph-serializer.js | 260 + .../lib/datastax/graph/index.d.ts | 92 + .../lib/datastax/graph/index.js | 82 + .../lib/datastax/graph/options.js | 334 + .../lib/datastax/graph/result-set.js | 156 + .../lib/datastax/graph/structure.js | 167 + .../lib/datastax/graph/type-serializers.js | 501 + .../lib/datastax/graph/wrappers.js | 84 + .../cassandra-driver/lib/datastax/index.d.ts | 24 + .../cassandra-driver/lib/datastax/index.js | 28 + .../lib/datastax/search/date-range.js | 537 ++ .../lib/datastax/search/index.d.ts | 58 + .../lib/datastax/search/index.js | 30 + node_modules/cassandra-driver/lib/encoder.js | 1865 ++++ node_modules/cassandra-driver/lib/errors.js | 175 + .../cassandra-driver/lib/execution-options.js | 619 ++ .../cassandra-driver/lib/execution-profile.js | 266 + .../cassandra-driver/lib/geometry/geometry.js | 133 + .../cassandra-driver/lib/geometry/index.d.ts | 67 + .../cassandra-driver/lib/geometry/index.js | 30 + .../lib/geometry/line-string.js | 197 + .../cassandra-driver/lib/geometry/point.js | 134 + .../cassandra-driver/lib/geometry/polygon.js | 239 + .../lib/host-connection-pool.js | 522 + node_modules/cassandra-driver/lib/host.js | 658 ++ .../cassandra-driver/lib/insights-client.js | 492 + .../cassandra-driver/lib/mapping/cache.js | 207 + .../lib/mapping/doc-info-adapter.js | 162 + .../cassandra-driver/lib/mapping/index.d.ts | 189 + .../cassandra-driver/lib/mapping/index.js | 33 + 
.../cassandra-driver/lib/mapping/mapper.js | 193 + .../lib/mapping/mapping-handler.js | 412 + .../lib/mapping/model-batch-item.js | 191 + .../lib/mapping/model-batch-mapper.js | 125 + .../lib/mapping/model-mapper.js | 306 + .../lib/mapping/model-mapping-info.js | 194 + .../lib/mapping/object-selector.js | 321 + .../cassandra-driver/lib/mapping/q.js | 154 + .../lib/mapping/query-generator.js | 446 + .../lib/mapping/result-mapper.js | 112 + .../cassandra-driver/lib/mapping/result.js | 136 + .../lib/mapping/table-mappings.js | 122 + .../cassandra-driver/lib/mapping/tree.js | 151 + .../lib/metadata/aggregate.js | 78 + .../lib/metadata/client-state.js | 114 + .../lib/metadata/data-collection.js | 173 + .../lib/metadata/event-debouncer.js | 164 + .../cassandra-driver/lib/metadata/index.d.ts | 211 + .../cassandra-driver/lib/metadata/index.js | 1024 ++ .../lib/metadata/materialized-view.js | 48 + .../lib/metadata/schema-function.js | 97 + .../lib/metadata/schema-index.js | 149 + .../lib/metadata/schema-parser.js | 1177 +++ .../lib/metadata/table-metadata.js | 77 + .../lib/metrics/client-metrics.js | 129 + .../lib/metrics/default-metrics.js | 198 + .../cassandra-driver/lib/metrics/index.d.ts | 89 + .../cassandra-driver/lib/metrics/index.js | 28 + .../cassandra-driver/lib/operation-state.js | 164 + .../lib/policies/address-resolution.js | 139 + .../cassandra-driver/lib/policies/index.d.ts | 210 + .../cassandra-driver/lib/policies/index.js | 84 + .../lib/policies/load-balancing.js | 883 ++ .../lib/policies/reconnection.js | 157 + .../cassandra-driver/lib/policies/retry.js | 276 + .../lib/policies/speculative-execution.js | 143 + .../lib/policies/timestamp-generation.js | 170 + .../cassandra-driver/lib/prepare-handler.js | 297 + .../cassandra-driver/lib/promise-utils.js | 177 + node_modules/cassandra-driver/lib/readers.js | 542 ++ .../cassandra-driver/lib/request-execution.js | 497 + .../cassandra-driver/lib/request-handler.js | 311 + 
node_modules/cassandra-driver/lib/requests.js | 542 ++ .../cassandra-driver/lib/stream-id-stack.js | 200 + node_modules/cassandra-driver/lib/streams.js | 582 ++ node_modules/cassandra-driver/lib/token.js | 286 + .../cassandra-driver/lib/tokenizer.js | 584 ++ .../cassandra-driver/lib/tracker/index.d.ts | 58 + .../cassandra-driver/lib/tracker/index.js | 25 + .../lib/tracker/request-logger.js | 294 + .../lib/tracker/request-tracker.js | 74 + .../cassandra-driver/lib/types/big-decimal.js | 271 + .../cassandra-driver/lib/types/duration.js | 714 ++ .../cassandra-driver/lib/types/index.d.ts | 427 + .../cassandra-driver/lib/types/index.js | 630 ++ .../lib/types/inet-address.js | 248 + .../cassandra-driver/lib/types/integer.js | 855 ++ .../cassandra-driver/lib/types/local-date.js | 252 + .../cassandra-driver/lib/types/local-time.js | 295 + .../lib/types/mutable-long.js | 329 + .../lib/types/protocol-version.js | 349 + .../cassandra-driver/lib/types/result-set.js | 275 + .../lib/types/result-stream.js | 148 + .../cassandra-driver/lib/types/row.js | 80 + .../cassandra-driver/lib/types/time-uuid.js | 410 + .../cassandra-driver/lib/types/tuple.js | 102 + .../cassandra-driver/lib/types/uuid.js | 153 + .../lib/types/version-number.js | 144 + node_modules/cassandra-driver/lib/utils.js | 1087 +++ node_modules/cassandra-driver/lib/writers.js | 310 + node_modules/cassandra-driver/package.json | 56 + node_modules/chalk/index.d.ts | 415 + node_modules/chalk/license | 9 + node_modules/chalk/package.json | 68 + node_modules/chalk/readme.md | 341 + node_modules/chalk/source/index.js | 229 + node_modules/chalk/source/templates.js | 134 + node_modules/chalk/source/util.js | 39 + node_modules/color-convert/CHANGELOG.md | 54 + node_modules/color-convert/LICENSE | 21 + node_modules/color-convert/README.md | 68 + node_modules/color-convert/conversions.js | 839 ++ node_modules/color-convert/index.js | 81 + node_modules/color-convert/package.json | 48 + node_modules/color-convert/route.js | 97 + 
node_modules/color-name/LICENSE | 8 + node_modules/color-name/README.md | 11 + node_modules/color-name/index.js | 152 + node_modules/color-name/package.json | 28 + node_modules/concat-map/.travis.yml | 4 + node_modules/concat-map/LICENSE | 18 + node_modules/concat-map/README.markdown | 62 + node_modules/concat-map/example/map.js | 6 + node_modules/concat-map/index.js | 13 + node_modules/concat-map/package.json | 43 + node_modules/concat-map/test/map.js | 39 + node_modules/content-disposition/HISTORY.md | 60 + node_modules/content-disposition/LICENSE | 22 + node_modules/content-disposition/README.md | 142 + node_modules/content-disposition/index.js | 458 + node_modules/content-disposition/package.json | 44 + node_modules/content-type/HISTORY.md | 29 + node_modules/content-type/LICENSE | 22 + node_modules/content-type/README.md | 94 + node_modules/content-type/index.js | 225 + node_modules/content-type/package.json | 42 + node_modules/cookie-signature/.npmignore | 4 + node_modules/cookie-signature/History.md | 38 + node_modules/cookie-signature/Readme.md | 42 + node_modules/cookie-signature/index.js | 51 + node_modules/cookie-signature/package.json | 18 + node_modules/cookie/HISTORY.md | 142 + node_modules/cookie/LICENSE | 24 + node_modules/cookie/README.md | 302 + node_modules/cookie/SECURITY.md | 25 + node_modules/cookie/index.js | 270 + node_modules/cookie/package.json | 44 + node_modules/debug/.coveralls.yml | 1 + node_modules/debug/.eslintrc | 11 + node_modules/debug/.npmignore | 9 + node_modules/debug/.travis.yml | 14 + node_modules/debug/CHANGELOG.md | 362 + node_modules/debug/LICENSE | 19 + node_modules/debug/Makefile | 50 + node_modules/debug/README.md | 312 + node_modules/debug/component.json | 19 + node_modules/debug/karma.conf.js | 70 + node_modules/debug/node.js | 1 + node_modules/debug/package.json | 49 + node_modules/debug/src/browser.js | 185 + node_modules/debug/src/debug.js | 202 + node_modules/debug/src/index.js | 10 + 
node_modules/debug/src/inspector-log.js | 15 + node_modules/debug/src/node.js | 248 + node_modules/depd/History.md | 103 + node_modules/depd/LICENSE | 22 + node_modules/depd/Readme.md | 280 + node_modules/depd/index.js | 538 ++ node_modules/depd/lib/browser/index.js | 77 + node_modules/depd/package.json | 45 + node_modules/destroy/LICENSE | 23 + node_modules/destroy/README.md | 63 + node_modules/destroy/index.js | 209 + node_modules/destroy/package.json | 48 + node_modules/ee-first/LICENSE | 22 + node_modules/ee-first/README.md | 80 + node_modules/ee-first/index.js | 95 + node_modules/ee-first/package.json | 29 + node_modules/ejs/LICENSE | 202 + node_modules/ejs/README.md | 344 + node_modules/ejs/bin/cli.js | 211 + node_modules/ejs/ejs.js | 1736 ++++ node_modules/ejs/ejs.min.js | 1 + node_modules/ejs/jakefile.js | 88 + node_modules/ejs/lib/ejs.js | 951 ++ node_modules/ejs/lib/utils.js | 241 + node_modules/ejs/package.json | 42 + node_modules/ejs/usage.txt | 24 + node_modules/encodeurl/HISTORY.md | 14 + node_modules/encodeurl/LICENSE | 22 + node_modules/encodeurl/README.md | 128 + node_modules/encodeurl/index.js | 60 + node_modules/encodeurl/package.json | 40 + node_modules/escape-html/LICENSE | 24 + node_modules/escape-html/Readme.md | 43 + node_modules/escape-html/index.js | 78 + node_modules/escape-html/package.json | 24 + node_modules/etag/HISTORY.md | 83 + node_modules/etag/LICENSE | 22 + node_modules/etag/README.md | 159 + node_modules/etag/index.js | 131 + node_modules/etag/package.json | 47 + node_modules/express/History.md | 3588 +++++++ node_modules/express/LICENSE | 24 + node_modules/express/Readme.md | 166 + node_modules/express/index.js | 11 + node_modules/express/lib/application.js | 661 ++ node_modules/express/lib/express.js | 116 + node_modules/express/lib/middleware/init.js | 43 + node_modules/express/lib/middleware/query.js | 47 + node_modules/express/lib/request.js | 525 + node_modules/express/lib/response.js | 1169 +++ 
node_modules/express/lib/router/index.js | 673 ++ node_modules/express/lib/router/layer.js | 181 + node_modules/express/lib/router/route.js | 225 + node_modules/express/lib/utils.js | 304 + node_modules/express/lib/view.js | 182 + .../node_modules/body-parser/HISTORY.md | 657 ++ .../express/node_modules/body-parser/LICENSE | 23 + .../node_modules/body-parser/README.md | 464 + .../node_modules/body-parser/SECURITY.md | 25 + .../express/node_modules/body-parser/index.js | 156 + .../node_modules/body-parser/lib/read.js | 205 + .../body-parser/lib/types/json.js | 236 + .../node_modules/body-parser/lib/types/raw.js | 101 + .../body-parser/lib/types/text.js | 121 + .../body-parser/lib/types/urlencoded.js | 284 + .../node_modules/body-parser/package.json | 56 + .../express/node_modules/raw-body/HISTORY.md | 303 + .../express/node_modules/raw-body/LICENSE | 22 + .../express/node_modules/raw-body/README.md | 223 + .../express/node_modules/raw-body/SECURITY.md | 24 + .../express/node_modules/raw-body/index.d.ts | 87 + .../express/node_modules/raw-body/index.js | 329 + .../node_modules/raw-body/package.json | 49 + node_modules/express/package.json | 99 + node_modules/filelist/README.md | 84 + node_modules/filelist/index.d.ts | 110 + node_modules/filelist/index.js | 495 + node_modules/filelist/jakefile.js | 15 + .../brace-expansion/.github/FUNDING.yml | 2 + .../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/README.md | 135 + .../node_modules/brace-expansion/index.js | 203 + .../node_modules/brace-expansion/package.json | 46 + .../filelist/node_modules/minimatch/LICENSE | 15 + .../filelist/node_modules/minimatch/README.md | 259 + .../node_modules/minimatch/lib/path.js | 4 + .../node_modules/minimatch/minimatch.js | 944 ++ .../node_modules/minimatch/package.json | 35 + node_modules/filelist/package.json | 28 + node_modules/finalhandler/HISTORY.md | 195 + node_modules/finalhandler/LICENSE | 22 + node_modules/finalhandler/README.md | 147 + 
node_modules/finalhandler/SECURITY.md | 25 + node_modules/finalhandler/index.js | 336 + node_modules/finalhandler/package.json | 46 + node_modules/forwarded/HISTORY.md | 21 + node_modules/forwarded/LICENSE | 22 + node_modules/forwarded/README.md | 57 + node_modules/forwarded/index.js | 90 + node_modules/forwarded/package.json | 45 + node_modules/fresh/HISTORY.md | 70 + node_modules/fresh/LICENSE | 23 + node_modules/fresh/README.md | 119 + node_modules/fresh/index.js | 137 + node_modules/fresh/package.json | 46 + node_modules/function-bind/.editorconfig | 20 + node_modules/function-bind/.eslintrc | 15 + node_modules/function-bind/.jscs.json | 176 + node_modules/function-bind/.npmignore | 22 + node_modules/function-bind/.travis.yml | 168 + node_modules/function-bind/LICENSE | 20 + node_modules/function-bind/README.md | 48 + node_modules/function-bind/implementation.js | 52 + node_modules/function-bind/index.js | 5 + node_modules/function-bind/package.json | 63 + node_modules/function-bind/test/.eslintrc | 9 + node_modules/function-bind/test/index.js | 252 + node_modules/get-intrinsic/.eslintrc | 38 + .../get-intrinsic/.github/FUNDING.yml | 12 + node_modules/get-intrinsic/.nycrc | 9 + node_modules/get-intrinsic/CHANGELOG.md | 117 + node_modules/get-intrinsic/LICENSE | 21 + node_modules/get-intrinsic/README.md | 71 + node_modules/get-intrinsic/index.js | 351 + node_modules/get-intrinsic/package.json | 93 + .../get-intrinsic/test/GetIntrinsic.js | 274 + node_modules/has-flag/index.d.ts | 39 + node_modules/has-flag/index.js | 8 + node_modules/has-flag/license | 9 + node_modules/has-flag/package.json | 46 + node_modules/has-flag/readme.md | 89 + node_modules/has-proto/.eslintrc | 5 + node_modules/has-proto/.github/FUNDING.yml | 12 + node_modules/has-proto/CHANGELOG.md | 23 + node_modules/has-proto/LICENSE | 21 + node_modules/has-proto/README.md | 38 + node_modules/has-proto/index.js | 11 + node_modules/has-proto/package.json | 74 + node_modules/has-proto/test/index.js | 
19 + node_modules/has-symbols/.eslintrc | 11 + node_modules/has-symbols/.github/FUNDING.yml | 12 + node_modules/has-symbols/.nycrc | 9 + node_modules/has-symbols/CHANGELOG.md | 75 + node_modules/has-symbols/LICENSE | 21 + node_modules/has-symbols/README.md | 46 + node_modules/has-symbols/index.js | 13 + node_modules/has-symbols/package.json | 101 + node_modules/has-symbols/shams.js | 42 + node_modules/has-symbols/test/index.js | 22 + .../has-symbols/test/shams/core-js.js | 28 + .../test/shams/get-own-property-symbols.js | 28 + node_modules/has-symbols/test/tests.js | 56 + node_modules/has/LICENSE-MIT | 22 + node_modules/has/README.md | 18 + node_modules/has/package.json | 48 + node_modules/has/src/index.js | 5 + node_modules/has/test/index.js | 10 + node_modules/http-errors/HISTORY.md | 180 + node_modules/http-errors/LICENSE | 23 + node_modules/http-errors/README.md | 169 + node_modules/http-errors/index.js | 289 + node_modules/http-errors/package.json | 50 + node_modules/iconv-lite/Changelog.md | 162 + node_modules/iconv-lite/LICENSE | 21 + node_modules/iconv-lite/README.md | 156 + .../iconv-lite/encodings/dbcs-codec.js | 555 ++ .../iconv-lite/encodings/dbcs-data.js | 176 + node_modules/iconv-lite/encodings/index.js | 22 + node_modules/iconv-lite/encodings/internal.js | 188 + .../iconv-lite/encodings/sbcs-codec.js | 72 + .../encodings/sbcs-data-generated.js | 451 + .../iconv-lite/encodings/sbcs-data.js | 174 + .../encodings/tables/big5-added.json | 122 + .../iconv-lite/encodings/tables/cp936.json | 264 + .../iconv-lite/encodings/tables/cp949.json | 273 + .../iconv-lite/encodings/tables/cp950.json | 177 + .../iconv-lite/encodings/tables/eucjp.json | 182 + .../encodings/tables/gb18030-ranges.json | 1 + .../encodings/tables/gbk-added.json | 55 + .../iconv-lite/encodings/tables/shiftjis.json | 125 + node_modules/iconv-lite/encodings/utf16.js | 177 + node_modules/iconv-lite/encodings/utf7.js | 290 + node_modules/iconv-lite/lib/bom-handling.js | 52 + 
node_modules/iconv-lite/lib/extend-node.js | 217 + node_modules/iconv-lite/lib/index.d.ts | 24 + node_modules/iconv-lite/lib/index.js | 153 + node_modules/iconv-lite/lib/streams.js | 121 + node_modules/iconv-lite/package.json | 46 + node_modules/inherits/LICENSE | 16 + node_modules/inherits/README.md | 42 + node_modules/inherits/inherits.js | 9 + node_modules/inherits/inherits_browser.js | 27 + node_modules/inherits/package.json | 29 + node_modules/ipaddr.js/LICENSE | 19 + node_modules/ipaddr.js/README.md | 233 + node_modules/ipaddr.js/ipaddr.min.js | 1 + node_modules/ipaddr.js/lib/ipaddr.js | 673 ++ node_modules/ipaddr.js/lib/ipaddr.js.d.ts | 68 + node_modules/ipaddr.js/package.json | 35 + node_modules/jake/Makefile | 44 + node_modules/jake/README.md | 17 + node_modules/jake/bin/bash_completion.sh | 41 + node_modules/jake/bin/cli.js | 31 + node_modules/jake/jakefile.js | 112 + node_modules/jake/lib/api.js | 409 + node_modules/jake/lib/jake.js | 330 + node_modules/jake/lib/loader.js | 173 + node_modules/jake/lib/namespace.js | 115 + node_modules/jake/lib/package_task.js | 406 + node_modules/jake/lib/parseargs.js | 134 + node_modules/jake/lib/program.js | 282 + node_modules/jake/lib/publish_task.js | 290 + node_modules/jake/lib/rule.js | 311 + node_modules/jake/lib/task/directory_task.js | 30 + node_modules/jake/lib/task/file_task.js | 126 + node_modules/jake/lib/task/index.js | 9 + node_modules/jake/lib/task/task.js | 449 + node_modules/jake/lib/test_task.js | 270 + node_modules/jake/lib/utils/file.js | 286 + node_modules/jake/lib/utils/index.js | 297 + node_modules/jake/lib/utils/logger.js | 24 + node_modules/jake/package.json | 42 + .../jake/test/integration/concurrent.js | 45 + node_modules/jake/test/integration/file.js | 228 + .../jake/test/integration/file_task.js | 141 + node_modules/jake/test/integration/helpers.js | 80 + .../jake/test/integration/jakefile.js | 352 + .../integration/jakelib/concurrent.jake.js | 113 + 
.../test/integration/jakelib/publish.jake.js | 49 + .../jakelib/required_module.jake.js | 10 + .../test/integration/jakelib/rule.jake.js | 222 + .../jake/test/integration/list_tasks.js | 15 + .../jake/test/integration/publish_task.js | 27 + node_modules/jake/test/integration/rule.js | 217 + node_modules/jake/test/integration/selfdep.js | 42 + .../jake/test/integration/task_base.js | 167 + node_modules/jake/test/unit/jakefile.js | 36 + node_modules/jake/test/unit/namespace.js | 77 + node_modules/jake/test/unit/parseargs.js | 169 + node_modules/jake/usage.txt | 17 + node_modules/long/.npmignore | 3 + node_modules/long/.travis.yml | 5 + node_modules/long/LICENSE | 202 + node_modules/long/Long.png | Bin 0 -> 1387 bytes node_modules/long/README.md | 513 + node_modules/long/bower.json | 9 + node_modules/long/dist/Long.js | 1079 +++ node_modules/long/dist/Long.min.js | 22 + node_modules/long/dist/Long.min.js.gz | Bin 0 -> 2688 bytes node_modules/long/dist/Long.min.map | 8 + node_modules/long/dist/README.md | 16 + node_modules/long/doco/INDEX.md | 6 + node_modules/long/doco/Long.md | 473 + node_modules/long/donate.png | Bin 0 -> 1553 bytes node_modules/long/externs/Long.js | 321 + node_modules/long/index.js | 18 + node_modules/long/jsdoc.json | 13 + node_modules/long/package.json | 30 + node_modules/long/scripts/build.js | 28 + node_modules/long/src/Long.js | 1042 ++ node_modules/long/src/bower.json | 9 + node_modules/long/src/wrap.js | 38 + node_modules/long/tests/goog.math.long.js | 807 ++ node_modules/long/tests/suite.js | 164 + node_modules/media-typer/HISTORY.md | 22 + node_modules/media-typer/LICENSE | 22 + node_modules/media-typer/README.md | 81 + node_modules/media-typer/index.js | 270 + node_modules/media-typer/package.json | 26 + node_modules/merge-descriptors/HISTORY.md | 21 + node_modules/merge-descriptors/LICENSE | 23 + node_modules/merge-descriptors/README.md | 48 + node_modules/merge-descriptors/index.js | 60 + node_modules/merge-descriptors/package.json | 
32 + node_modules/methods/HISTORY.md | 29 + node_modules/methods/LICENSE | 24 + node_modules/methods/README.md | 51 + node_modules/methods/index.js | 69 + node_modules/methods/package.json | 36 + node_modules/mime-db/HISTORY.md | 507 + node_modules/mime-db/LICENSE | 23 + node_modules/mime-db/README.md | 100 + node_modules/mime-db/db.json | 8519 +++++++++++++++++ node_modules/mime-db/index.js | 12 + node_modules/mime-db/package.json | 60 + node_modules/mime-types/HISTORY.md | 397 + node_modules/mime-types/LICENSE | 23 + node_modules/mime-types/README.md | 113 + node_modules/mime-types/index.js | 188 + node_modules/mime-types/package.json | 44 + node_modules/mime/.npmignore | 0 node_modules/mime/CHANGELOG.md | 164 + node_modules/mime/LICENSE | 21 + node_modules/mime/README.md | 90 + node_modules/mime/cli.js | 8 + node_modules/mime/mime.js | 108 + node_modules/mime/package.json | 44 + node_modules/mime/src/build.js | 53 + node_modules/mime/src/test.js | 60 + node_modules/mime/types.json | 1 + node_modules/minimatch/LICENSE | 15 + node_modules/minimatch/README.md | 230 + node_modules/minimatch/minimatch.js | 947 ++ node_modules/minimatch/package.json | 33 + node_modules/ms/index.js | 152 + node_modules/ms/license.md | 21 + node_modules/ms/package.json | 37 + node_modules/ms/readme.md | 51 + node_modules/negotiator/HISTORY.md | 108 + node_modules/negotiator/LICENSE | 24 + node_modules/negotiator/README.md | 203 + node_modules/negotiator/index.js | 82 + node_modules/negotiator/lib/charset.js | 169 + node_modules/negotiator/lib/encoding.js | 184 + node_modules/negotiator/lib/language.js | 179 + node_modules/negotiator/lib/mediaType.js | 294 + node_modules/negotiator/package.json | 42 + node_modules/object-inspect/.eslintrc | 53 + .../object-inspect/.github/FUNDING.yml | 12 + node_modules/object-inspect/.nycrc | 13 + node_modules/object-inspect/CHANGELOG.md | 370 + node_modules/object-inspect/LICENSE | 21 + node_modules/object-inspect/example/all.js | 23 + 
.../object-inspect/example/circular.js | 6 + node_modules/object-inspect/example/fn.js | 5 + .../object-inspect/example/inspect.js | 10 + node_modules/object-inspect/index.js | 516 + .../object-inspect/package-support.json | 20 + node_modules/object-inspect/package.json | 97 + node_modules/object-inspect/readme.markdown | 86 + node_modules/object-inspect/test-core-js.js | 26 + node_modules/object-inspect/test/bigint.js | 58 + .../object-inspect/test/browser/dom.js | 15 + node_modules/object-inspect/test/circular.js | 16 + node_modules/object-inspect/test/deep.js | 12 + node_modules/object-inspect/test/element.js | 53 + node_modules/object-inspect/test/err.js | 48 + node_modules/object-inspect/test/fakes.js | 29 + node_modules/object-inspect/test/fn.js | 76 + node_modules/object-inspect/test/has.js | 15 + node_modules/object-inspect/test/holes.js | 15 + .../object-inspect/test/indent-option.js | 271 + node_modules/object-inspect/test/inspect.js | 139 + node_modules/object-inspect/test/lowbyte.js | 12 + node_modules/object-inspect/test/number.js | 58 + .../object-inspect/test/quoteStyle.js | 17 + .../object-inspect/test/toStringTag.js | 40 + node_modules/object-inspect/test/undef.js | 12 + node_modules/object-inspect/test/values.js | 211 + node_modules/object-inspect/util.inspect.js | 1 + node_modules/on-finished/HISTORY.md | 98 + node_modules/on-finished/LICENSE | 23 + node_modules/on-finished/README.md | 162 + node_modules/on-finished/index.js | 234 + node_modules/on-finished/package.json | 39 + node_modules/parseurl/HISTORY.md | 58 + node_modules/parseurl/LICENSE | 24 + node_modules/parseurl/README.md | 133 + node_modules/parseurl/index.js | 158 + node_modules/parseurl/package.json | 40 + node_modules/path-to-regexp/History.md | 36 + node_modules/path-to-regexp/LICENSE | 21 + node_modules/path-to-regexp/Readme.md | 35 + node_modules/path-to-regexp/index.js | 129 + node_modules/path-to-regexp/package.json | 30 + node_modules/proxy-addr/HISTORY.md | 161 + 
node_modules/proxy-addr/LICENSE | 22 + node_modules/proxy-addr/README.md | 139 + node_modules/proxy-addr/index.js | 327 + node_modules/proxy-addr/package.json | 47 + node_modules/qs/.editorconfig | 43 + node_modules/qs/.eslintrc | 38 + node_modules/qs/.github/FUNDING.yml | 12 + node_modules/qs/.nycrc | 13 + node_modules/qs/CHANGELOG.md | 546 ++ node_modules/qs/LICENSE.md | 29 + node_modules/qs/README.md | 625 ++ node_modules/qs/dist/qs.js | 2054 ++++ node_modules/qs/lib/formats.js | 23 + node_modules/qs/lib/index.js | 11 + node_modules/qs/lib/parse.js | 263 + node_modules/qs/lib/stringify.js | 326 + node_modules/qs/lib/utils.js | 252 + node_modules/qs/package.json | 77 + node_modules/qs/test/parse.js | 855 ++ node_modules/qs/test/stringify.js | 909 ++ node_modules/qs/test/utils.js | 136 + node_modules/range-parser/HISTORY.md | 56 + node_modules/range-parser/LICENSE | 23 + node_modules/range-parser/README.md | 84 + node_modules/range-parser/index.js | 162 + node_modules/range-parser/package.json | 44 + node_modules/raw-body/HISTORY.md | 308 + node_modules/raw-body/LICENSE | 22 + node_modules/raw-body/README.md | 223 + node_modules/raw-body/SECURITY.md | 24 + node_modules/raw-body/index.d.ts | 87 + node_modules/raw-body/index.js | 336 + node_modules/raw-body/package.json | 49 + node_modules/safe-buffer/LICENSE | 21 + node_modules/safe-buffer/README.md | 584 ++ node_modules/safe-buffer/index.d.ts | 187 + node_modules/safe-buffer/index.js | 65 + node_modules/safe-buffer/package.json | 51 + node_modules/safer-buffer/LICENSE | 21 + node_modules/safer-buffer/Porting-Buffer.md | 268 + node_modules/safer-buffer/Readme.md | 156 + node_modules/safer-buffer/dangerous.js | 58 + node_modules/safer-buffer/package.json | 34 + node_modules/safer-buffer/safer.js | 77 + node_modules/safer-buffer/tests.js | 406 + node_modules/send/HISTORY.md | 521 + node_modules/send/LICENSE | 23 + node_modules/send/README.md | 327 + node_modules/send/SECURITY.md | 24 + node_modules/send/index.js | 
1143 +++ node_modules/send/node_modules/ms/index.js | 162 + node_modules/send/node_modules/ms/license.md | 21 + .../send/node_modules/ms/package.json | 38 + node_modules/send/node_modules/ms/readme.md | 59 + node_modules/send/package.json | 62 + node_modules/serve-static/HISTORY.md | 471 + node_modules/serve-static/LICENSE | 25 + node_modules/serve-static/README.md | 257 + node_modules/serve-static/index.js | 210 + node_modules/serve-static/package.json | 42 + node_modules/setprototypeof/LICENSE | 13 + node_modules/setprototypeof/README.md | 31 + node_modules/setprototypeof/index.d.ts | 2 + node_modules/setprototypeof/index.js | 17 + node_modules/setprototypeof/package.json | 38 + node_modules/setprototypeof/test/index.js | 24 + node_modules/side-channel/.eslintignore | 1 + node_modules/side-channel/.eslintrc | 11 + node_modules/side-channel/.github/FUNDING.yml | 12 + node_modules/side-channel/.nycrc | 13 + node_modules/side-channel/CHANGELOG.md | 65 + node_modules/side-channel/LICENSE | 21 + node_modules/side-channel/README.md | 2 + node_modules/side-channel/index.js | 124 + node_modules/side-channel/package.json | 67 + node_modules/side-channel/test/index.js | 78 + node_modules/statuses/HISTORY.md | 82 + node_modules/statuses/LICENSE | 23 + node_modules/statuses/README.md | 136 + node_modules/statuses/codes.json | 65 + node_modules/statuses/index.js | 146 + node_modules/statuses/package.json | 49 + node_modules/supports-color/browser.js | 5 + node_modules/supports-color/index.js | 135 + node_modules/supports-color/license | 9 + node_modules/supports-color/package.json | 53 + node_modules/supports-color/readme.md | 76 + node_modules/toidentifier/HISTORY.md | 9 + node_modules/toidentifier/LICENSE | 21 + node_modules/toidentifier/README.md | 61 + node_modules/toidentifier/index.js | 32 + node_modules/toidentifier/package.json | 38 + node_modules/type-is/HISTORY.md | 259 + node_modules/type-is/LICENSE | 23 + node_modules/type-is/README.md | 170 + 
node_modules/type-is/index.js | 266 + node_modules/type-is/package.json | 45 + node_modules/unpipe/HISTORY.md | 4 + node_modules/unpipe/LICENSE | 22 + node_modules/unpipe/README.md | 43 + node_modules/unpipe/index.js | 69 + node_modules/unpipe/package.json | 27 + node_modules/utils-merge/.npmignore | 9 + node_modules/utils-merge/LICENSE | 20 + node_modules/utils-merge/README.md | 34 + node_modules/utils-merge/index.js | 23 + node_modules/utils-merge/package.json | 40 + node_modules/vary/HISTORY.md | 39 + node_modules/vary/LICENSE | 22 + node_modules/vary/README.md | 101 + node_modules/vary/index.js | 149 + node_modules/vary/package.json | 43 + package-lock.json | 827 +- package.json | 19 +- public/css/error.css | 4 + public/css/index.css | 42 + public/error.html | 11 + public/images/cat.gif | Bin 0 -> 253794 bytes public/index.html | 29 + public/src/index.js | 105 + result.js | 103 +- 894 files changed, 152408 insertions(+), 73 deletions(-) delete mode 100644 index.js create mode 120000 node_modules/.bin/ejs create mode 120000 node_modules/.bin/jake create mode 120000 node_modules/.bin/mime create mode 100755 node_modules/@types/long/LICENSE create mode 100755 node_modules/@types/long/README.md create mode 100755 node_modules/@types/long/index.d.ts create mode 100755 node_modules/@types/long/package.json create mode 100644 node_modules/accepts/HISTORY.md create mode 100644 node_modules/accepts/LICENSE create mode 100644 node_modules/accepts/README.md create mode 100644 node_modules/accepts/index.js create mode 100644 node_modules/accepts/package.json create mode 100644 node_modules/adm-zip/LICENSE create mode 100644 node_modules/adm-zip/README.md create mode 100644 node_modules/adm-zip/adm-zip.js create mode 100644 node_modules/adm-zip/headers/entryHeader.js create mode 100644 node_modules/adm-zip/headers/index.js create mode 100644 node_modules/adm-zip/headers/mainHeader.js create mode 100644 node_modules/adm-zip/methods/deflater.js create mode 100644 
node_modules/adm-zip/methods/index.js create mode 100644 node_modules/adm-zip/methods/inflater.js create mode 100644 node_modules/adm-zip/methods/zipcrypto.js create mode 100644 node_modules/adm-zip/package.json create mode 100644 node_modules/adm-zip/util/constants.js create mode 100644 node_modules/adm-zip/util/errors.js create mode 100644 node_modules/adm-zip/util/fattr.js create mode 100644 node_modules/adm-zip/util/fileSystem.js create mode 100644 node_modules/adm-zip/util/index.js create mode 100644 node_modules/adm-zip/util/utils.js create mode 100644 node_modules/adm-zip/zipEntry.js create mode 100644 node_modules/adm-zip/zipFile.js create mode 100644 node_modules/ansi-styles/index.d.ts create mode 100644 node_modules/ansi-styles/index.js create mode 100644 node_modules/ansi-styles/license create mode 100644 node_modules/ansi-styles/package.json create mode 100644 node_modules/ansi-styles/readme.md create mode 100644 node_modules/array-flatten/LICENSE create mode 100644 node_modules/array-flatten/README.md create mode 100644 node_modules/array-flatten/array-flatten.js create mode 100644 node_modules/array-flatten/package.json create mode 100644 node_modules/async/CHANGELOG.md create mode 100644 node_modules/async/LICENSE create mode 100644 node_modules/async/README.md create mode 100644 node_modules/async/all.js create mode 100644 node_modules/async/allLimit.js create mode 100644 node_modules/async/allSeries.js create mode 100644 node_modules/async/any.js create mode 100644 node_modules/async/anyLimit.js create mode 100644 node_modules/async/anySeries.js create mode 100644 node_modules/async/apply.js create mode 100644 node_modules/async/applyEach.js create mode 100644 node_modules/async/applyEachSeries.js create mode 100644 node_modules/async/asyncify.js create mode 100644 node_modules/async/auto.js create mode 100644 node_modules/async/autoInject.js create mode 100644 node_modules/async/bower.json create mode 100644 node_modules/async/cargo.js create mode 
100644 node_modules/async/cargoQueue.js create mode 100644 node_modules/async/compose.js create mode 100644 node_modules/async/concat.js create mode 100644 node_modules/async/concatLimit.js create mode 100644 node_modules/async/concatSeries.js create mode 100644 node_modules/async/constant.js create mode 100644 node_modules/async/detect.js create mode 100644 node_modules/async/detectLimit.js create mode 100644 node_modules/async/detectSeries.js create mode 100644 node_modules/async/dir.js create mode 100644 node_modules/async/dist/async.js create mode 100644 node_modules/async/dist/async.min.js create mode 100644 node_modules/async/dist/async.mjs create mode 100644 node_modules/async/doDuring.js create mode 100644 node_modules/async/doUntil.js create mode 100644 node_modules/async/doWhilst.js create mode 100644 node_modules/async/during.js create mode 100644 node_modules/async/each.js create mode 100644 node_modules/async/eachLimit.js create mode 100644 node_modules/async/eachOf.js create mode 100644 node_modules/async/eachOfLimit.js create mode 100644 node_modules/async/eachOfSeries.js create mode 100644 node_modules/async/eachSeries.js create mode 100644 node_modules/async/ensureAsync.js create mode 100644 node_modules/async/every.js create mode 100644 node_modules/async/everyLimit.js create mode 100644 node_modules/async/everySeries.js create mode 100644 node_modules/async/filter.js create mode 100644 node_modules/async/filterLimit.js create mode 100644 node_modules/async/filterSeries.js create mode 100644 node_modules/async/find.js create mode 100644 node_modules/async/findLimit.js create mode 100644 node_modules/async/findSeries.js create mode 100644 node_modules/async/flatMap.js create mode 100644 node_modules/async/flatMapLimit.js create mode 100644 node_modules/async/flatMapSeries.js create mode 100644 node_modules/async/foldl.js create mode 100644 node_modules/async/foldr.js create mode 100644 node_modules/async/forEach.js create mode 100644 
node_modules/async/forEachLimit.js create mode 100644 node_modules/async/forEachOf.js create mode 100644 node_modules/async/forEachOfLimit.js create mode 100644 node_modules/async/forEachOfSeries.js create mode 100644 node_modules/async/forEachSeries.js create mode 100644 node_modules/async/forever.js create mode 100644 node_modules/async/groupBy.js create mode 100644 node_modules/async/groupByLimit.js create mode 100644 node_modules/async/groupBySeries.js create mode 100644 node_modules/async/index.js create mode 100644 node_modules/async/inject.js create mode 100644 node_modules/async/internal/DoublyLinkedList.js create mode 100644 node_modules/async/internal/Heap.js create mode 100644 node_modules/async/internal/applyEach.js create mode 100644 node_modules/async/internal/asyncEachOfLimit.js create mode 100644 node_modules/async/internal/awaitify.js create mode 100644 node_modules/async/internal/breakLoop.js create mode 100644 node_modules/async/internal/consoleFunc.js create mode 100644 node_modules/async/internal/createTester.js create mode 100644 node_modules/async/internal/eachOfLimit.js create mode 100644 node_modules/async/internal/filter.js create mode 100644 node_modules/async/internal/getIterator.js create mode 100644 node_modules/async/internal/initialParams.js create mode 100644 node_modules/async/internal/isArrayLike.js create mode 100644 node_modules/async/internal/iterator.js create mode 100644 node_modules/async/internal/map.js create mode 100644 node_modules/async/internal/once.js create mode 100644 node_modules/async/internal/onlyOnce.js create mode 100644 node_modules/async/internal/parallel.js create mode 100644 node_modules/async/internal/promiseCallback.js create mode 100644 node_modules/async/internal/queue.js create mode 100644 node_modules/async/internal/range.js create mode 100644 node_modules/async/internal/reject.js create mode 100644 node_modules/async/internal/setImmediate.js create mode 100644 
node_modules/async/internal/withoutIndex.js create mode 100644 node_modules/async/internal/wrapAsync.js create mode 100644 node_modules/async/log.js create mode 100644 node_modules/async/map.js create mode 100644 node_modules/async/mapLimit.js create mode 100644 node_modules/async/mapSeries.js create mode 100644 node_modules/async/mapValues.js create mode 100644 node_modules/async/mapValuesLimit.js create mode 100644 node_modules/async/mapValuesSeries.js create mode 100644 node_modules/async/memoize.js create mode 100644 node_modules/async/nextTick.js create mode 100644 node_modules/async/package.json create mode 100644 node_modules/async/parallel.js create mode 100644 node_modules/async/parallelLimit.js create mode 100644 node_modules/async/priorityQueue.js create mode 100644 node_modules/async/queue.js create mode 100644 node_modules/async/race.js create mode 100644 node_modules/async/reduce.js create mode 100644 node_modules/async/reduceRight.js create mode 100644 node_modules/async/reflect.js create mode 100644 node_modules/async/reflectAll.js create mode 100644 node_modules/async/reject.js create mode 100644 node_modules/async/rejectLimit.js create mode 100644 node_modules/async/rejectSeries.js create mode 100644 node_modules/async/retry.js create mode 100644 node_modules/async/retryable.js create mode 100644 node_modules/async/select.js create mode 100644 node_modules/async/selectLimit.js create mode 100644 node_modules/async/selectSeries.js create mode 100644 node_modules/async/seq.js create mode 100644 node_modules/async/series.js create mode 100644 node_modules/async/setImmediate.js create mode 100644 node_modules/async/some.js create mode 100644 node_modules/async/someLimit.js create mode 100644 node_modules/async/someSeries.js create mode 100644 node_modules/async/sortBy.js create mode 100644 node_modules/async/timeout.js create mode 100644 node_modules/async/times.js create mode 100644 node_modules/async/timesLimit.js create mode 100644 
node_modules/async/timesSeries.js create mode 100644 node_modules/async/transform.js create mode 100644 node_modules/async/tryEach.js create mode 100644 node_modules/async/unmemoize.js create mode 100644 node_modules/async/until.js create mode 100644 node_modules/async/waterfall.js create mode 100644 node_modules/async/whilst.js create mode 100644 node_modules/async/wrapSync.js create mode 100644 node_modules/balanced-match/.github/FUNDING.yml create mode 100644 node_modules/balanced-match/LICENSE.md create mode 100644 node_modules/balanced-match/README.md create mode 100644 node_modules/balanced-match/index.js create mode 100644 node_modules/balanced-match/package.json create mode 100644 node_modules/body-parser/HISTORY.md create mode 100644 node_modules/body-parser/LICENSE create mode 100644 node_modules/body-parser/README.md create mode 100644 node_modules/body-parser/SECURITY.md create mode 100644 node_modules/body-parser/index.js create mode 100644 node_modules/body-parser/lib/read.js create mode 100644 node_modules/body-parser/lib/types/json.js create mode 100644 node_modules/body-parser/lib/types/raw.js create mode 100644 node_modules/body-parser/lib/types/text.js create mode 100644 node_modules/body-parser/lib/types/urlencoded.js create mode 100644 node_modules/body-parser/package.json create mode 100644 node_modules/brace-expansion/LICENSE create mode 100644 node_modules/brace-expansion/README.md create mode 100644 node_modules/brace-expansion/index.js create mode 100644 node_modules/brace-expansion/package.json create mode 100644 node_modules/bytes/History.md create mode 100644 node_modules/bytes/LICENSE create mode 100644 node_modules/bytes/Readme.md create mode 100644 node_modules/bytes/index.js create mode 100644 node_modules/bytes/package.json create mode 100644 node_modules/call-bind/.eslintignore create mode 100644 node_modules/call-bind/.eslintrc create mode 100644 node_modules/call-bind/.github/FUNDING.yml create mode 100644 
node_modules/call-bind/.nycrc create mode 100644 node_modules/call-bind/CHANGELOG.md create mode 100644 node_modules/call-bind/LICENSE create mode 100644 node_modules/call-bind/README.md create mode 100644 node_modules/call-bind/callBound.js create mode 100644 node_modules/call-bind/index.js create mode 100644 node_modules/call-bind/package.json create mode 100644 node_modules/call-bind/test/callBound.js create mode 100644 node_modules/call-bind/test/index.js create mode 100644 node_modules/cassandra-driver/.eslintignore create mode 100644 node_modules/cassandra-driver/.travis.yml create mode 100644 node_modules/cassandra-driver/CHANGELOG.md create mode 100644 node_modules/cassandra-driver/Jenkinsfile create mode 100644 node_modules/cassandra-driver/LICENSE.txt create mode 100644 node_modules/cassandra-driver/NOTICE.txt create mode 100644 node_modules/cassandra-driver/README.md create mode 100644 node_modules/cassandra-driver/build.yaml create mode 100644 node_modules/cassandra-driver/index.d.ts create mode 100644 node_modules/cassandra-driver/index.js create mode 100644 node_modules/cassandra-driver/lib/auth/base-dse-authenticator.js create mode 100644 node_modules/cassandra-driver/lib/auth/dse-gssapi-auth-provider.js create mode 100644 node_modules/cassandra-driver/lib/auth/dse-plain-text-auth-provider.js create mode 100644 node_modules/cassandra-driver/lib/auth/gssapi-client.js create mode 100644 node_modules/cassandra-driver/lib/auth/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/auth/index.js create mode 100644 node_modules/cassandra-driver/lib/auth/no-auth-provider.js create mode 100644 node_modules/cassandra-driver/lib/auth/plain-text-auth-provider.js create mode 100644 node_modules/cassandra-driver/lib/auth/provider.js create mode 100644 node_modules/cassandra-driver/lib/client-options.js create mode 100644 node_modules/cassandra-driver/lib/client.js create mode 100644 node_modules/cassandra-driver/lib/concurrent/index.d.ts create mode 
100644 node_modules/cassandra-driver/lib/concurrent/index.js create mode 100644 node_modules/cassandra-driver/lib/connection.js create mode 100644 node_modules/cassandra-driver/lib/control-connection.js create mode 100644 node_modules/cassandra-driver/lib/datastax/cloud/index.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/complex-type-helper.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/custom-type-serializers.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/graph-executor.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/graph-serializer.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/index.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/options.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/result-set.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/structure.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/type-serializers.js create mode 100644 node_modules/cassandra-driver/lib/datastax/graph/wrappers.js create mode 100644 node_modules/cassandra-driver/lib/datastax/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/datastax/index.js create mode 100644 node_modules/cassandra-driver/lib/datastax/search/date-range.js create mode 100644 node_modules/cassandra-driver/lib/datastax/search/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/datastax/search/index.js create mode 100644 node_modules/cassandra-driver/lib/encoder.js create mode 100644 node_modules/cassandra-driver/lib/errors.js create mode 100644 node_modules/cassandra-driver/lib/execution-options.js create mode 100644 node_modules/cassandra-driver/lib/execution-profile.js create mode 100644 node_modules/cassandra-driver/lib/geometry/geometry.js create mode 100644 
node_modules/cassandra-driver/lib/geometry/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/geometry/index.js create mode 100644 node_modules/cassandra-driver/lib/geometry/line-string.js create mode 100644 node_modules/cassandra-driver/lib/geometry/point.js create mode 100644 node_modules/cassandra-driver/lib/geometry/polygon.js create mode 100644 node_modules/cassandra-driver/lib/host-connection-pool.js create mode 100644 node_modules/cassandra-driver/lib/host.js create mode 100644 node_modules/cassandra-driver/lib/insights-client.js create mode 100644 node_modules/cassandra-driver/lib/mapping/cache.js create mode 100644 node_modules/cassandra-driver/lib/mapping/doc-info-adapter.js create mode 100644 node_modules/cassandra-driver/lib/mapping/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/mapping/index.js create mode 100644 node_modules/cassandra-driver/lib/mapping/mapper.js create mode 100644 node_modules/cassandra-driver/lib/mapping/mapping-handler.js create mode 100644 node_modules/cassandra-driver/lib/mapping/model-batch-item.js create mode 100644 node_modules/cassandra-driver/lib/mapping/model-batch-mapper.js create mode 100644 node_modules/cassandra-driver/lib/mapping/model-mapper.js create mode 100644 node_modules/cassandra-driver/lib/mapping/model-mapping-info.js create mode 100644 node_modules/cassandra-driver/lib/mapping/object-selector.js create mode 100644 node_modules/cassandra-driver/lib/mapping/q.js create mode 100644 node_modules/cassandra-driver/lib/mapping/query-generator.js create mode 100644 node_modules/cassandra-driver/lib/mapping/result-mapper.js create mode 100644 node_modules/cassandra-driver/lib/mapping/result.js create mode 100644 node_modules/cassandra-driver/lib/mapping/table-mappings.js create mode 100644 node_modules/cassandra-driver/lib/mapping/tree.js create mode 100644 node_modules/cassandra-driver/lib/metadata/aggregate.js create mode 100644 node_modules/cassandra-driver/lib/metadata/client-state.js 
create mode 100644 node_modules/cassandra-driver/lib/metadata/data-collection.js create mode 100644 node_modules/cassandra-driver/lib/metadata/event-debouncer.js create mode 100644 node_modules/cassandra-driver/lib/metadata/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/metadata/index.js create mode 100644 node_modules/cassandra-driver/lib/metadata/materialized-view.js create mode 100644 node_modules/cassandra-driver/lib/metadata/schema-function.js create mode 100644 node_modules/cassandra-driver/lib/metadata/schema-index.js create mode 100644 node_modules/cassandra-driver/lib/metadata/schema-parser.js create mode 100644 node_modules/cassandra-driver/lib/metadata/table-metadata.js create mode 100644 node_modules/cassandra-driver/lib/metrics/client-metrics.js create mode 100644 node_modules/cassandra-driver/lib/metrics/default-metrics.js create mode 100644 node_modules/cassandra-driver/lib/metrics/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/metrics/index.js create mode 100644 node_modules/cassandra-driver/lib/operation-state.js create mode 100644 node_modules/cassandra-driver/lib/policies/address-resolution.js create mode 100644 node_modules/cassandra-driver/lib/policies/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/policies/index.js create mode 100644 node_modules/cassandra-driver/lib/policies/load-balancing.js create mode 100644 node_modules/cassandra-driver/lib/policies/reconnection.js create mode 100644 node_modules/cassandra-driver/lib/policies/retry.js create mode 100644 node_modules/cassandra-driver/lib/policies/speculative-execution.js create mode 100644 node_modules/cassandra-driver/lib/policies/timestamp-generation.js create mode 100644 node_modules/cassandra-driver/lib/prepare-handler.js create mode 100644 node_modules/cassandra-driver/lib/promise-utils.js create mode 100644 node_modules/cassandra-driver/lib/readers.js create mode 100644 node_modules/cassandra-driver/lib/request-execution.js create 
mode 100644 node_modules/cassandra-driver/lib/request-handler.js create mode 100644 node_modules/cassandra-driver/lib/requests.js create mode 100644 node_modules/cassandra-driver/lib/stream-id-stack.js create mode 100644 node_modules/cassandra-driver/lib/streams.js create mode 100644 node_modules/cassandra-driver/lib/token.js create mode 100644 node_modules/cassandra-driver/lib/tokenizer.js create mode 100644 node_modules/cassandra-driver/lib/tracker/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/tracker/index.js create mode 100644 node_modules/cassandra-driver/lib/tracker/request-logger.js create mode 100644 node_modules/cassandra-driver/lib/tracker/request-tracker.js create mode 100644 node_modules/cassandra-driver/lib/types/big-decimal.js create mode 100644 node_modules/cassandra-driver/lib/types/duration.js create mode 100644 node_modules/cassandra-driver/lib/types/index.d.ts create mode 100644 node_modules/cassandra-driver/lib/types/index.js create mode 100644 node_modules/cassandra-driver/lib/types/inet-address.js create mode 100644 node_modules/cassandra-driver/lib/types/integer.js create mode 100644 node_modules/cassandra-driver/lib/types/local-date.js create mode 100644 node_modules/cassandra-driver/lib/types/local-time.js create mode 100644 node_modules/cassandra-driver/lib/types/mutable-long.js create mode 100644 node_modules/cassandra-driver/lib/types/protocol-version.js create mode 100644 node_modules/cassandra-driver/lib/types/result-set.js create mode 100644 node_modules/cassandra-driver/lib/types/result-stream.js create mode 100644 node_modules/cassandra-driver/lib/types/row.js create mode 100644 node_modules/cassandra-driver/lib/types/time-uuid.js create mode 100644 node_modules/cassandra-driver/lib/types/tuple.js create mode 100644 node_modules/cassandra-driver/lib/types/uuid.js create mode 100644 node_modules/cassandra-driver/lib/types/version-number.js create mode 100644 node_modules/cassandra-driver/lib/utils.js create mode 
100644 node_modules/cassandra-driver/lib/writers.js create mode 100644 node_modules/cassandra-driver/package.json create mode 100644 node_modules/chalk/index.d.ts create mode 100644 node_modules/chalk/license create mode 100644 node_modules/chalk/package.json create mode 100644 node_modules/chalk/readme.md create mode 100644 node_modules/chalk/source/index.js create mode 100644 node_modules/chalk/source/templates.js create mode 100644 node_modules/chalk/source/util.js create mode 100644 node_modules/color-convert/CHANGELOG.md create mode 100644 node_modules/color-convert/LICENSE create mode 100644 node_modules/color-convert/README.md create mode 100644 node_modules/color-convert/conversions.js create mode 100644 node_modules/color-convert/index.js create mode 100644 node_modules/color-convert/package.json create mode 100644 node_modules/color-convert/route.js create mode 100644 node_modules/color-name/LICENSE create mode 100644 node_modules/color-name/README.md create mode 100644 node_modules/color-name/index.js create mode 100644 node_modules/color-name/package.json create mode 100644 node_modules/concat-map/.travis.yml create mode 100644 node_modules/concat-map/LICENSE create mode 100644 node_modules/concat-map/README.markdown create mode 100644 node_modules/concat-map/example/map.js create mode 100644 node_modules/concat-map/index.js create mode 100644 node_modules/concat-map/package.json create mode 100644 node_modules/concat-map/test/map.js create mode 100644 node_modules/content-disposition/HISTORY.md create mode 100644 node_modules/content-disposition/LICENSE create mode 100644 node_modules/content-disposition/README.md create mode 100644 node_modules/content-disposition/index.js create mode 100644 node_modules/content-disposition/package.json create mode 100644 node_modules/content-type/HISTORY.md create mode 100644 node_modules/content-type/LICENSE create mode 100644 node_modules/content-type/README.md create mode 100644 node_modules/content-type/index.js 
create mode 100644 node_modules/content-type/package.json create mode 100644 node_modules/cookie-signature/.npmignore create mode 100644 node_modules/cookie-signature/History.md create mode 100644 node_modules/cookie-signature/Readme.md create mode 100644 node_modules/cookie-signature/index.js create mode 100644 node_modules/cookie-signature/package.json create mode 100644 node_modules/cookie/HISTORY.md create mode 100644 node_modules/cookie/LICENSE create mode 100644 node_modules/cookie/README.md create mode 100644 node_modules/cookie/SECURITY.md create mode 100644 node_modules/cookie/index.js create mode 100644 node_modules/cookie/package.json create mode 100644 node_modules/debug/.coveralls.yml create mode 100644 node_modules/debug/.eslintrc create mode 100644 node_modules/debug/.npmignore create mode 100644 node_modules/debug/.travis.yml create mode 100644 node_modules/debug/CHANGELOG.md create mode 100644 node_modules/debug/LICENSE create mode 100644 node_modules/debug/Makefile create mode 100644 node_modules/debug/README.md create mode 100644 node_modules/debug/component.json create mode 100644 node_modules/debug/karma.conf.js create mode 100644 node_modules/debug/node.js create mode 100644 node_modules/debug/package.json create mode 100644 node_modules/debug/src/browser.js create mode 100644 node_modules/debug/src/debug.js create mode 100644 node_modules/debug/src/index.js create mode 100644 node_modules/debug/src/inspector-log.js create mode 100644 node_modules/debug/src/node.js create mode 100644 node_modules/depd/History.md create mode 100644 node_modules/depd/LICENSE create mode 100644 node_modules/depd/Readme.md create mode 100644 node_modules/depd/index.js create mode 100644 node_modules/depd/lib/browser/index.js create mode 100644 node_modules/depd/package.json create mode 100644 node_modules/destroy/LICENSE create mode 100644 node_modules/destroy/README.md create mode 100644 node_modules/destroy/index.js create mode 100644 
node_modules/destroy/package.json create mode 100644 node_modules/ee-first/LICENSE create mode 100644 node_modules/ee-first/README.md create mode 100644 node_modules/ee-first/index.js create mode 100644 node_modules/ee-first/package.json create mode 100644 node_modules/ejs/LICENSE create mode 100644 node_modules/ejs/README.md create mode 100755 node_modules/ejs/bin/cli.js create mode 100644 node_modules/ejs/ejs.js create mode 100644 node_modules/ejs/ejs.min.js create mode 100644 node_modules/ejs/jakefile.js create mode 100755 node_modules/ejs/lib/ejs.js create mode 100644 node_modules/ejs/lib/utils.js create mode 100644 node_modules/ejs/package.json create mode 100644 node_modules/ejs/usage.txt create mode 100644 node_modules/encodeurl/HISTORY.md create mode 100644 node_modules/encodeurl/LICENSE create mode 100644 node_modules/encodeurl/README.md create mode 100644 node_modules/encodeurl/index.js create mode 100644 node_modules/encodeurl/package.json create mode 100644 node_modules/escape-html/LICENSE create mode 100644 node_modules/escape-html/Readme.md create mode 100644 node_modules/escape-html/index.js create mode 100644 node_modules/escape-html/package.json create mode 100644 node_modules/etag/HISTORY.md create mode 100644 node_modules/etag/LICENSE create mode 100644 node_modules/etag/README.md create mode 100644 node_modules/etag/index.js create mode 100644 node_modules/etag/package.json create mode 100644 node_modules/express/History.md create mode 100644 node_modules/express/LICENSE create mode 100644 node_modules/express/Readme.md create mode 100644 node_modules/express/index.js create mode 100644 node_modules/express/lib/application.js create mode 100644 node_modules/express/lib/express.js create mode 100644 node_modules/express/lib/middleware/init.js create mode 100644 node_modules/express/lib/middleware/query.js create mode 100644 node_modules/express/lib/request.js create mode 100644 node_modules/express/lib/response.js create mode 100644 
node_modules/express/lib/router/index.js create mode 100644 node_modules/express/lib/router/layer.js create mode 100644 node_modules/express/lib/router/route.js create mode 100644 node_modules/express/lib/utils.js create mode 100644 node_modules/express/lib/view.js create mode 100644 node_modules/express/node_modules/body-parser/HISTORY.md create mode 100644 node_modules/express/node_modules/body-parser/LICENSE create mode 100644 node_modules/express/node_modules/body-parser/README.md create mode 100644 node_modules/express/node_modules/body-parser/SECURITY.md create mode 100644 node_modules/express/node_modules/body-parser/index.js create mode 100644 node_modules/express/node_modules/body-parser/lib/read.js create mode 100644 node_modules/express/node_modules/body-parser/lib/types/json.js create mode 100644 node_modules/express/node_modules/body-parser/lib/types/raw.js create mode 100644 node_modules/express/node_modules/body-parser/lib/types/text.js create mode 100644 node_modules/express/node_modules/body-parser/lib/types/urlencoded.js create mode 100644 node_modules/express/node_modules/body-parser/package.json create mode 100644 node_modules/express/node_modules/raw-body/HISTORY.md create mode 100644 node_modules/express/node_modules/raw-body/LICENSE create mode 100644 node_modules/express/node_modules/raw-body/README.md create mode 100644 node_modules/express/node_modules/raw-body/SECURITY.md create mode 100644 node_modules/express/node_modules/raw-body/index.d.ts create mode 100644 node_modules/express/node_modules/raw-body/index.js create mode 100644 node_modules/express/node_modules/raw-body/package.json create mode 100644 node_modules/express/package.json create mode 100644 node_modules/filelist/README.md create mode 100644 node_modules/filelist/index.d.ts create mode 100644 node_modules/filelist/index.js create mode 100644 node_modules/filelist/jakefile.js create mode 100644 node_modules/filelist/node_modules/brace-expansion/.github/FUNDING.yml create 
mode 100644 node_modules/filelist/node_modules/brace-expansion/LICENSE create mode 100644 node_modules/filelist/node_modules/brace-expansion/README.md create mode 100644 node_modules/filelist/node_modules/brace-expansion/index.js create mode 100644 node_modules/filelist/node_modules/brace-expansion/package.json create mode 100644 node_modules/filelist/node_modules/minimatch/LICENSE create mode 100644 node_modules/filelist/node_modules/minimatch/README.md create mode 100644 node_modules/filelist/node_modules/minimatch/lib/path.js create mode 100644 node_modules/filelist/node_modules/minimatch/minimatch.js create mode 100644 node_modules/filelist/node_modules/minimatch/package.json create mode 100644 node_modules/filelist/package.json create mode 100644 node_modules/finalhandler/HISTORY.md create mode 100644 node_modules/finalhandler/LICENSE create mode 100644 node_modules/finalhandler/README.md create mode 100644 node_modules/finalhandler/SECURITY.md create mode 100644 node_modules/finalhandler/index.js create mode 100644 node_modules/finalhandler/package.json create mode 100644 node_modules/forwarded/HISTORY.md create mode 100644 node_modules/forwarded/LICENSE create mode 100644 node_modules/forwarded/README.md create mode 100644 node_modules/forwarded/index.js create mode 100644 node_modules/forwarded/package.json create mode 100644 node_modules/fresh/HISTORY.md create mode 100644 node_modules/fresh/LICENSE create mode 100644 node_modules/fresh/README.md create mode 100644 node_modules/fresh/index.js create mode 100644 node_modules/fresh/package.json create mode 100644 node_modules/function-bind/.editorconfig create mode 100644 node_modules/function-bind/.eslintrc create mode 100644 node_modules/function-bind/.jscs.json create mode 100644 node_modules/function-bind/.npmignore create mode 100644 node_modules/function-bind/.travis.yml create mode 100644 node_modules/function-bind/LICENSE create mode 100644 node_modules/function-bind/README.md create mode 100644 
node_modules/function-bind/implementation.js create mode 100644 node_modules/function-bind/index.js create mode 100644 node_modules/function-bind/package.json create mode 100644 node_modules/function-bind/test/.eslintrc create mode 100644 node_modules/function-bind/test/index.js create mode 100644 node_modules/get-intrinsic/.eslintrc create mode 100644 node_modules/get-intrinsic/.github/FUNDING.yml create mode 100644 node_modules/get-intrinsic/.nycrc create mode 100644 node_modules/get-intrinsic/CHANGELOG.md create mode 100644 node_modules/get-intrinsic/LICENSE create mode 100644 node_modules/get-intrinsic/README.md create mode 100644 node_modules/get-intrinsic/index.js create mode 100644 node_modules/get-intrinsic/package.json create mode 100644 node_modules/get-intrinsic/test/GetIntrinsic.js create mode 100644 node_modules/has-flag/index.d.ts create mode 100644 node_modules/has-flag/index.js create mode 100644 node_modules/has-flag/license create mode 100644 node_modules/has-flag/package.json create mode 100644 node_modules/has-flag/readme.md create mode 100644 node_modules/has-proto/.eslintrc create mode 100644 node_modules/has-proto/.github/FUNDING.yml create mode 100644 node_modules/has-proto/CHANGELOG.md create mode 100644 node_modules/has-proto/LICENSE create mode 100644 node_modules/has-proto/README.md create mode 100644 node_modules/has-proto/index.js create mode 100644 node_modules/has-proto/package.json create mode 100644 node_modules/has-proto/test/index.js create mode 100644 node_modules/has-symbols/.eslintrc create mode 100644 node_modules/has-symbols/.github/FUNDING.yml create mode 100644 node_modules/has-symbols/.nycrc create mode 100644 node_modules/has-symbols/CHANGELOG.md create mode 100644 node_modules/has-symbols/LICENSE create mode 100644 node_modules/has-symbols/README.md create mode 100644 node_modules/has-symbols/index.js create mode 100644 node_modules/has-symbols/package.json create mode 100644 node_modules/has-symbols/shams.js create 
mode 100644 node_modules/has-symbols/test/index.js create mode 100644 node_modules/has-symbols/test/shams/core-js.js create mode 100644 node_modules/has-symbols/test/shams/get-own-property-symbols.js create mode 100644 node_modules/has-symbols/test/tests.js create mode 100644 node_modules/has/LICENSE-MIT create mode 100644 node_modules/has/README.md create mode 100644 node_modules/has/package.json create mode 100644 node_modules/has/src/index.js create mode 100644 node_modules/has/test/index.js create mode 100644 node_modules/http-errors/HISTORY.md create mode 100644 node_modules/http-errors/LICENSE create mode 100644 node_modules/http-errors/README.md create mode 100644 node_modules/http-errors/index.js create mode 100644 node_modules/http-errors/package.json create mode 100644 node_modules/iconv-lite/Changelog.md create mode 100644 node_modules/iconv-lite/LICENSE create mode 100644 node_modules/iconv-lite/README.md create mode 100644 node_modules/iconv-lite/encodings/dbcs-codec.js create mode 100644 node_modules/iconv-lite/encodings/dbcs-data.js create mode 100644 node_modules/iconv-lite/encodings/index.js create mode 100644 node_modules/iconv-lite/encodings/internal.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-codec.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-data-generated.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-data.js create mode 100644 node_modules/iconv-lite/encodings/tables/big5-added.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp936.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp949.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp950.json create mode 100644 node_modules/iconv-lite/encodings/tables/eucjp.json create mode 100644 node_modules/iconv-lite/encodings/tables/gb18030-ranges.json create mode 100644 node_modules/iconv-lite/encodings/tables/gbk-added.json create mode 100644 node_modules/iconv-lite/encodings/tables/shiftjis.json create 
mode 100644 node_modules/iconv-lite/encodings/utf16.js create mode 100644 node_modules/iconv-lite/encodings/utf7.js create mode 100644 node_modules/iconv-lite/lib/bom-handling.js create mode 100644 node_modules/iconv-lite/lib/extend-node.js create mode 100644 node_modules/iconv-lite/lib/index.d.ts create mode 100644 node_modules/iconv-lite/lib/index.js create mode 100644 node_modules/iconv-lite/lib/streams.js create mode 100644 node_modules/iconv-lite/package.json create mode 100644 node_modules/inherits/LICENSE create mode 100644 node_modules/inherits/README.md create mode 100644 node_modules/inherits/inherits.js create mode 100644 node_modules/inherits/inherits_browser.js create mode 100644 node_modules/inherits/package.json create mode 100644 node_modules/ipaddr.js/LICENSE create mode 100644 node_modules/ipaddr.js/README.md create mode 100644 node_modules/ipaddr.js/ipaddr.min.js create mode 100644 node_modules/ipaddr.js/lib/ipaddr.js create mode 100644 node_modules/ipaddr.js/lib/ipaddr.js.d.ts create mode 100644 node_modules/ipaddr.js/package.json create mode 100644 node_modules/jake/Makefile create mode 100644 node_modules/jake/README.md create mode 100755 node_modules/jake/bin/bash_completion.sh create mode 100755 node_modules/jake/bin/cli.js create mode 100644 node_modules/jake/jakefile.js create mode 100644 node_modules/jake/lib/api.js create mode 100644 node_modules/jake/lib/jake.js create mode 100644 node_modules/jake/lib/loader.js create mode 100644 node_modules/jake/lib/namespace.js create mode 100644 node_modules/jake/lib/package_task.js create mode 100644 node_modules/jake/lib/parseargs.js create mode 100644 node_modules/jake/lib/program.js create mode 100644 node_modules/jake/lib/publish_task.js create mode 100644 node_modules/jake/lib/rule.js create mode 100644 node_modules/jake/lib/task/directory_task.js create mode 100644 node_modules/jake/lib/task/file_task.js create mode 100644 node_modules/jake/lib/task/index.js create mode 100644 
node_modules/jake/lib/task/task.js create mode 100644 node_modules/jake/lib/test_task.js create mode 100644 node_modules/jake/lib/utils/file.js create mode 100644 node_modules/jake/lib/utils/index.js create mode 100644 node_modules/jake/lib/utils/logger.js create mode 100644 node_modules/jake/package.json create mode 100644 node_modules/jake/test/integration/concurrent.js create mode 100644 node_modules/jake/test/integration/file.js create mode 100644 node_modules/jake/test/integration/file_task.js create mode 100644 node_modules/jake/test/integration/helpers.js create mode 100644 node_modules/jake/test/integration/jakefile.js create mode 100644 node_modules/jake/test/integration/jakelib/concurrent.jake.js create mode 100644 node_modules/jake/test/integration/jakelib/publish.jake.js create mode 100644 node_modules/jake/test/integration/jakelib/required_module.jake.js create mode 100644 node_modules/jake/test/integration/jakelib/rule.jake.js create mode 100644 node_modules/jake/test/integration/list_tasks.js create mode 100644 node_modules/jake/test/integration/publish_task.js create mode 100644 node_modules/jake/test/integration/rule.js create mode 100644 node_modules/jake/test/integration/selfdep.js create mode 100644 node_modules/jake/test/integration/task_base.js create mode 100644 node_modules/jake/test/unit/jakefile.js create mode 100644 node_modules/jake/test/unit/namespace.js create mode 100644 node_modules/jake/test/unit/parseargs.js create mode 100644 node_modules/jake/usage.txt create mode 100644 node_modules/long/.npmignore create mode 100644 node_modules/long/.travis.yml create mode 100644 node_modules/long/LICENSE create mode 100644 node_modules/long/Long.png create mode 100644 node_modules/long/README.md create mode 100644 node_modules/long/bower.json create mode 100644 node_modules/long/dist/Long.js create mode 100644 node_modules/long/dist/Long.min.js create mode 100644 node_modules/long/dist/Long.min.js.gz create mode 100644 
node_modules/long/dist/Long.min.map create mode 100644 node_modules/long/dist/README.md create mode 100644 node_modules/long/doco/INDEX.md create mode 100644 node_modules/long/doco/Long.md create mode 100644 node_modules/long/donate.png create mode 100644 node_modules/long/externs/Long.js create mode 100644 node_modules/long/index.js create mode 100644 node_modules/long/jsdoc.json create mode 100644 node_modules/long/package.json create mode 100644 node_modules/long/scripts/build.js create mode 100644 node_modules/long/src/Long.js create mode 100644 node_modules/long/src/bower.json create mode 100644 node_modules/long/src/wrap.js create mode 100644 node_modules/long/tests/goog.math.long.js create mode 100644 node_modules/long/tests/suite.js create mode 100644 node_modules/media-typer/HISTORY.md create mode 100644 node_modules/media-typer/LICENSE create mode 100644 node_modules/media-typer/README.md create mode 100644 node_modules/media-typer/index.js create mode 100644 node_modules/media-typer/package.json create mode 100644 node_modules/merge-descriptors/HISTORY.md create mode 100644 node_modules/merge-descriptors/LICENSE create mode 100644 node_modules/merge-descriptors/README.md create mode 100644 node_modules/merge-descriptors/index.js create mode 100644 node_modules/merge-descriptors/package.json create mode 100644 node_modules/methods/HISTORY.md create mode 100644 node_modules/methods/LICENSE create mode 100644 node_modules/methods/README.md create mode 100644 node_modules/methods/index.js create mode 100644 node_modules/methods/package.json create mode 100644 node_modules/mime-db/HISTORY.md create mode 100644 node_modules/mime-db/LICENSE create mode 100644 node_modules/mime-db/README.md create mode 100644 node_modules/mime-db/db.json create mode 100644 node_modules/mime-db/index.js create mode 100644 node_modules/mime-db/package.json create mode 100644 node_modules/mime-types/HISTORY.md create mode 100644 node_modules/mime-types/LICENSE create mode 100644 
node_modules/mime-types/README.md create mode 100644 node_modules/mime-types/index.js create mode 100644 node_modules/mime-types/package.json create mode 100644 node_modules/mime/.npmignore create mode 100644 node_modules/mime/CHANGELOG.md create mode 100644 node_modules/mime/LICENSE create mode 100644 node_modules/mime/README.md create mode 100755 node_modules/mime/cli.js create mode 100644 node_modules/mime/mime.js create mode 100644 node_modules/mime/package.json create mode 100755 node_modules/mime/src/build.js create mode 100644 node_modules/mime/src/test.js create mode 100644 node_modules/mime/types.json create mode 100644 node_modules/minimatch/LICENSE create mode 100644 node_modules/minimatch/README.md create mode 100644 node_modules/minimatch/minimatch.js create mode 100644 node_modules/minimatch/package.json create mode 100644 node_modules/ms/index.js create mode 100644 node_modules/ms/license.md create mode 100644 node_modules/ms/package.json create mode 100644 node_modules/ms/readme.md create mode 100644 node_modules/negotiator/HISTORY.md create mode 100644 node_modules/negotiator/LICENSE create mode 100644 node_modules/negotiator/README.md create mode 100644 node_modules/negotiator/index.js create mode 100644 node_modules/negotiator/lib/charset.js create mode 100644 node_modules/negotiator/lib/encoding.js create mode 100644 node_modules/negotiator/lib/language.js create mode 100644 node_modules/negotiator/lib/mediaType.js create mode 100644 node_modules/negotiator/package.json create mode 100644 node_modules/object-inspect/.eslintrc create mode 100644 node_modules/object-inspect/.github/FUNDING.yml create mode 100644 node_modules/object-inspect/.nycrc create mode 100644 node_modules/object-inspect/CHANGELOG.md create mode 100644 node_modules/object-inspect/LICENSE create mode 100644 node_modules/object-inspect/example/all.js create mode 100644 node_modules/object-inspect/example/circular.js create mode 100644 node_modules/object-inspect/example/fn.js 
create mode 100644 node_modules/object-inspect/example/inspect.js create mode 100644 node_modules/object-inspect/index.js create mode 100644 node_modules/object-inspect/package-support.json create mode 100644 node_modules/object-inspect/package.json create mode 100644 node_modules/object-inspect/readme.markdown create mode 100644 node_modules/object-inspect/test-core-js.js create mode 100644 node_modules/object-inspect/test/bigint.js create mode 100644 node_modules/object-inspect/test/browser/dom.js create mode 100644 node_modules/object-inspect/test/circular.js create mode 100644 node_modules/object-inspect/test/deep.js create mode 100644 node_modules/object-inspect/test/element.js create mode 100644 node_modules/object-inspect/test/err.js create mode 100644 node_modules/object-inspect/test/fakes.js create mode 100644 node_modules/object-inspect/test/fn.js create mode 100644 node_modules/object-inspect/test/has.js create mode 100644 node_modules/object-inspect/test/holes.js create mode 100644 node_modules/object-inspect/test/indent-option.js create mode 100644 node_modules/object-inspect/test/inspect.js create mode 100644 node_modules/object-inspect/test/lowbyte.js create mode 100644 node_modules/object-inspect/test/number.js create mode 100644 node_modules/object-inspect/test/quoteStyle.js create mode 100644 node_modules/object-inspect/test/toStringTag.js create mode 100644 node_modules/object-inspect/test/undef.js create mode 100644 node_modules/object-inspect/test/values.js create mode 100644 node_modules/object-inspect/util.inspect.js create mode 100644 node_modules/on-finished/HISTORY.md create mode 100644 node_modules/on-finished/LICENSE create mode 100644 node_modules/on-finished/README.md create mode 100644 node_modules/on-finished/index.js create mode 100644 node_modules/on-finished/package.json create mode 100644 node_modules/parseurl/HISTORY.md create mode 100644 node_modules/parseurl/LICENSE create mode 100644 node_modules/parseurl/README.md create 
mode 100644 node_modules/parseurl/index.js create mode 100644 node_modules/parseurl/package.json create mode 100644 node_modules/path-to-regexp/History.md create mode 100644 node_modules/path-to-regexp/LICENSE create mode 100644 node_modules/path-to-regexp/Readme.md create mode 100644 node_modules/path-to-regexp/index.js create mode 100644 node_modules/path-to-regexp/package.json create mode 100644 node_modules/proxy-addr/HISTORY.md create mode 100644 node_modules/proxy-addr/LICENSE create mode 100644 node_modules/proxy-addr/README.md create mode 100644 node_modules/proxy-addr/index.js create mode 100644 node_modules/proxy-addr/package.json create mode 100644 node_modules/qs/.editorconfig create mode 100644 node_modules/qs/.eslintrc create mode 100644 node_modules/qs/.github/FUNDING.yml create mode 100644 node_modules/qs/.nycrc create mode 100644 node_modules/qs/CHANGELOG.md create mode 100644 node_modules/qs/LICENSE.md create mode 100644 node_modules/qs/README.md create mode 100644 node_modules/qs/dist/qs.js create mode 100644 node_modules/qs/lib/formats.js create mode 100644 node_modules/qs/lib/index.js create mode 100644 node_modules/qs/lib/parse.js create mode 100644 node_modules/qs/lib/stringify.js create mode 100644 node_modules/qs/lib/utils.js create mode 100644 node_modules/qs/package.json create mode 100644 node_modules/qs/test/parse.js create mode 100644 node_modules/qs/test/stringify.js create mode 100644 node_modules/qs/test/utils.js create mode 100644 node_modules/range-parser/HISTORY.md create mode 100644 node_modules/range-parser/LICENSE create mode 100644 node_modules/range-parser/README.md create mode 100644 node_modules/range-parser/index.js create mode 100644 node_modules/range-parser/package.json create mode 100644 node_modules/raw-body/HISTORY.md create mode 100644 node_modules/raw-body/LICENSE create mode 100644 node_modules/raw-body/README.md create mode 100644 node_modules/raw-body/SECURITY.md create mode 100644 
node_modules/raw-body/index.d.ts create mode 100644 node_modules/raw-body/index.js create mode 100644 node_modules/raw-body/package.json create mode 100644 node_modules/safe-buffer/LICENSE create mode 100644 node_modules/safe-buffer/README.md create mode 100644 node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/safe-buffer/index.js create mode 100644 node_modules/safe-buffer/package.json create mode 100644 node_modules/safer-buffer/LICENSE create mode 100644 node_modules/safer-buffer/Porting-Buffer.md create mode 100644 node_modules/safer-buffer/Readme.md create mode 100644 node_modules/safer-buffer/dangerous.js create mode 100644 node_modules/safer-buffer/package.json create mode 100644 node_modules/safer-buffer/safer.js create mode 100644 node_modules/safer-buffer/tests.js create mode 100644 node_modules/send/HISTORY.md create mode 100644 node_modules/send/LICENSE create mode 100644 node_modules/send/README.md create mode 100644 node_modules/send/SECURITY.md create mode 100644 node_modules/send/index.js create mode 100644 node_modules/send/node_modules/ms/index.js create mode 100644 node_modules/send/node_modules/ms/license.md create mode 100644 node_modules/send/node_modules/ms/package.json create mode 100644 node_modules/send/node_modules/ms/readme.md create mode 100644 node_modules/send/package.json create mode 100644 node_modules/serve-static/HISTORY.md create mode 100644 node_modules/serve-static/LICENSE create mode 100644 node_modules/serve-static/README.md create mode 100644 node_modules/serve-static/index.js create mode 100644 node_modules/serve-static/package.json create mode 100644 node_modules/setprototypeof/LICENSE create mode 100644 node_modules/setprototypeof/README.md create mode 100644 node_modules/setprototypeof/index.d.ts create mode 100644 node_modules/setprototypeof/index.js create mode 100644 node_modules/setprototypeof/package.json create mode 100644 node_modules/setprototypeof/test/index.js create mode 100644 
node_modules/side-channel/.eslintignore create mode 100644 node_modules/side-channel/.eslintrc create mode 100644 node_modules/side-channel/.github/FUNDING.yml create mode 100644 node_modules/side-channel/.nycrc create mode 100644 node_modules/side-channel/CHANGELOG.md create mode 100644 node_modules/side-channel/LICENSE create mode 100644 node_modules/side-channel/README.md create mode 100644 node_modules/side-channel/index.js create mode 100644 node_modules/side-channel/package.json create mode 100644 node_modules/side-channel/test/index.js create mode 100644 node_modules/statuses/HISTORY.md create mode 100644 node_modules/statuses/LICENSE create mode 100644 node_modules/statuses/README.md create mode 100644 node_modules/statuses/codes.json create mode 100644 node_modules/statuses/index.js create mode 100644 node_modules/statuses/package.json create mode 100644 node_modules/supports-color/browser.js create mode 100644 node_modules/supports-color/index.js create mode 100644 node_modules/supports-color/license create mode 100644 node_modules/supports-color/package.json create mode 100644 node_modules/supports-color/readme.md create mode 100644 node_modules/toidentifier/HISTORY.md create mode 100644 node_modules/toidentifier/LICENSE create mode 100644 node_modules/toidentifier/README.md create mode 100644 node_modules/toidentifier/index.js create mode 100644 node_modules/toidentifier/package.json create mode 100644 node_modules/type-is/HISTORY.md create mode 100644 node_modules/type-is/LICENSE create mode 100644 node_modules/type-is/README.md create mode 100644 node_modules/type-is/index.js create mode 100644 node_modules/type-is/package.json create mode 100644 node_modules/unpipe/HISTORY.md create mode 100644 node_modules/unpipe/LICENSE create mode 100644 node_modules/unpipe/README.md create mode 100644 node_modules/unpipe/index.js create mode 100644 node_modules/unpipe/package.json create mode 100644 node_modules/utils-merge/.npmignore create mode 100644 
node_modules/utils-merge/LICENSE create mode 100644 node_modules/utils-merge/README.md create mode 100644 node_modules/utils-merge/index.js create mode 100644 node_modules/utils-merge/package.json create mode 100644 node_modules/vary/HISTORY.md create mode 100644 node_modules/vary/LICENSE create mode 100644 node_modules/vary/README.md create mode 100644 node_modules/vary/index.js create mode 100644 node_modules/vary/package.json create mode 100644 public/css/error.css create mode 100644 public/css/index.css create mode 100644 public/error.html create mode 100644 public/images/cat.gif create mode 100644 public/index.html create mode 100644 public/src/index.js diff --git a/index.js b/index.js deleted file mode 100644 index 7b56b1a..0000000 --- a/index.js +++ /dev/null @@ -1,67 +0,0 @@ -const submit = document.getElementById("button"); -submit.addEventListener('click', validate); -function validate(e) { - e.preventDefault(); - - const url = document.getElementById("URL"); - const glink = document.getElementById("GLink"); - // if (!url) { - // /* Flag */ - // } - let valid = true; - const domainExp = new RegExp("http(s)*:\\/\\/[a-zA-Z0-9\\-]+(\\.[a-zA-Z0-9\\-]+)+"); - const filepathExp = new RegExp("[a-zA-Z]+"); - let count = 0; - let index = -1; - let domain = ""; - let filepath = ""; - for (let i=0; i < url.value.length; i++) { - if (url.value.charAt(i) == '/') { - count++; - } - if (count == 3) { - index = i; - break; - } - } - if (count >= 3) { - domain = url.value.substring(0, index); - if (index == url.value.length - 1) { - filepath = url.value.charAt(index); - } else { - filepath = url.value.substring(index, url.value.length - 1); - } - alert("Domain is " + domain + " filepath is " + filepath); - } else { - domain = url.value; - } - console.log(domain); - if (domain.match(domainExp)) /** and is available? 
*/{ - const error = document.getElementById("error"); - if (error.classList.contains("visible")) { - error.classList.remove("visible"); - } - if (url.classList.contains("invalid")) { - url.classList.remove("invalid"); - } - url.classList.add("valid"); - error.setAttribute('aria-hidden', true); - error.setAttribute('aria-invalid', false); - console.log("Valid"); - return valid; - } else { - - /*flag*/ - const error = document.getElementById("error"); - error.classList.add("visible"); - - //error.classList.add("hidden"); - if (url.classList.contains("valid")) { - url.classList.remove("valid"); - } - url.classList.add("invalid"); - error.setAttribute('aria-hidden', false); - error.setAttribute('aria-invalid', true); - } - -} \ No newline at end of file diff --git a/node_modules/.bin/ejs b/node_modules/.bin/ejs new file mode 120000 index 0000000..88e80d0 --- /dev/null +++ b/node_modules/.bin/ejs @@ -0,0 +1 @@ +../ejs/bin/cli.js \ No newline at end of file diff --git a/node_modules/.bin/jake b/node_modules/.bin/jake new file mode 120000 index 0000000..3626745 --- /dev/null +++ b/node_modules/.bin/jake @@ -0,0 +1 @@ +../jake/bin/cli.js \ No newline at end of file diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime new file mode 120000 index 0000000..fbb7ee0 --- /dev/null +++ b/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 06e6806..f136b6e 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -1,8 +1,14 @@ { - "name": "GLink", + "name": "glink", + "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==" + }, "node_modules/@types/node": { "version": "20.2.5", 
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.5.tgz", @@ -22,6 +28,87 @@ "@types/webidl-conversions": "*" } }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/adm-zip": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.10.tgz", + "integrity": "sha512-x0HvcHqVJNTPk/Bw8JbLWlWoo6Wwnsug0fnYYro1HBrjxZ3G7/AZk7Ahv8JwDe1uIcz8eBqvu86FuF1POiG7vQ==", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/body-parser": { + "version": "1.20.2", + 
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/bson": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/bson/-/bson-5.3.0.tgz", @@ -30,17 +117,508 @@ "node": ">=14.20.1" } }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/cassandra-driver": { + "version": "4.6.4", + "resolved": "https://registry.npmjs.org/cassandra-driver/-/cassandra-driver-4.6.4.tgz", + "integrity": "sha512-SksbIK0cZ2QZRx8ti7w+PnLqldyY+6kU2gRWFChwXFTtrD/ce8cQICDEHxyPwx+DeILwRnMrPf9cjUGizYw9Vg==", + 
"dependencies": { + "@types/long": "^4.0.0", + "@types/node": ">=8", + "adm-zip": "^0.5.3", + "long": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": 
"sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/ejs": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz", + "integrity": 
"sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express": { + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.1", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.5.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.2.0", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.7", + "qs": "6.11.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.18.0", + "serve-static": "1.15.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", 
+ "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/express/node_modules/body-parser": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.1", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/express/node_modules/raw-body": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": 
"sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/finalhandler": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", + "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + 
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + 
}, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, "node_modules/ip": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/jake": { + "version": "10.8.7", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz", + "integrity": "sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w==", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/long": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/long/-/long-2.4.0.tgz", + "integrity": "sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/memory-pager": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", "integrity": 
"sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", "optional": true }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/mongodb": { "version": "5.6.0", "resolved": 
"https://registry.npmjs.org/mongodb/-/mongodb-5.6.0.tgz", @@ -82,6 +660,63 @@ "whatwg-url": "^11.0.0" } }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-inspect": { + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", + "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": 
"sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/punycode": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", @@ -90,6 +725,66 @@ "node": ">=6" } }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + 
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, "node_modules/saslprep": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", @@ -102,6 +797,66 @@ "node": ">=6" } }, + "node_modules/send": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "dependencies": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.18.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/smart-buffer": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", @@ -133,6 +888,33 @@ "memory-pager": "^1.0.2" } }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, "node_modules/tr46": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", @@ -144,6 +926,42 @@ "node": ">=12" } }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/unpipe": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", diff --git a/node_modules/@types/long/LICENSE b/node_modules/@types/long/LICENSE new file mode 100755 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/long/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/long/README.md b/node_modules/@types/long/README.md new file mode 100755 index 0000000..78faab2 --- /dev/null +++ b/node_modules/@types/long/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/long` + +# Summary +This package contains type definitions for long.js (https://github.com/dcodeIO/long.js). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/long. + +### Additional Details + * Last updated: Tue, 26 Apr 2022 19:31:52 GMT + * Dependencies: none + * Global values: `Long` + +# Credits +These definitions were written by [Peter Kooijmans](https://github.com/peterkooijmans). diff --git a/node_modules/@types/long/index.d.ts b/node_modules/@types/long/index.d.ts new file mode 100755 index 0000000..3a95005 --- /dev/null +++ b/node_modules/@types/long/index.d.ts @@ -0,0 +1,389 @@ +// Type definitions for long.js 4.0.0 +// Project: https://github.com/dcodeIO/long.js +// Definitions by: Peter Kooijmans +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// Definitions by: Denis Cappellin + +export = Long; +export as namespace Long; + +declare const Long: Long.LongConstructor; +type Long = Long.Long; +declare namespace Long { + interface LongConstructor { + /** + * Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as signed integers. See the from* functions below for more convenient ways of constructing Longs. + */ + new( low: number, high?: number, unsigned?: boolean ): Long; + prototype: Long; + /** + * Maximum unsigned value. + */ + MAX_UNSIGNED_VALUE: Long; + + /** + * Maximum signed value. + */ + MAX_VALUE: Long; + + /** + * Minimum signed value. 
+ */ + MIN_VALUE: Long; + + /** + * Signed negative one. + */ + NEG_ONE: Long; + + /** + * Signed one. + */ + ONE: Long; + + /** + * Unsigned one. + */ + UONE: Long; + + /** + * Unsigned zero. + */ + UZERO: Long; + + /** + * Signed zero + */ + ZERO: Long; + + /** + * Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is assumed to use 32 bits. + */ + fromBits( lowBits:number, highBits:number, unsigned?:boolean ): Long; + + /** + * Returns a Long representing the given 32 bit integer value. + */ + fromInt( value: number, unsigned?: boolean ): Long; + + /** + * Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned. + */ + fromNumber( value: number, unsigned?: boolean ): Long; + + /** + * Returns a Long representation of the given string, written using the specified radix. + */ + fromString( str: string, unsigned?: boolean | number, radix?: number ): Long; + + /** + * Creates a Long from its byte representation. + */ + fromBytes( bytes: number[], unsigned?: boolean, le?: boolean ): Long; + + /** + * Creates a Long from its little endian byte representation. + */ + fromBytesLE( bytes: number[], unsigned?: boolean ): Long; + + /** + * Creates a Long from its little endian byte representation. + */ + fromBytesBE( bytes: number[], unsigned?: boolean ): Long; + + /** + * Tests if the specified object is a Long. + */ + isLong( obj: any ): obj is Long; + + /** + * Converts the specified value to a Long. + */ + fromValue( val: Long | number | string | {low: number, high: number, unsigned: boolean}, unsigned?: boolean ): Long; + } + interface Long + { + /** + * The high 32 bits as a signed value. + */ + high: number; + + /** + * The low 32 bits as a signed value. + */ + low: number; + + /** + * Whether unsigned or not. + */ + unsigned: boolean; + + /** + * Returns the sum of this and the specified Long. 
+ */ + add( addend: number | Long | string ): Long; + + /** + * Returns the bitwise AND of this Long and the specified. + */ + and( other: Long | number | string ): Long; + + /** + * Compares this Long's value with the specified's. + */ + compare( other: Long | number | string ): number; + + /** + * Compares this Long's value with the specified's. + */ + comp( other: Long | number | string ): number; + + /** + * Returns this Long divided by the specified. + */ + divide( divisor: Long | number | string ): Long; + + /** + * Returns this Long divided by the specified. + */ + div( divisor: Long | number | string ): Long; + + /** + * Tests if this Long's value equals the specified's. + */ + equals( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value equals the specified's. + */ + eq( other: Long | number | string ): boolean; + + /** + * Gets the high 32 bits as a signed integer. + */ + getHighBits(): number; + + /** + * Gets the high 32 bits as an unsigned integer. + */ + getHighBitsUnsigned(): number; + + /** + * Gets the low 32 bits as a signed integer. + */ + getLowBits(): number; + + /** + * Gets the low 32 bits as an unsigned integer. + */ + getLowBitsUnsigned(): number; + + /** + * Gets the number of bits needed to represent the absolute value of this Long. + */ + getNumBitsAbs(): number; + + /** + * Tests if this Long's value is greater than the specified's. + */ + greaterThan( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is greater than the specified's. + */ + gt( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is greater than or equal the specified's. + */ + greaterThanOrEqual( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is greater than or equal the specified's. + */ + gte( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is even. 
+ */ + isEven(): boolean; + + /** + * Tests if this Long's value is negative. + */ + isNegative(): boolean; + + /** + * Tests if this Long's value is odd. + */ + isOdd(): boolean; + + /** + * Tests if this Long's value is positive. + */ + isPositive(): boolean; + + /** + * Tests if this Long's value equals zero. + */ + isZero(): boolean; + + /** + * Tests if this Long's value is less than the specified's. + */ + lessThan( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is less than the specified's. + */ + lt( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is less than or equal the specified's. + */ + lessThanOrEqual( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value is less than or equal the specified's. + */ + lte( other: Long | number | string ): boolean; + + /** + * Returns this Long modulo the specified. + */ + modulo( other: Long | number | string ): Long; + + /** + * Returns this Long modulo the specified. + */ + mod( other: Long | number | string ): Long; + + /** + * Returns the product of this and the specified Long. + */ + multiply( multiplier: Long | number | string ): Long; + + /** + * Returns the product of this and the specified Long. + */ + mul( multiplier: Long | number | string ): Long; + + /** + * Negates this Long's value. + */ + negate(): Long; + + /** + * Negates this Long's value. + */ + neg(): Long; + + /** + * Returns the bitwise NOT of this Long. + */ + not(): Long; + + /** + * Tests if this Long's value differs from the specified's. + */ + notEquals( other: Long | number | string ): boolean; + + /** + * Tests if this Long's value differs from the specified's. + */ + neq( other: Long | number | string ): boolean; + + /** + * Returns the bitwise OR of this Long and the specified. + */ + or( other: Long | number | string ): Long; + + /** + * Returns this Long with bits shifted to the left by the given amount. 
+ */ + shiftLeft( numBits: number | Long ): Long; + + /** + * Returns this Long with bits shifted to the left by the given amount. + */ + shl( numBits: number | Long ): Long; + + /** + * Returns this Long with bits arithmetically shifted to the right by the given amount. + */ + shiftRight( numBits: number | Long ): Long; + + /** + * Returns this Long with bits arithmetically shifted to the right by the given amount. + */ + shr( numBits: number | Long ): Long; + + /** + * Returns this Long with bits logically shifted to the right by the given amount. + */ + shiftRightUnsigned( numBits: number | Long ): Long; + + /** + * Returns this Long with bits logically shifted to the right by the given amount. + */ + shru( numBits: number | Long ): Long; + + /** + * Returns the difference of this and the specified Long. + */ + subtract( subtrahend: number | Long | string ): Long; + + /** + * Returns the difference of this and the specified Long. + */ + sub( subtrahend: number | Long |string ): Long; + + /** + * Converts the Long to a 32 bit integer, assuming it is a 32 bit integer. + */ + toInt(): number; + + /** + * Converts the Long to a the nearest floating-point representation of this value (double, 53 bit mantissa). + */ + toNumber(): number; + + /** + * Converts this Long to its byte representation. + */ + + toBytes( le?: boolean ): number[]; + + /** + * Converts this Long to its little endian byte representation. + */ + + toBytesLE(): number[]; + + /** + * Converts this Long to its big endian byte representation. + */ + + toBytesBE(): number[]; + + /** + * Converts this Long to signed. + */ + toSigned(): Long; + + /** + * Converts the Long to a string written in the specified radix. + */ + toString( radix?: number ): string; + + /** + * Converts this Long to unsigned. + */ + toUnsigned(): Long; + + /** + * Returns the bitwise XOR of this Long and the given one. 
+ */ + xor( other: Long | number | string ): Long; + } +} diff --git a/node_modules/@types/long/package.json b/node_modules/@types/long/package.json new file mode 100755 index 0000000..35b1e75 --- /dev/null +++ b/node_modules/@types/long/package.json @@ -0,0 +1,25 @@ +{ + "name": "@types/long", + "version": "4.0.2", + "description": "TypeScript definitions for long.js", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/long", + "license": "MIT", + "contributors": [ + { + "name": "Peter Kooijmans", + "url": "https://github.com/peterkooijmans", + "githubUsername": "peterkooijmans" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/long" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "ce51a9fcaeb3f15cee5396e1c4f4b5ca2986a066f9bbe885a51dcdc6dbb22fd5", + "typeScriptVersion": "3.9" +} \ No newline at end of file diff --git a/node_modules/accepts/HISTORY.md b/node_modules/accepts/HISTORY.md new file mode 100644 index 0000000..cb5990c --- /dev/null +++ b/node_modules/accepts/HISTORY.md @@ -0,0 +1,243 @@ +1.3.8 / 2022-02-02 +================== + + * deps: mime-types@~2.1.34 + - deps: mime-db@~1.51.0 + * deps: negotiator@0.6.3 + +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + +1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve 
`Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: 
mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: 
negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/node_modules/accepts/LICENSE b/node_modules/accepts/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/accepts/README.md b/node_modules/accepts/README.md new file mode 100644 index 0000000..82680c5 --- /dev/null +++ b/node_modules/accepts/README.md @@ -0,0 +1,140 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. 
If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME types is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a different typed +respond body based on what the client wants to accept. The server lists it's +preferences in order and will get back the best match between the client and +server. + +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[github-actions-ci-image]: 
https://badgen.net/github/checks/jshttp/accepts/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts diff --git a/node_modules/accepts/index.js b/node_modules/accepts/index.js new file mode 100644 index 0000000..e9b2f63 --- /dev/null +++ b/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. 
+ * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... + * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... 
+ * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... 
+ * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. + * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/node_modules/accepts/package.json b/node_modules/accepts/package.json new file mode 100644 index 0000000..0f2d15d --- /dev/null +++ b/node_modules/accepts/package.json @@ -0,0 +1,47 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.8", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "4.3.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + 
"test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/node_modules/adm-zip/LICENSE b/node_modules/adm-zip/LICENSE new file mode 100644 index 0000000..f748c3d --- /dev/null +++ b/node_modules/adm-zip/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012 Another-D-Mention Software and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/adm-zip/README.md b/node_modules/adm-zip/README.md new file mode 100644 index 0000000..60d7a12 --- /dev/null +++ b/node_modules/adm-zip/README.md @@ -0,0 +1,65 @@ +# ADM-ZIP for NodeJS with added support for electron original-fs + +ADM-ZIP is a pure JavaScript implementation for zip data compression for [NodeJS](https://nodejs.org/). 
+ +# Installation + +With [npm](https://www.npmjs.com/) do: + + $ npm install adm-zip + +## What is it good for? + +The library allows you to: + +- decompress zip files directly to disk or in memory buffers +- compress files and store them to disk in .zip format or in compressed buffers +- update content of/add new/delete files from an existing .zip + +# Dependencies + +There are no other nodeJS libraries that ADM-ZIP is dependent of + +# Examples + +## Basic usage + +```javascript +var AdmZip = require("adm-zip"); + +// reading archives +var zip = new AdmZip("./my_file.zip"); +var zipEntries = zip.getEntries(); // an array of ZipEntry records + +zipEntries.forEach(function (zipEntry) { + console.log(zipEntry.toString()); // outputs zip entries information + if (zipEntry.entryName == "my_file.txt") { + console.log(zipEntry.getData().toString("utf8")); + } +}); +// outputs the content of some_folder/my_file.txt +console.log(zip.readAsText("some_folder/my_file.txt")); +// extracts the specified file to the specified location +zip.extractEntryTo(/*entry name*/ "some_folder/my_file.txt", /*target path*/ "/home/me/tempfolder", /*maintainEntryPath*/ false, /*overwrite*/ true); +// extracts everything +zip.extractAllTo(/*target path*/ "/home/me/zipcontent/", /*overwrite*/ true); + +// creating archives +var zip = new AdmZip(); + +// add file directly +var content = "inner content of the file"; +zip.addFile("test.txt", Buffer.from(content, "utf8"), "entry comment goes here"); +// add local file +zip.addLocalFile("/home/me/some_picture.png"); +// get everything as a buffer +var willSendthis = zip.toBuffer(); +// or write everything to disk +zip.writeZip(/*target file name*/ "/home/me/files.zip"); + +// ... more examples in the wiki +``` + +For more detailed information please check out the [wiki](https://github.com/cthackers/adm-zip/wiki). 
+ +[![Build Status](https://travis-ci.org/cthackers/adm-zip.svg?branch=master)](https://travis-ci.org/cthackers/adm-zip) diff --git a/node_modules/adm-zip/adm-zip.js b/node_modules/adm-zip/adm-zip.js new file mode 100644 index 0000000..e1f1ce5 --- /dev/null +++ b/node_modules/adm-zip/adm-zip.js @@ -0,0 +1,786 @@ +const Utils = require("./util"); +const pth = require("path"); +const ZipEntry = require("./zipEntry"); +const ZipFile = require("./zipFile"); + +const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); +const get_Str = (val, def) => (typeof val === "string" ? val : def); + +const defaultOptions = { + // option "noSort" : if true it disables files sorting + noSort: false, + // read entries during load (initial loading may be slower) + readEntries: false, + // default method is none + method: Utils.Constants.NONE, + // file system + fs: null +}; + +module.exports = function (/**String*/ input, /** object */ options) { + let inBuffer = null; + + // create object based default options, allowing them to be overwritten + const opts = Object.assign(Object.create(null), defaultOptions); + + // test input variable + if (input && "object" === typeof input) { + // if value is not buffer we accept it to be object with options + if (!(input instanceof Uint8Array)) { + Object.assign(opts, input); + input = opts.input ? 
opts.input : undefined; + if (opts.input) delete opts.input; + } + + // if input is buffer + if (Buffer.isBuffer(input)) { + inBuffer = input; + opts.method = Utils.Constants.BUFFER; + input = undefined; + } + } + + // assign options + Object.assign(opts, options); + + // instanciate utils filesystem + const filetools = new Utils(opts); + + // if input is file name we retrieve its content + if (input && "string" === typeof input) { + // load zip file + if (filetools.fs.existsSync(input)) { + opts.method = Utils.Constants.FILE; + opts.filename = input; + inBuffer = filetools.fs.readFileSync(input); + } else { + throw new Error(Utils.Errors.INVALID_FILENAME); + } + } + + // create variable + const _zip = new ZipFile(inBuffer, opts); + + const { canonical, sanitize } = Utils; + + function getEntry(/**Object*/ entry) { + if (entry && _zip) { + var item; + // If entry was given as a file name + if (typeof entry === "string") item = _zip.getEntry(entry); + // if entry was given as a ZipEntry object + if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); + + if (item) { + return item; + } + } + return null; + } + + function fixPath(zipPath) { + const { join, normalize, sep } = pth.posix; + // convert windows file separators and normalize + return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); + } + + return { + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param entry ZipEntry object or String with the full path of the entry + * + * @return Buffer or Null in case of error + */ + readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { + var item = getEntry(entry); + return (item && item.getData(pass)) || null; + }, + + /** + * Asynchronous readFile + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * + * @return Buffer or Null in case of error + */ + 
readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(callback); + } else { + callback(null, "getEntry failed for:" + entry); + } + }, + + /** + * Extracts the given entry from the archive and returns the content as plain text in the given encoding + * @param entry ZipEntry object or String with the full path of the entry + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsText: function (/**Object*/ entry, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + var data = item.getData(); + if (data && data.length) { + return data.toString(encoding || "utf8"); + } + } + return ""; + }, + + /** + * Asynchronous readAsText + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(function (data, err) { + if (err) { + callback(data, err); + return; + } + + if (data && data.length) { + callback(data.toString(encoding || "utf8")); + } else { + callback(""); + } + }); + } else { + callback(""); + } + }, + + /** + * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory + * + * @param entry + */ + deleteFile: function (/**Object*/ entry) { + // @TODO: test deleteFile + var item = getEntry(entry); + if (item) { + _zip.deleteEntry(item.entryName); + } + }, + + /** + * Adds a comment to the zip. The zip must be rewritten after adding the comment. 
+ * + * @param comment + */ + addZipComment: function (/**String*/ comment) { + // @TODO: test addZipComment + _zip.comment = comment; + }, + + /** + * Returns the zip comment + * + * @return String + */ + getZipComment: function () { + return _zip.comment || ""; + }, + + /** + * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment + * The comment cannot exceed 65535 characters in length + * + * @param entry + * @param comment + */ + addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { + var item = getEntry(entry); + if (item) { + item.comment = comment; + } + }, + + /** + * Returns the comment of the specified entry + * + * @param entry + * @return String + */ + getZipEntryComment: function (/**Object*/ entry) { + var item = getEntry(entry); + if (item) { + return item.comment || ""; + } + return ""; + }, + + /** + * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content + * + * @param entry + * @param content + */ + updateFile: function (/**Object*/ entry, /**Buffer*/ content) { + var item = getEntry(entry); + if (item) { + item.setData(content); + } + }, + + /** + * Adds a file from the disk to the archive + * + * @param localPath File to add to zip + * @param zipPath Optional path inside the zip + * @param zipName Optional name for the file + */ + addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { + if (filetools.fs.existsSync(localPath)) { + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // p - local file name + var p = localPath.split("\\").join("/").split("/").pop(); + + // add file name into zippath + zipPath += zipName ? 
zipName : p; + + // read file attributes + const _attr = filetools.fs.statSync(localPath); + + // add file into zip file + this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, + + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param localPath + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter, /**=number|object*/ attr) { + // Prepare filter + if (filter instanceof RegExp) { + // if filter is RegExp wrap it + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + // if filter is not function we will replace it + filter = function () { + return true; + }; + } + + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // normalize the path first + localPath = pth.normalize(localPath); + + if (filetools.fs.existsSync(localPath)) { + const items = filetools.findFiles(localPath); + const self = this; + + if (items.length) { + items.forEach(function (filepath) { + var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix + if (filter(p)) { + var stats = filetools.fs.statSync(filepath); + if (stats.isFile()) { + self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", attr ? attr : stats); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", attr ? 
attr : stats); + } + } + }); + } + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, + + /** + * Asynchronous addLocalFile + * @param localPath + * @param callback + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { + if (filter instanceof RegExp) { + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + filter = function () { + return true; + }; + } + + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // normalize the path first + localPath = pth.normalize(localPath); + + var self = this; + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { + callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } else if (err) { + callback(undefined, err); + } else { + var items = filetools.findFiles(localPath); + var i = -1; + + var next = function () { + i += 1; + if (i < items.length) { + var filepath = items[i]; + var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix + p = p + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix + if (filter(p)) { + filetools.fs.stat(filepath, function (er0, stats) { + if (er0) callback(undefined, er0); + if (stats.isFile()) { + filetools.fs.readFile(filepath, function (er1, data) { + if (er1) { + callback(undefined, er1); + } else { + self.addFile(zipPath + p, data, "", stats); + next(); + } + }); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); + next(); + } + }); + } else { + process.nextTick(() => { + next(); + }); + } + } else { + callback(true, undefined); + } + }; + + next(); + } + }); + }, + + /** + * + * @param {string} 
localPath - path where files will be extracted + * @param {object} props - optional properties + * @param {string} props.zipPath - optional path inside zip + * @param {regexp, function} props.filter - RegExp or Function if files match will be included. + */ + addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { + return new Promise((resolve, reject) => { + const { filter, zipPath } = Object.assign({}, props); + this.addLocalFolderAsync( + localPath, + (done, err) => { + if (err) reject(err); + if (done) resolve(this); + }, + zipPath, + filter + ); + }); + }, + + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the entryName must end in / and a null buffer should be provided. + * Comment and attributes are optional + * + * @param {string} entryName + * @param {Buffer | string} content - file content as buffer or utf8 coded string + * @param {string} comment - file comment + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { + let entry = getEntry(entryName); + const update = entry != null; + + // prepare new entry + if (!update) { + entry = new ZipEntry(); + entry.entryName = entryName; + } + entry.comment = comment || ""; + + const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; + + // last modification time from file stats + if (isStat) { + entry.header.time = attr.mtime; + } + + // Set file attribute + var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) + + // extended attributes field for Unix + // set file type either S_IFDIR / S_IFREG + let unix = entry.isDirectory ? 
0x4000 : 0x8000; + + if (isStat) { + // File attributes from file stats + unix |= 0xfff & attr.mode; + } else if ("number" === typeof attr) { + // attr from given attr values + unix |= 0xfff & attr; + } else { + // Default values: + unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) + } + + fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes + + entry.attr = fileattr; + + entry.setData(content); + if (!update) _zip.setEntry(entry); + }, + + /** + * Returns an array of ZipEntry objects representing the files and folders inside the archive + * + * @return Array + */ + getEntries: function () { + return _zip ? _zip.entries : []; + }, + + /** + * Returns a ZipEntry object representing the file or folder specified by ``name``. + * + * @param name + * @return ZipEntry + */ + getEntry: function (/**String*/ name) { + return getEntry(name); + }, + + getEntryCount: function () { + return _zip.getEntryCount(); + }, + + forEach: function (callback) { + return _zip.forEach(callback); + }, + + /** + * Extracts the given entry to the given targetPath + * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted + * + * @param entry ZipEntry object or String with the full path of the entry + * @param targetPath Target folder where to write the file + * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder + * will be created in targetPath as well. Default is TRUE + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. 
+ * Default is FALSE + * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) + * + * @return Boolean + */ + extractEntryTo: function ( + /**Object*/ entry, + /**String*/ targetPath, + /**Boolean*/ maintainEntryPath, + /**Boolean*/ overwrite, + /**Boolean*/ keepOriginalPermission, + /**String**/ outFileName + ) { + overwrite = get_Bool(overwrite, false); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + maintainEntryPath = get_Bool(maintainEntryPath, true); + outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); + + var item = getEntry(entry); + if (!item) { + throw new Error(Utils.Errors.NO_ENTRY); + } + + var entryName = canonical(item.entryName); + + var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); + + if (item.isDirectory) { + var children = _zip.getEntryChildren(item); + children.forEach(function (child) { + if (child.isDirectory) return; + var content = child.getData(); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + var name = canonical(child.entryName); + var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; + filetools.writeFileTo(childName, content, overwrite, fileAttr); + }); + return true; + } + + var content = item.getData(); + if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + + if (filetools.fs.existsSync(target) && !overwrite) { + throw new Error(Utils.Errors.CANT_OVERRIDE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileTo(target, content, overwrite, fileAttr); + + return true; + }, + + /** + * Test the archive + * + */ + test: function (pass) { + if (!_zip) { + return false; + } + + for (var entry in _zip.entries) { + try { + if (entry.isDirectory) { + continue; + } + var content = _zip.entries[entry].getData(pass); + if (!content) { + return false; + } + } catch (err) { + return false; + } + } + return true; + }, + + /** + * Extracts the entire archive to the given location + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + */ + extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { + overwrite = get_Bool(overwrite, false); + pass = get_Str(keepOriginalPermission, pass); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + throw new Error(Utils.Errors.NO_ZIP); + } + _zip.entries.forEach(function (entry) { + var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); + if (entry.isDirectory) { + filetools.makeDir(entryName); + return; + } + var content = entry.getData(pass); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileTo(entryName, content, overwrite, fileAttr); + try { + filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); + } catch (err) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + }); + }, + + /** + * Asynchronous extractAllTo + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. + */ + extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { + overwrite = get_Bool(overwrite, false); + if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!callback) { + callback = function (err) { + throw new Error(err); + }; + } + if (!_zip) { + callback(new Error(Utils.Errors.NO_ZIP)); + return; + } + + targetPath = pth.resolve(targetPath); + // convert entryName to + const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); + const getError = (msg, file) => new Error(msg + ': "' + file + '"'); + + // separate directories from files + const dirEntries = []; + const fileEntries = new Set(); + _zip.entries.forEach((e) => { + if (e.isDirectory) { + dirEntries.push(e); + } else { + fileEntries.add(e); + } + }); + + // Create directory entries first synchronously + // this prevents race condition and assures folders are there before writing files + for (const entry of dirEntries) { + const dirPath = getPath(entry); + // The reverse operation for attr depend on method addFile() + const dirAttr = 
keepOriginalPermission ? entry.header.fileAttr : undefined; + try { + filetools.makeDir(dirPath); + if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); + // in unix timestamp will change if files are later added to folder, but still + filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); + } catch (er) { + callback(getError("Unable to create folder", dirPath)); + } + } + + // callback wrapper, for some house keeping + const done = () => { + if (fileEntries.size === 0) { + callback(); + } + }; + + // Extract file entries asynchronously + for (const entry of fileEntries.values()) { + const entryName = pth.normalize(canonical(entry.entryName.toString())); + const filePath = sanitize(targetPath, entryName); + entry.getDataAsync(function (content, err_1) { + if (err_1) { + callback(new Error(err_1)); + return; + } + if (!content) { + callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); + } else { + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { + if (!succ) { + callback(getError("Unable to write file", filePath)); + return; + } + filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { + if (err_2) { + callback(getError("Unable to set times", filePath)); + return; + } + fileEntries.delete(entry); + // call the callback if it was last entry + done(); + }); + }); + } + }); + } + // call the callback if fileEntries was empty + done(); + }, + + /** + * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip + * + * @param targetFileName + * @param callback + */ + writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { + if (arguments.length === 1) { + if (typeof targetFileName === "function") { + callback = targetFileName; + targetFileName = ""; + } + } + + if (!targetFileName && opts.filename) { + targetFileName = opts.filename; + } + if (!targetFileName) return; + + var zipData = _zip.compressToBuffer(); + if (zipData) { + var ok = filetools.writeFileTo(targetFileName, zipData, true); + if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); + } + }, + + writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { + const { overwrite, perm } = Object.assign({ overwrite: true }, props); + + return new Promise((resolve, reject) => { + // find file name + if (!targetFileName && opts.filename) targetFileName = opts.filename; + if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); + + this.toBufferPromise().then((zipData) => { + const ret = (done) => (done ? 
resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); + filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); + }, reject); + }); + }, + + toBufferPromise: function () { + return new Promise((resolve, reject) => { + _zip.toAsyncBuffer(resolve, reject); + }); + }, + + /** + * Returns the content of the entire zip file as a Buffer object + * + * @return Buffer + */ + toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { + this.valueOf = 2; + if (typeof onSuccess === "function") { + _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); + return null; + } + return _zip.compressToBuffer(); + } + }; +}; diff --git a/node_modules/adm-zip/headers/entryHeader.js b/node_modules/adm-zip/headers/entryHeader.js new file mode 100644 index 0000000..572b9a7 --- /dev/null +++ b/node_modules/adm-zip/headers/entryHeader.js @@ -0,0 +1,338 @@ +var Utils = require("../util"), + Constants = Utils.Constants; + +/* The central directory file header */ +module.exports = function () { + var _verMade = 20, // v2.0 + _version = 10, // v1.0 + _flags = 0, + _method = 0, + _time = 0, + _crc = 0, + _compressedSize = 0, + _size = 0, + _fnameLen = 0, + _extraLen = 0, + _comLen = 0, + _diskStart = 0, + _inattr = 0, + _attr = 0, + _offset = 0; + + _verMade |= Utils.isWin ? 0x0a00 : 0x0300; + + // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. 
+ // Without it file names may be corrupted for other apps when file names use unicode chars + _flags |= Constants.FLG_EFS; + + var _dataHeader = {}; + + function setTime(val) { + val = new Date(val); + _time = + (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 + ((val.getMonth() + 1) << 21) | // b05-08 month + (val.getDate() << 16) | // b00-04 hour + // 2 bytes time + (val.getHours() << 11) | // b11-15 hour + (val.getMinutes() << 5) | // b05-10 minute + (val.getSeconds() >> 1); // b00-04 seconds divided by 2 + } + + setTime(+new Date()); + + return { + get made() { + return _verMade; + }, + set made(val) { + _verMade = val; + }, + + get version() { + return _version; + }, + set version(val) { + _version = val; + }, + + get flags() { + return _flags; + }, + set flags(val) { + _flags = val; + }, + + get method() { + return _method; + }, + set method(val) { + switch (val) { + case Constants.STORED: + this.version = 10; + case Constants.DEFLATED: + default: + this.version = 20; + } + _method = val; + }, + + get time() { + return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); + }, + set time(val) { + setTime(val); + }, + + get crc() { + return _crc; + }, + set crc(val) { + _crc = Math.max(0, val) >>> 0; + }, + + get compressedSize() { + return _compressedSize; + }, + set compressedSize(val) { + _compressedSize = Math.max(0, val) >>> 0; + }, + + get size() { + return _size; + }, + set size(val) { + _size = Math.max(0, val) >>> 0; + }, + + get fileNameLength() { + return _fnameLen; + }, + set fileNameLength(val) { + _fnameLen = val; + }, + + get extraLength() { + return _extraLen; + }, + set extraLength(val) { + _extraLen = val; + }, + + get commentLength() { + return _comLen; + }, + set commentLength(val) { + _comLen = val; + }, + + get diskNumStart() { + return _diskStart; + }, + set diskNumStart(val) { + _diskStart = Math.max(0, val) >>> 
0; + }, + + get inAttr() { + return _inattr; + }, + set inAttr(val) { + _inattr = Math.max(0, val) >>> 0; + }, + + get attr() { + return _attr; + }, + set attr(val) { + _attr = Math.max(0, val) >>> 0; + }, + + // get Unix file permissions + get fileAttr() { + return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0; + }, + + get offset() { + return _offset; + }, + set offset(val) { + _offset = Math.max(0, val) >>> 0; + }, + + get encripted() { + return (_flags & 1) === 1; + }, + + get entryHeaderSize() { + return Constants.CENHDR + _fnameLen + _extraLen + _comLen; + }, + + get realDataOffset() { + return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; + }, + + get dataHeader() { + return _dataHeader; + }, + + loadDataHeaderFromBinary: function (/*Buffer*/ input) { + var data = input.slice(_offset, _offset + Constants.LOCHDR); + // 30 bytes and should start with "PK\003\004" + if (data.readUInt32LE(0) !== Constants.LOCSIG) { + throw new Error(Utils.Errors.INVALID_LOC); + } + _dataHeader = { + // version needed to extract + version: data.readUInt16LE(Constants.LOCVER), + // general purpose bit flag + flags: data.readUInt16LE(Constants.LOCFLG), + // compression method + method: data.readUInt16LE(Constants.LOCHOW), + // modification time (2 bytes time, 2 bytes date) + time: data.readUInt32LE(Constants.LOCTIM), + // uncompressed file crc-32 value + crc: data.readUInt32LE(Constants.LOCCRC), + // compressed size + compressedSize: data.readUInt32LE(Constants.LOCSIZ), + // uncompressed size + size: data.readUInt32LE(Constants.LOCLEN), + // filename length + fnameLen: data.readUInt16LE(Constants.LOCNAM), + // extra field length + extraLen: data.readUInt16LE(Constants.LOCEXT) + }; + }, + + loadFromBinary: function (/*Buffer*/ data) { + // data should be 46 bytes and start with "PK 01 02" + if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { + throw new Error(Utils.Errors.INVALID_CEN); + } + // version made by + _verMade 
= data.readUInt16LE(Constants.CENVEM); + // version needed to extract + _version = data.readUInt16LE(Constants.CENVER); + // encrypt, decrypt flags + _flags = data.readUInt16LE(Constants.CENFLG); + // compression method + _method = data.readUInt16LE(Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + _time = data.readUInt32LE(Constants.CENTIM); + // uncompressed file crc-32 value + _crc = data.readUInt32LE(Constants.CENCRC); + // compressed size + _compressedSize = data.readUInt32LE(Constants.CENSIZ); + // uncompressed size + _size = data.readUInt32LE(Constants.CENLEN); + // filename length + _fnameLen = data.readUInt16LE(Constants.CENNAM); + // extra field length + _extraLen = data.readUInt16LE(Constants.CENEXT); + // file comment length + _comLen = data.readUInt16LE(Constants.CENCOM); + // volume number start + _diskStart = data.readUInt16LE(Constants.CENDSK); + // internal file attributes + _inattr = data.readUInt16LE(Constants.CENATT); + // external file attributes + _attr = data.readUInt32LE(Constants.CENATX); + // LOC header offset + _offset = data.readUInt32LE(Constants.CENOFF); + }, + + dataHeaderToBinary: function () { + // LOC header size (30 bytes) + var data = Buffer.alloc(Constants.LOCHDR); + // "PK\003\004" + data.writeUInt32LE(Constants.LOCSIG, 0); + // version needed to extract + data.writeUInt16LE(_version, Constants.LOCVER); + // general purpose bit flag + data.writeUInt16LE(_flags, Constants.LOCFLG); + // compression method + data.writeUInt16LE(_method, Constants.LOCHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.LOCTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.LOCCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); + // uncompressed size + data.writeUInt32LE(_size, Constants.LOCLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.LOCNAM); + // extra field length + data.writeUInt16LE(_extraLen, 
Constants.LOCEXT); + return data; + }, + + entryHeaderToBinary: function () { + // CEN header size (46 bytes) + var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); + // "PK\001\002" + data.writeUInt32LE(Constants.CENSIG, 0); + // version made by + data.writeUInt16LE(_verMade, Constants.CENVEM); + // version needed to extract + data.writeUInt16LE(_version, Constants.CENVER); + // encrypt, decrypt flags + data.writeUInt16LE(_flags, Constants.CENFLG); + // compression method + data.writeUInt16LE(_method, Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.CENTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.CENCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.CENSIZ); + // uncompressed size + data.writeUInt32LE(_size, Constants.CENLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.CENNAM); + // extra field length + data.writeUInt16LE(_extraLen, Constants.CENEXT); + // file comment length + data.writeUInt16LE(_comLen, Constants.CENCOM); + // volume number start + data.writeUInt16LE(_diskStart, Constants.CENDSK); + // internal file attributes + data.writeUInt16LE(_inattr, Constants.CENATT); + // external file attributes + data.writeUInt32LE(_attr, Constants.CENATX); + // LOC header offset + data.writeUInt32LE(_offset, Constants.CENOFF); + // fill all with + data.fill(0x00, Constants.CENHDR); + return data; + }, + + toJSON: function () { + const bytes = function (nr) { + return nr + " bytes"; + }; + + return { + made: _verMade, + version: _version, + flags: _flags, + method: Utils.methodToString(_method), + time: this.time, + crc: "0x" + _crc.toString(16).toUpperCase(), + compressedSize: bytes(_compressedSize), + size: bytes(_size), + fileNameLength: bytes(_fnameLen), + extraLength: bytes(_extraLen), + commentLength: bytes(_comLen), + diskNumStart: _diskStart, + inAttr: _inattr, + attr: _attr, + offset: _offset, + 
entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/headers/index.js b/node_modules/adm-zip/headers/index.js new file mode 100644 index 0000000..b54a722 --- /dev/null +++ b/node_modules/adm-zip/headers/index.js @@ -0,0 +1,2 @@ +exports.EntryHeader = require("./entryHeader"); +exports.MainHeader = require("./mainHeader"); diff --git a/node_modules/adm-zip/headers/mainHeader.js b/node_modules/adm-zip/headers/mainHeader.js new file mode 100644 index 0000000..dcea01d --- /dev/null +++ b/node_modules/adm-zip/headers/mainHeader.js @@ -0,0 +1,130 @@ +var Utils = require("../util"), + Constants = Utils.Constants; + +/* The entries in the end of central directory */ +module.exports = function () { + var _volumeEntries = 0, + _totalEntries = 0, + _size = 0, + _offset = 0, + _commentLength = 0; + + return { + get diskEntries() { + return _volumeEntries; + }, + set diskEntries(/*Number*/ val) { + _volumeEntries = _totalEntries = val; + }, + + get totalEntries() { + return _totalEntries; + }, + set totalEntries(/*Number*/ val) { + _totalEntries = _volumeEntries = val; + }, + + get size() { + return _size; + }, + set size(/*Number*/ val) { + _size = val; + }, + + get offset() { + return _offset; + }, + set offset(/*Number*/ val) { + _offset = val; + }, + + get commentLength() { + return _commentLength; + }, + set commentLength(/*Number*/ val) { + _commentLength = val; + }, + + get mainHeaderSize() { + return Constants.ENDHDR + _commentLength; + }, + + loadFromBinary: function (/*Buffer*/ data) { + // data should be 22 bytes and start with "PK 05 06" + // or be 56+ bytes and start with "PK 06 06" for Zip64 + if ( + (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && + (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) + ) { + throw new 
Error(Utils.Errors.INVALID_END); + } + + if (data.readUInt32LE(0) === Constants.ENDSIG) { + // number of entries on this volume + _volumeEntries = data.readUInt16LE(Constants.ENDSUB); + // total number of entries + _totalEntries = data.readUInt16LE(Constants.ENDTOT); + // central directory size in bytes + _size = data.readUInt32LE(Constants.ENDSIZ); + // offset of first CEN header + _offset = data.readUInt32LE(Constants.ENDOFF); + // zip file comment length + _commentLength = data.readUInt16LE(Constants.ENDCOM); + } else { + // number of entries on this volume + _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); + // total number of entries + _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); + // central directory size in bytes + _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); + // offset of first CEN header + _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); + + _commentLength = 0; + } + }, + + toBinary: function () { + var b = Buffer.alloc(Constants.ENDHDR + _commentLength); + // "PK 05 06" signature + b.writeUInt32LE(Constants.ENDSIG, 0); + b.writeUInt32LE(0, 4); + // number of entries on this volume + b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); + // total number of entries + b.writeUInt16LE(_totalEntries, Constants.ENDTOT); + // central directory size in bytes + b.writeUInt32LE(_size, Constants.ENDSIZ); + // offset of first CEN header + b.writeUInt32LE(_offset, Constants.ENDOFF); + // zip file comment length + b.writeUInt16LE(_commentLength, Constants.ENDCOM); + // fill comment memory with spaces so no garbage is left there + b.fill(" ", Constants.ENDHDR); + + return b; + }, + + toJSON: function () { + // creates 0x0000 style output + const offset = function (nr, len) { + let offs = nr.toString(16).toUpperCase(); + while (offs.length < len) offs = "0" + offs; + return "0x" + offs; + }; + + return { + diskEntries: _volumeEntries, + totalEntries: _totalEntries, + size: _size + " bytes", + offset: 
offset(_offset, 4), + commentLength: _commentLength + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; + // Misspelled \ No newline at end of file diff --git a/node_modules/adm-zip/methods/deflater.js b/node_modules/adm-zip/methods/deflater.js new file mode 100644 index 0000000..992de8f --- /dev/null +++ b/node_modules/adm-zip/methods/deflater.js @@ -0,0 +1,33 @@ +module.exports = function (/*Buffer*/ inbuf) { + var zlib = require("zlib"); + + var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; + + return { + deflate: function () { + return zlib.deflateRawSync(inbuf, opts); + }, + + deflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createDeflateRaw(opts), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; +}; diff --git a/node_modules/adm-zip/methods/index.js b/node_modules/adm-zip/methods/index.js new file mode 100644 index 0000000..5285677 --- /dev/null +++ b/node_modules/adm-zip/methods/index.js @@ -0,0 +1,3 @@ +exports.Deflater = require("./deflater"); +exports.Inflater = require("./inflater"); +exports.ZipCrypto = require("./zipcrypto"); diff --git a/node_modules/adm-zip/methods/inflater.js b/node_modules/adm-zip/methods/inflater.js new file mode 100644 index 0000000..3ed0d58 --- /dev/null +++ b/node_modules/adm-zip/methods/inflater.js @@ -0,0 +1,31 @@ +module.exports = function (/*Buffer*/ inbuf) { + var zlib = require("zlib"); + + return { + inflate: function () { + return zlib.inflateRawSync(inbuf); + }, + + inflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createInflateRaw(), + parts = [], + total = 0; + tmp.on("data", 
function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; +}; diff --git a/node_modules/adm-zip/methods/zipcrypto.js b/node_modules/adm-zip/methods/zipcrypto.js new file mode 100644 index 0000000..701b5ce --- /dev/null +++ b/node_modules/adm-zip/methods/zipcrypto.js @@ -0,0 +1,170 @@ +"use strict"; + +// node crypt, we use it for generate salt +// eslint-disable-next-line node/no-unsupported-features/node-builtins +const { randomFillSync } = require("crypto"); + +// generate CRC32 lookup table +const crctable = new Uint32Array(256).map((t, crc) => { + for (let j = 0; j < 8; j++) { + if (0 !== (crc & 1)) { + crc = (crc >>> 1) ^ 0xedb88320; + } else { + crc >>>= 1; + } + } + return crc >>> 0; +}); + +// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) +const uMul = (a, b) => Math.imul(a, b) >>> 0; + +// crc32 byte single update (actually same function is part of utils.crc32 function :) ) +const crc32update = (pCrc32, bval) => { + return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +}; + +// function for generating salt for encrytion header +const genSalt = () => { + if ("function" === typeof randomFillSync) { + return randomFillSync(Buffer.alloc(12)); + } else { + // fallback if function is not defined + return genSalt.node(); + } +}; + +// salt generation with node random function (mainly as fallback) +genSalt.node = () => { + const salt = Buffer.alloc(12); + const len = salt.length; + for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; + return salt; +}; + +// general config +const config = { + genSalt +}; + +// Class Initkeys handles same basic ops with keys +function Initkeys(pw) { + const pass = Buffer.isBuffer(pw) ? 
pw : Buffer.from(pw); + this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); + for (let i = 0; i < pass.length; i++) { + this.updateKeys(pass[i]); + } +} + +Initkeys.prototype.updateKeys = function (byteValue) { + const keys = this.keys; + keys[0] = crc32update(keys[0], byteValue); + keys[1] += keys[0] & 0xff; + keys[1] = uMul(keys[1], 134775813) + 1; + keys[2] = crc32update(keys[2], keys[1] >>> 24); + return byteValue; +}; + +Initkeys.prototype.next = function () { + const k = (this.keys[2] | 2) >>> 0; // key + return (uMul(k, k ^ 1) >> 8) & 0xff; // decode +}; + +function make_decrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return decrypter function + return function (/*Buffer*/ data) { + // result - we create new Buffer for results + const result = Buffer.alloc(data.length); + let pos = 0; + // process input data + for (let c of data) { + //c ^= keys.next(); + //result[pos++] = c; // decode & Save Value + result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte + } + return result; + }; +} + +function make_encrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return encrypting function, result and pos is here so we dont have to merge buffers later + return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { + // result - we create new Buffer for results + if (!result) result = Buffer.alloc(data.length); + // process input data + for (let c of data) { + const k = keys.next(); // save key byte + result[pos++] = c ^ k; // save val + keys.updateKeys(c); // update keys with decoded byte + } + return result; + }; +} + +function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { + if (!data || !Buffer.isBuffer(data) || data.length < 12) { + return Buffer.alloc(0); + } + + // 1. We Initialize and generate decrypting function + const decrypter = make_decrypter(pwd); + + // 2. 
decrypt salt what is always 12 bytes and is a part of file content + const salt = decrypter(data.slice(0, 12)); + + // 3. does password meet expectations + if (salt[11] !== header.crc >>> 24) { + throw "ADM-ZIP: Wrong Password"; + } + + // 4. decode content + return decrypter(data.slice(12)); +} + +// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality +function _salter(data) { + if (Buffer.isBuffer(data) && data.length >= 12) { + // be aware - currently salting buffer data is modified + config.genSalt = function () { + return data.slice(0, 12); + }; + } else if (data === "node") { + // test salt generation with node random function + config.genSalt = genSalt.node; + } else { + // if value is not acceptable config gets reset. + config.genSalt = genSalt; + } +} + +function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { + // 1. test data if data is not Buffer we make buffer from it + if (data == null) data = Buffer.alloc(0); + // if data is not buffer be make buffer from it + if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); + + // 2. We Initialize and generate encrypting function + const encrypter = make_encrypter(pwd); + + // 3. generate salt (12-bytes of random data) + const salt = config.genSalt(); + salt[11] = (header.crc >>> 24) & 0xff; + + // old implementations (before PKZip 2.04g) used two byte check + if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; + + // 4. 
create output + const result = Buffer.alloc(data.length + 12); + encrypter(salt, result); + + // finally encode content + return encrypter(data, result, 12); +} + +module.exports = { decrypt, encrypt, _salter }; diff --git a/node_modules/adm-zip/package.json b/node_modules/adm-zip/package.json new file mode 100644 index 0000000..ebc59ea --- /dev/null +++ b/node_modules/adm-zip/package.json @@ -0,0 +1,48 @@ +{ + "name": "adm-zip", + "version": "0.5.10", + "description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk", + "scripts": { + "test": "mocha -R spec", + "test:format": "npm run format:prettier:raw -- --check", + "format": "npm run format:prettier", + "format:prettier": "npm run format:prettier:raw -- --write", + "format:prettier:raw": "prettier \"**/*.{js,yml,json}\"" + }, + "keywords": [ + "zip", + "methods", + "archive", + "unzip" + ], + "homepage": "https://github.com/cthackers/adm-zip", + "author": "Nasca Iacob (https://github.com/cthackers)", + "bugs": { + "email": "sy@another-d-mention.ro", + "url": "https://github.com/cthackers/adm-zip/issues" + }, + "license": "MIT", + "files": [ + "adm-zip.js", + "headers", + "methods", + "util", + "zipEntry.js", + "zipFile.js", + "LICENSE" + ], + "main": "adm-zip.js", + "repository": { + "type": "git", + "url": "https://github.com/cthackers/adm-zip.git" + }, + "engines": { + "node": ">=6.0" + }, + "devDependencies": { + "chai": "^4.3.4", + "mocha": "^10.2.0", + "prettier": "^2.2.1", + "rimraf": "^3.0.2" + } +} diff --git a/node_modules/adm-zip/util/constants.js b/node_modules/adm-zip/util/constants.js new file mode 100644 index 0000000..119954b --- /dev/null +++ b/node_modules/adm-zip/util/constants.js @@ -0,0 +1,142 @@ +module.exports = { + /* The local file header */ + LOCHDR : 30, // LOC header size + LOCSIG : 0x04034b50, // "PK\003\004" + LOCVER : 4, // version needed to extract + LOCFLG : 6, // general 
purpose bit flag + LOCHOW : 8, // compression method + LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) + LOCCRC : 14, // uncompressed file crc-32 value + LOCSIZ : 18, // compressed size + LOCLEN : 22, // uncompressed size + LOCNAM : 26, // filename length + LOCEXT : 28, // extra field length + + /* The Data descriptor */ + EXTSIG : 0x08074b50, // "PK\007\008" + EXTHDR : 16, // EXT header size + EXTCRC : 4, // uncompressed file crc-32 value + EXTSIZ : 8, // compressed size + EXTLEN : 12, // uncompressed size + + /* The central directory file header */ + CENHDR : 46, // CEN header size + CENSIG : 0x02014b50, // "PK\001\002" + CENVEM : 4, // version made by + CENVER : 6, // version needed to extract + CENFLG : 8, // encrypt, decrypt flags + CENHOW : 10, // compression method + CENTIM : 12, // modification time (2 bytes time, 2 bytes date) + CENCRC : 16, // uncompressed file crc-32 value + CENSIZ : 20, // compressed size + CENLEN : 24, // uncompressed size + CENNAM : 28, // filename length + CENEXT : 30, // extra field length + CENCOM : 32, // file comment length + CENDSK : 34, // volume number start + CENATT : 36, // internal file attributes + CENATX : 38, // external file attributes (host system dependent) + CENOFF : 42, // LOC header offset + + /* The entries in the end of central directory */ + ENDHDR : 22, // END header size + ENDSIG : 0x06054b50, // "PK\005\006" + ENDSUB : 8, // number of entries on this disk + ENDTOT : 10, // total number of entries + ENDSIZ : 12, // central directory size in bytes + ENDOFF : 16, // offset of first CEN header + ENDCOM : 20, // zip file comment length + + END64HDR : 20, // zip64 END header size + END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" + END64START : 4, // number of the disk with the start of the zip64 + END64OFF : 8, // relative offset of the zip64 end of central directory + END64NUMDISKS : 16, // total number of disks + + ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" + ZIP64HDR : 
56, // zip64 record minimum size + ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE + ZIP64SIZE : 4, // zip64 size of the central directory record + ZIP64VEM : 12, // zip64 version made by + ZIP64VER : 14, // zip64 version needed to extract + ZIP64DSK : 16, // zip64 number of this disk + ZIP64DSKDIR : 20, // number of the disk with the start of the record directory + ZIP64SUB : 24, // number of entries on this disk + ZIP64TOT : 32, // total number of entries + ZIP64SIZB : 40, // zip64 central directory size in bytes + ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number + ZIP64EXTRA : 56, // extensible data sector + + /* Compression methods */ + STORED : 0, // no compression + SHRUNK : 1, // shrunk + REDUCED1 : 2, // reduced with compression factor 1 + REDUCED2 : 3, // reduced with compression factor 2 + REDUCED3 : 4, // reduced with compression factor 3 + REDUCED4 : 5, // reduced with compression factor 4 + IMPLODED : 6, // imploded + // 7 reserved for Tokenizing compression algorithm + DEFLATED : 8, // deflated + ENHANCED_DEFLATED: 9, // enhanced deflated + PKWARE : 10,// PKWare DCL imploded + // 11 reserved by PKWARE + BZIP2 : 12, // compressed using BZIP2 + // 13 reserved by PKWARE + LZMA : 14, // LZMA + // 15-17 reserved by PKWARE + IBM_TERSE : 18, // compressed using IBM TERSE + IBM_LZ77 : 19, // IBM LZ77 z + AES_ENCRYPT : 99, // WinZIP AES encryption method + + /* General purpose bit flag */ + // values can obtained with expression 2**bitnr + FLG_ENC : 1, // Bit 0: encrypted file + FLG_COMP1 : 2, // Bit 1, compression option + FLG_COMP2 : 4, // Bit 2, compression option + FLG_DESC : 8, // Bit 3, data descriptor + FLG_ENH : 16, // Bit 4, enhanced deflating + FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. + FLG_STR : 64, // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. 
+ FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. + FLG_MSK : 4096, // mask header values + + /* Load type */ + FILE : 2, + BUFFER : 1, + NONE : 0, + + /* 4.5 Extensible data fields */ + EF_ID : 0, + EF_SIZE : 2, + + /* Header IDs */ + ID_ZIP64 : 0x0001, + ID_AVINFO : 0x0007, + ID_PFS : 0x0008, + ID_OS2 : 0x0009, + ID_NTFS : 0x000a, + ID_OPENVMS : 0x000c, + ID_UNIX : 0x000d, + ID_FORK : 0x000e, + ID_PATCH : 0x000f, + ID_X509_PKCS7 : 0x0014, + ID_X509_CERTID_F : 0x0015, + ID_X509_CERTID_C : 0x0016, + ID_STRONGENC : 0x0017, + ID_RECORD_MGT : 0x0018, + ID_X509_PKCS7_RL : 0x0019, + ID_IBM1 : 0x0065, + ID_IBM2 : 0x0066, + ID_POSZIP : 0x4690, + + EF_ZIP64_OR_32 : 0xffffffff, + EF_ZIP64_OR_16 : 0xffff, + EF_ZIP64_SUNCOMP : 0, + EF_ZIP64_SCOMP : 8, + EF_ZIP64_RHO : 16, + EF_ZIP64_DSN : 24 +}; diff --git a/node_modules/adm-zip/util/errors.js b/node_modules/adm-zip/util/errors.js new file mode 100644 index 0000000..dde469b --- /dev/null +++ b/node_modules/adm-zip/util/errors.js @@ -0,0 +1,35 @@ +module.exports = { + /* Header error messages */ + INVALID_LOC: "Invalid LOC header (bad signature)", + INVALID_CEN: "Invalid CEN header (bad signature)", + INVALID_END: "Invalid END header (bad signature)", + + /* ZipEntry error messages*/ + NO_DATA: "Nothing to decompress", + BAD_CRC: "CRC32 checksum failed", + FILE_IN_THE_WAY: "There is a file in the way: %s", + UNKNOWN_METHOD: "Invalid/unsupported compression method", + + /* Inflater error messages */ + AVAIL_DATA: "inflate::Available inflate data did not terminate", + INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", + TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", + INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", + 
INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", + INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", + INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", + INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", + INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", + INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", + + /* ADM-ZIP error messages */ + CANT_EXTRACT_FILE: "Could not extract the file", + CANT_OVERRIDE: "Target file already exists", + NO_ZIP: "No zip file was loaded", + NO_ENTRY: "Entry doesn't exist", + DIRECTORY_CONTENT_ERROR: "A directory cannot have content", + FILE_NOT_FOUND: "File not found: %s", + NOT_IMPLEMENTED: "Not implemented", + INVALID_FILENAME: "Invalid filename", + INVALID_FORMAT: "Invalid or unsupported zip format. No END header found" +}; diff --git a/node_modules/adm-zip/util/fattr.js b/node_modules/adm-zip/util/fattr.js new file mode 100644 index 0000000..163e2e5 --- /dev/null +++ b/node_modules/adm-zip/util/fattr.js @@ -0,0 +1,79 @@ +const fs = require("./fileSystem").require(); +const pth = require("path"); + +fs.existsSync = fs.existsSync || pth.existsSync; + +module.exports = function (/*String*/ path) { + var _path = path || "", + _obj = newAttr(), + _stat = null; + + function newAttr() { + return { + directory: false, + readonly: false, + hidden: false, + executable: false, + mtime: 0, + atime: 0 + }; + } + + if (_path && fs.existsSync(_path)) { + _stat = fs.statSync(_path); + _obj.directory = _stat.isDirectory(); + _obj.mtime = _stat.mtime; + _obj.atime = _stat.atime; + _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner + _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right + _obj.hidden = pth.basename(_path)[0] === "."; + 
} else { + console.warn("Invalid path: " + _path); + } + + return { + get directory() { + return _obj.directory; + }, + + get readOnly() { + return _obj.readonly; + }, + + get hidden() { + return _obj.hidden; + }, + + get mtime() { + return _obj.mtime; + }, + + get atime() { + return _obj.atime; + }, + + get executable() { + return _obj.executable; + }, + + decodeAttributes: function () {}, + + encodeAttributes: function () {}, + + toJSON: function () { + return { + path: _path, + isDirectory: _obj.directory, + isReadOnly: _obj.readonly, + isHidden: _obj.hidden, + isExecutable: _obj.executable, + mTime: _obj.mtime, + aTime: _obj.atime + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/util/fileSystem.js b/node_modules/adm-zip/util/fileSystem.js new file mode 100644 index 0000000..dee5c2a --- /dev/null +++ b/node_modules/adm-zip/util/fileSystem.js @@ -0,0 +1,11 @@ +exports.require = function () { + if (typeof process === "object" && process.versions && process.versions["electron"]) { + try { + const originalFs = require("original-fs"); + if (Object.keys(originalFs).length > 0) { + return originalFs; + } + } catch (e) {} + } + return require("fs"); +}; diff --git a/node_modules/adm-zip/util/index.js b/node_modules/adm-zip/util/index.js new file mode 100644 index 0000000..6790df4 --- /dev/null +++ b/node_modules/adm-zip/util/index.js @@ -0,0 +1,4 @@ +module.exports = require("./utils"); +module.exports.Constants = require("./constants"); +module.exports.Errors = require("./errors"); +module.exports.FileAttr = require("./fattr"); diff --git a/node_modules/adm-zip/util/utils.js b/node_modules/adm-zip/util/utils.js new file mode 100644 index 0000000..9d732ba --- /dev/null +++ b/node_modules/adm-zip/util/utils.js @@ -0,0 +1,247 @@ +const fsystem = require("./fileSystem").require(); +const pth = require("path"); +const Constants = require("./constants"); +const Errors = 
require("./errors"); +const isWin = typeof process === "object" && "win32" === process.platform; + +const is_Obj = (obj) => obj && typeof obj === "object"; + +// generate CRC32 lookup table +const crcTable = new Uint32Array(256).map((t, c) => { + for (let k = 0; k < 8; k++) { + if ((c & 1) !== 0) { + c = 0xedb88320 ^ (c >>> 1); + } else { + c >>>= 1; + } + } + return c >>> 0; +}); + +// UTILS functions + +function Utils(opts) { + this.sep = pth.sep; + this.fs = fsystem; + + if (is_Obj(opts)) { + // custom filesystem + if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { + this.fs = opts.fs; + } + } +} + +module.exports = Utils; + +// INSTANCED functions + +Utils.prototype.makeDir = function (/*String*/ folder) { + const self = this; + + // Sync - make directories tree + function mkdirSync(/*String*/ fpath) { + let resolvedPath = fpath.split(self.sep)[0]; + fpath.split(self.sep).forEach(function (name) { + if (!name || name.substr(-1, 1) === ":") return; + resolvedPath += self.sep + name; + var stat; + try { + stat = self.fs.statSync(resolvedPath); + } catch (e) { + self.fs.mkdirSync(resolvedPath); + } + if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); + }); + } + + mkdirSync(folder); +}; + +Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { + const self = this; + if (self.fs.existsSync(path)) { + if (!overwrite) return false; // cannot overwrite + + var stat = self.fs.statSync(path); + if (stat.isDirectory()) { + return false; + } + } + var folder = pth.dirname(path); + if (!self.fs.existsSync(folder)) { + self.makeDir(folder); + } + + var fd; + try { + fd = self.fs.openSync(path, "w", 438); // 0666 + } catch (e) { + self.fs.chmodSync(path, 438); + fd = self.fs.openSync(path, "w", 438); + } + if (fd) { + try { + self.fs.writeSync(fd, content, 0, content.length, 0); + } finally { + self.fs.closeSync(fd); + } + } + self.fs.chmodSync(path, attr || 438); 
+ return true; +}; + +Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { + if (typeof attr === "function") { + callback = attr; + attr = undefined; + } + + const self = this; + + self.fs.exists(path, function (exist) { + if (exist && !overwrite) return callback(false); + + self.fs.stat(path, function (err, stat) { + if (exist && stat.isDirectory()) { + return callback(false); + } + + var folder = pth.dirname(path); + self.fs.exists(folder, function (exists) { + if (!exists) self.makeDir(folder); + + self.fs.open(path, "w", 438, function (err, fd) { + if (err) { + self.fs.chmod(path, 438, function () { + self.fs.open(path, "w", 438, function (err, fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + }); + }); + } else if (fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + } else { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + } + }); + }); + }); + }); +}; + +Utils.prototype.findFiles = function (/*String*/ path) { + const self = this; + + function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { + if (typeof pattern === "boolean") { + recursive = pattern; + pattern = undefined; + } + let files = []; + self.fs.readdirSync(dir).forEach(function (file) { + var path = pth.join(dir, file); + + if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); + + if (!pattern || pattern.test(path)) { + files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? 
self.sep : "")); + } + }); + return files; + } + + return findSync(path, undefined, true); +}; + +Utils.prototype.getAttributes = function () {}; + +Utils.prototype.setAttributes = function () {}; + +// STATIC functions + +// crc32 single update (it is part of crc32) +Utils.crc32update = function (crc, byte) { + return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); +}; + +Utils.crc32 = function (buf) { + if (typeof buf === "string") { + buf = Buffer.from(buf, "utf8"); + } + // Generate crcTable + if (!crcTable.length) genCRCTable(); + + let len = buf.length; + let crc = ~0; + for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); + // xor and cast as uint32 number + return ~crc >>> 0; +}; + +Utils.methodToString = function (/*Number*/ method) { + switch (method) { + case Constants.STORED: + return "STORED (" + method + ")"; + case Constants.DEFLATED: + return "DEFLATED (" + method + ")"; + default: + return "UNSUPPORTED (" + method + ")"; + } +}; + +// removes ".." style path elements +Utils.canonical = function (/*string*/ path) { + if (!path) return ""; + // trick normalize think path is absolute + var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.join(".", safeSuffix); +}; + +// make abolute paths taking prefix as root folder +Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { + prefix = pth.resolve(pth.normalize(prefix)); + var parts = name.split("/"); + for (var i = 0, l = parts.length; i < l; i++) { + var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); + if (path.indexOf(prefix) === 0) { + return path; + } + } + return pth.normalize(pth.join(prefix, pth.basename(name))); +}; + +// converts buffer, Uint8Array, string types to buffer +Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { + if (Buffer.isBuffer(input)) { + return input; + } else if (input instanceof Uint8Array) { + return Buffer.from(input); + } else { + // expect string all other values 
are invalid and return empty buffer + return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0); + } +}; + +Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { + var slice = Buffer.from(buffer.slice(index, index + 8)); + slice.swap64(); + + return parseInt(`0x${slice.toString("hex")}`); +}; + +Utils.isWin = isWin; // Do we have windows system +Utils.crcTable = crcTable; diff --git a/node_modules/adm-zip/zipEntry.js b/node_modules/adm-zip/zipEntry.js new file mode 100644 index 0000000..8c3053b --- /dev/null +++ b/node_modules/adm-zip/zipEntry.js @@ -0,0 +1,333 @@ +var Utils = require("./util"), + Headers = require("./headers"), + Constants = Utils.Constants, + Methods = require("./methods"); + +module.exports = function (/*Buffer*/ input) { + var _entryHeader = new Headers.EntryHeader(), + _entryName = Buffer.alloc(0), + _comment = Buffer.alloc(0), + _isDirectory = false, + uncompressedData = null, + _extra = Buffer.alloc(0); + + function getCompressedDataFromZip() { + if (!input || !Buffer.isBuffer(input)) { + return Buffer.alloc(0); + } + _entryHeader.loadDataHeaderFromBinary(input); + return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); + } + + function crc32OK(data) { + // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written + if ((_entryHeader.flags & 0x8) !== 0x8) { + if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { + return false; + } + } else { + // @TODO: load and check data descriptor header + // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure + // (optionally preceded by a 4-byte signature) immediately after the compressed data: + } + return true; + } + + function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { + if (typeof callback === "undefined" && typeof async === "string") { + 
pass = async; + async = void 0; + } + if (_isDirectory) { + if (async && callback) { + callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error. + } + return Buffer.alloc(0); + } + + var compressedData = getCompressedDataFromZip(); + + if (compressedData.length === 0) { + // File is empty, nothing to decompress. + if (async && callback) callback(compressedData); + return compressedData; + } + + if (_entryHeader.encripted) { + if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { + throw new Error("ADM-ZIP: Incompatible password parameter"); + } + compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); + } + + var data = Buffer.alloc(_entryHeader.size); + + switch (_entryHeader.method) { + case Utils.Constants.STORED: + compressedData.copy(data); + if (!crc32OK(data)) { + if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error + throw new Error(Utils.Errors.BAD_CRC); + } else { + //si added otherwise did not seem to return data. 
+ if (async && callback) callback(data); + return data; + } + case Utils.Constants.DEFLATED: + var inflater = new Methods.Inflater(compressedData); + if (!async) { + const result = inflater.inflate(data); + result.copy(data, 0); + if (!crc32OK(data)) { + throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); + } + return data; + } else { + inflater.inflateAsync(function (result) { + result.copy(result, 0); + if (callback) { + if (!crc32OK(result)) { + callback(result, Utils.Errors.BAD_CRC); //si added error + } else { + callback(result); + } + } + }); + } + break; + default: + if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD); + throw new Error(Utils.Errors.UNKNOWN_METHOD); + } + } + + function compress(/*Boolean*/ async, /*Function*/ callback) { + if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { + // no data set or the data wasn't changed to require recompression + if (async && callback) callback(getCompressedDataFromZip()); + return getCompressedDataFromZip(); + } + + if (uncompressedData.length && !_isDirectory) { + var compressedData; + // Local file header + switch (_entryHeader.method) { + case Utils.Constants.STORED: + _entryHeader.compressedSize = _entryHeader.size; + + compressedData = Buffer.alloc(uncompressedData.length); + uncompressedData.copy(compressedData); + + if (async && callback) callback(compressedData); + return compressedData; + default: + case Utils.Constants.DEFLATED: + var deflater = new Methods.Deflater(uncompressedData); + if (!async) { + var deflated = deflater.deflate(); + _entryHeader.compressedSize = deflated.length; + return deflated; + } else { + deflater.deflateAsync(function (data) { + compressedData = Buffer.alloc(data.length); + _entryHeader.compressedSize = data.length; + data.copy(compressedData); + callback && callback(compressedData); + }); + } + deflater = null; + break; + } + } else if (async && callback) { + callback(Buffer.alloc(0)); + } else { + 
return Buffer.alloc(0); + } + } + + function readUInt64LE(buffer, offset) { + return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset); + } + + function parseExtra(data) { + var offset = 0; + var signature, size, part; + while (offset < data.length) { + signature = data.readUInt16LE(offset); + offset += 2; + size = data.readUInt16LE(offset); + offset += 2; + part = data.slice(offset, offset + size); + offset += size; + if (Constants.ID_ZIP64 === signature) { + parseZip64ExtendedInformation(part); + } + } + } + + //Override header field values with values from the ZIP64 extra field + function parseZip64ExtendedInformation(data) { + var size, compressedSize, offset, diskNumStart; + + if (data.length >= Constants.EF_ZIP64_SCOMP) { + size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); + if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { + _entryHeader.size = size; + } + } + if (data.length >= Constants.EF_ZIP64_RHO) { + compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); + if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + _entryHeader.compressedSize = compressedSize; + } + } + if (data.length >= Constants.EF_ZIP64_DSN) { + offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); + if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { + _entryHeader.offset = offset; + } + } + if (data.length >= Constants.EF_ZIP64_DSN + 4) { + diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); + if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + _entryHeader.diskNumStart = diskNumStart; + } + } + } + + return { + get entryName() { + return _entryName.toString(); + }, + get rawEntryName() { + return _entryName; + }, + set entryName(val) { + _entryName = Utils.toBuffer(val); + var lastChar = _entryName[_entryName.length - 1]; + _isDirectory = lastChar === 47 || lastChar === 92; + _entryHeader.fileNameLength = _entryName.length; + }, + + get extra() { + return _extra; + }, + set extra(val) { + _extra = val; + 
_entryHeader.extraLength = val.length; + parseExtra(val); + }, + + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + _entryHeader.commentLength = _comment.length; + }, + + get name() { + var n = _entryName.toString(); + return _isDirectory + ? n + .substr(n.length - 1) + .split("/") + .pop() + : n.split("/").pop(); + }, + get isDirectory() { + return _isDirectory; + }, + + getCompressedData: function () { + return compress(false, null); + }, + + getCompressedDataAsync: function (/*Function*/ callback) { + compress(true, callback); + }, + + setData: function (value) { + uncompressedData = Utils.toBuffer(value); + if (!_isDirectory && uncompressedData.length) { + _entryHeader.size = uncompressedData.length; + _entryHeader.method = Utils.Constants.DEFLATED; + _entryHeader.crc = Utils.crc32(value); + _entryHeader.changed = true; + } else { + // folders and blank files should be stored + _entryHeader.method = Utils.Constants.STORED; + } + }, + + getData: function (pass) { + if (_entryHeader.changed) { + return uncompressedData; + } else { + return decompress(false, null, pass); + } + }, + + getDataAsync: function (/*Function*/ callback, pass) { + if (_entryHeader.changed) { + callback(uncompressedData); + } else { + decompress(true, callback, pass); + } + }, + + set attr(attr) { + _entryHeader.attr = attr; + }, + get attr() { + return _entryHeader.attr; + }, + + set header(/*Buffer*/ data) { + _entryHeader.loadFromBinary(data); + }, + + get header() { + return _entryHeader; + }, + + packHeader: function () { + // 1. create header (buffer) + var header = _entryHeader.entryHeaderToBinary(); + var addpos = Utils.Constants.CENHDR; + // 2. add file name + _entryName.copy(header, addpos); + addpos += _entryName.length; + // 3. add extra data + if (_entryHeader.extraLength) { + _extra.copy(header, addpos); + addpos += _entryHeader.extraLength; + } + // 4. 
add file comment + if (_entryHeader.commentLength) { + _comment.copy(header, addpos); + } + return header; + }, + + toJSON: function () { + const bytes = function (nr) { + return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; + }; + + return { + entryName: this.entryName, + name: this.name, + comment: this.comment, + isDirectory: this.isDirectory, + header: _entryHeader.toJSON(), + compressedData: bytes(input), + data: bytes(uncompressedData) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/zipFile.js b/node_modules/adm-zip/zipFile.js new file mode 100644 index 0000000..997226a --- /dev/null +++ b/node_modules/adm-zip/zipFile.js @@ -0,0 +1,384 @@ +const ZipEntry = require("./zipEntry"); +const Headers = require("./headers"); +const Utils = require("./util"); + +module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { + var entryList = [], + entryTable = {}, + _comment = Buffer.alloc(0), + mainHeader = new Headers.MainHeader(), + loadedEntries = false; + + // assign options + const opts = Object.assign(Object.create(null), options); + + const { noSort } = opts; + + if (inBuffer) { + // is a memory buffer + readMainHeader(opts.readEntries); + } else { + // none. 
is a new file + loadedEntries = true; + } + + function iterateEntries(callback) { + const totalEntries = mainHeader.diskEntries; // total number of entries + let index = mainHeader.offset; // offset of first CEN header + + for (let i = 0; i < totalEntries; i++) { + let tmp = index; + const entry = new ZipEntry(inBuffer); + + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); + + index += entry.header.entryHeaderSize; + + callback(entry); + } + } + + function readEntries() { + loadedEntries = true; + entryTable = {}; + entryList = new Array(mainHeader.diskEntries); // total number of entries + var index = mainHeader.offset; // offset of first CEN header + for (var i = 0; i < entryList.length; i++) { + var tmp = index, + entry = new ZipEntry(inBuffer); + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); + + if (entry.header.extraLength) { + entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); + } + + if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); + + index += entry.header.entryHeaderSize; + + entryList[i] = entry; + entryTable[entry.entryName] = entry; + } + } + + function readMainHeader(/*Boolean*/ readNow) { + var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size + max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length + n = max, + endStart = inBuffer.length, + endOffset = -1, // Start offset of the END header + commentEnd = 0; + + for (i; i >= n; i--) { + if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' + if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { + // "PK\005\006" + endOffset = i; + commentEnd = i; + endStart = i + Utils.Constants.ENDHDR; + // We already found a regular signature, let's look just a bit further to check if 
there's any zip64 signature + n = i - Utils.Constants.END64HDR; + continue; + } + + if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { + // Found a zip64 signature, let's continue reading the whole zip64 record + n = max; + continue; + } + + if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { + // Found the zip64 record, let's determine it's size + endOffset = i; + endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; + break; + } + } + + if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); + + mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); + if (mainHeader.commentLength) { + _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); + } + if (readNow) readEntries(); + } + + function sortEntries() { + if (entryList.length > 1 && !noSort) { + entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); + } + } + + return { + /** + * Returns an array of ZipEntry objects existent in the current opened archive + * @return Array + */ + get entries() { + if (!loadedEntries) { + readEntries(); + } + return entryList; + }, + + /** + * Archive comment + * @return {String} + */ + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + mainHeader.commentLength = _comment.length; + }, + + getEntryCount: function () { + if (!loadedEntries) { + return mainHeader.diskEntries; + } + + return entryList.length; + }, + + forEach: function (callback) { + if (!loadedEntries) { + iterateEntries(callback); + return; + } + + entryList.forEach(callback); + }, + + /** + * Returns a reference to the entry with the given name or null if entry is inexistent + * + * @param entryName + * @return ZipEntry + */ + getEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + return entryTable[entryName] || null; + }, + + /** + * Adds the given entry to the entry list + * + * @param 
entry + */ + setEntry: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + entryList.push(entry); + entryTable[entry.entryName] = entry; + mainHeader.totalEntries = entryList.length; + }, + + /** + * Removes the entry with the given name from the entry list. + * + * If the entry is a directory, then all nested files and directories will be removed + * @param entryName + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + var entry = entryTable[entryName]; + if (entry && entry.isDirectory) { + var _self = this; + this.getEntryChildren(entry).forEach(function (child) { + if (child.entryName !== entryName) { + _self.deleteEntry(child.entryName); + } + }); + } + entryList.splice(entryList.indexOf(entry), 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + }, + + /** + * Iterates and returns all nested files and directories of the given entry + * + * @param entry + * @return Array + */ + getEntryChildren: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + if (entry && entry.isDirectory) { + const list = []; + const name = entry.entryName; + const len = name.length; + + entryList.forEach(function (zipEntry) { + if (zipEntry.entryName.substr(0, len) === name) { + list.push(zipEntry); + } + }); + return list; + } + return []; + }, + + /** + * Returns the zip file + * + * @return Buffer + */ + compressToBuffer: function () { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + for (const entry of entryList) { + // compress data and set local and entry header accordingly. Reason why is called first + const compressedData = entry.getCompressedData(); + // 1. 
construct data header + entry.header.offset = dindex; + const dataHeader = entry.header.dataHeaderToBinary(); + const entryNameLen = entry.rawEntryName.length; + // 1.2. postheader - data after data header + const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); + entry.rawEntryName.copy(postHeader, 0); + postHeader.copy(entry.extra, entryNameLen); + + // 2. offsets + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + dindex += dataLength; + + // 3. store values in sequence + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + // 4. construct entry header + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + // 5. update main header + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + } + + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + // write data blocks + for (const content of dataBlock) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write central directory entries + for (const content of entryHeaders) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write main header + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + mh.copy(outBuffer, dindex); + + return outBuffer; + }, + + toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { + try { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + const compress2Buffer = function (entryLists) { + if (entryLists.length) { + 
const entry = entryLists.pop(); + const name = entry.entryName + entry.extra.toString(); + if (onItemStart) onItemStart(name); + entry.getCompressedDataAsync(function (compressedData) { + if (onItemEnd) onItemEnd(name); + + entry.header.offset = dindex; + // data header + const dataHeader = entry.header.dataHeaderToBinary(); + const postHeader = Buffer.alloc(name.length, name); + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + + dindex += dataLength; + + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + + compress2Buffer(entryLists); + }); + } else { + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + dataBlock.forEach(function (content) { + content.copy(outBuffer, dindex); // write data blocks + dindex += content.length; + }); + entryHeaders.forEach(function (content) { + content.copy(outBuffer, dindex); // write central directory entries + dindex += content.length; + }); + + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + + mh.copy(outBuffer, dindex); // write main header + + onSuccess(outBuffer); + } + }; + + compress2Buffer(entryList); + } catch (e) { + onFail(e); + } + } + }; +}; diff --git a/node_modules/ansi-styles/index.d.ts b/node_modules/ansi-styles/index.d.ts new file mode 100644 index 0000000..44a907e --- /dev/null +++ b/node_modules/ansi-styles/index.d.ts @@ -0,0 +1,345 @@ +declare type CSSColor = + | 'aliceblue' + | 'antiquewhite' + | 'aqua' + | 'aquamarine' + | 'azure' + | 'beige' + | 'bisque' + | 'black' + | 'blanchedalmond' + | 'blue' 
+ | 'blueviolet' + | 'brown' + | 'burlywood' + | 'cadetblue' + | 'chartreuse' + | 'chocolate' + | 'coral' + | 'cornflowerblue' + | 'cornsilk' + | 'crimson' + | 'cyan' + | 'darkblue' + | 'darkcyan' + | 'darkgoldenrod' + | 'darkgray' + | 'darkgreen' + | 'darkgrey' + | 'darkkhaki' + | 'darkmagenta' + | 'darkolivegreen' + | 'darkorange' + | 'darkorchid' + | 'darkred' + | 'darksalmon' + | 'darkseagreen' + | 'darkslateblue' + | 'darkslategray' + | 'darkslategrey' + | 'darkturquoise' + | 'darkviolet' + | 'deeppink' + | 'deepskyblue' + | 'dimgray' + | 'dimgrey' + | 'dodgerblue' + | 'firebrick' + | 'floralwhite' + | 'forestgreen' + | 'fuchsia' + | 'gainsboro' + | 'ghostwhite' + | 'gold' + | 'goldenrod' + | 'gray' + | 'green' + | 'greenyellow' + | 'grey' + | 'honeydew' + | 'hotpink' + | 'indianred' + | 'indigo' + | 'ivory' + | 'khaki' + | 'lavender' + | 'lavenderblush' + | 'lawngreen' + | 'lemonchiffon' + | 'lightblue' + | 'lightcoral' + | 'lightcyan' + | 'lightgoldenrodyellow' + | 'lightgray' + | 'lightgreen' + | 'lightgrey' + | 'lightpink' + | 'lightsalmon' + | 'lightseagreen' + | 'lightskyblue' + | 'lightslategray' + | 'lightslategrey' + | 'lightsteelblue' + | 'lightyellow' + | 'lime' + | 'limegreen' + | 'linen' + | 'magenta' + | 'maroon' + | 'mediumaquamarine' + | 'mediumblue' + | 'mediumorchid' + | 'mediumpurple' + | 'mediumseagreen' + | 'mediumslateblue' + | 'mediumspringgreen' + | 'mediumturquoise' + | 'mediumvioletred' + | 'midnightblue' + | 'mintcream' + | 'mistyrose' + | 'moccasin' + | 'navajowhite' + | 'navy' + | 'oldlace' + | 'olive' + | 'olivedrab' + | 'orange' + | 'orangered' + | 'orchid' + | 'palegoldenrod' + | 'palegreen' + | 'paleturquoise' + | 'palevioletred' + | 'papayawhip' + | 'peachpuff' + | 'peru' + | 'pink' + | 'plum' + | 'powderblue' + | 'purple' + | 'rebeccapurple' + | 'red' + | 'rosybrown' + | 'royalblue' + | 'saddlebrown' + | 'salmon' + | 'sandybrown' + | 'seagreen' + | 'seashell' + | 'sienna' + | 'silver' + | 'skyblue' + | 'slateblue' + | 
'slategray' + | 'slategrey' + | 'snow' + | 'springgreen' + | 'steelblue' + | 'tan' + | 'teal' + | 'thistle' + | 'tomato' + | 'turquoise' + | 'violet' + | 'wheat' + | 'white' + | 'whitesmoke' + | 'yellow' + | 'yellowgreen'; + +declare namespace ansiStyles { + interface ColorConvert { + /** + The RGB color space. + + @param red - (`0`-`255`) + @param green - (`0`-`255`) + @param blue - (`0`-`255`) + */ + rgb(red: number, green: number, blue: number): string; + + /** + The RGB HEX color space. + + @param hex - A hexadecimal string containing RGB data. + */ + hex(hex: string): string; + + /** + @param keyword - A CSS color name. + */ + keyword(keyword: CSSColor): string; + + /** + The HSL color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param lightness - (`0`-`100`) + */ + hsl(hue: number, saturation: number, lightness: number): string; + + /** + The HSV color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param value - (`0`-`100`) + */ + hsv(hue: number, saturation: number, value: number): string; + + /** + The HSV color space. + + @param hue - (`0`-`360`) + @param whiteness - (`0`-`100`) + @param blackness - (`0`-`100`) + */ + hwb(hue: number, whiteness: number, blackness: number): string; + + /** + Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color. + */ + ansi(ansi: number): string; + + /** + Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(ansi: number): string; + } + + interface CSPair { + /** + The ANSI terminal control sequence for starting this style. + */ + readonly open: string; + + /** + The ANSI terminal control sequence for ending this style. + */ + readonly close: string; + } + + interface ColorBase { + readonly ansi: ColorConvert; + readonly ansi256: ColorConvert; + readonly ansi16m: ColorConvert; + + /** + The ANSI terminal control sequence for ending this color. 
+ */ + readonly close: string; + } + + interface Modifier { + /** + Resets the current color chain. + */ + readonly reset: CSPair; + + /** + Make text bold. + */ + readonly bold: CSPair; + + /** + Emitting only a small amount of light. + */ + readonly dim: CSPair; + + /** + Make text italic. (Not widely supported) + */ + readonly italic: CSPair; + + /** + Make text underline. (Not widely supported) + */ + readonly underline: CSPair; + + /** + Inverse background and foreground colors. + */ + readonly inverse: CSPair; + + /** + Prints the text, but makes it invisible. + */ + readonly hidden: CSPair; + + /** + Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: CSPair; + } + + interface ForegroundColor { + readonly black: CSPair; + readonly red: CSPair; + readonly green: CSPair; + readonly yellow: CSPair; + readonly blue: CSPair; + readonly cyan: CSPair; + readonly magenta: CSPair; + readonly white: CSPair; + + /** + Alias for `blackBright`. + */ + readonly gray: CSPair; + + /** + Alias for `blackBright`. + */ + readonly grey: CSPair; + + readonly blackBright: CSPair; + readonly redBright: CSPair; + readonly greenBright: CSPair; + readonly yellowBright: CSPair; + readonly blueBright: CSPair; + readonly cyanBright: CSPair; + readonly magentaBright: CSPair; + readonly whiteBright: CSPair; + } + + interface BackgroundColor { + readonly bgBlack: CSPair; + readonly bgRed: CSPair; + readonly bgGreen: CSPair; + readonly bgYellow: CSPair; + readonly bgBlue: CSPair; + readonly bgCyan: CSPair; + readonly bgMagenta: CSPair; + readonly bgWhite: CSPair; + + /** + Alias for `bgBlackBright`. + */ + readonly bgGray: CSPair; + + /** + Alias for `bgBlackBright`. 
+ */ + readonly bgGrey: CSPair; + + readonly bgBlackBright: CSPair; + readonly bgRedBright: CSPair; + readonly bgGreenBright: CSPair; + readonly bgYellowBright: CSPair; + readonly bgBlueBright: CSPair; + readonly bgCyanBright: CSPair; + readonly bgMagentaBright: CSPair; + readonly bgWhiteBright: CSPair; + } +} + +declare const ansiStyles: { + readonly modifier: ansiStyles.Modifier; + readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase; + readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase; + readonly codes: ReadonlyMap; +} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier; + +export = ansiStyles; diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..5d82581 --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,163 @@ +'use strict'; + +const wrapAnsi16 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => (...args) => { + const rgb = fn(...args); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +const ansi2ansi = n => n; +const rgb2rgb = (r, g, b) => [r, g, b]; + +const setLazyProperty = (object, property, get) => { + Object.defineProperty(object, property, { + get: () => { + const value = get(); + + Object.defineProperty(object, property, { + value, + enumerable: true, + configurable: true + }); + + return value; + }, + enumerable: true, + configurable: true + }); +}; + +/** @type {typeof import('color-convert')} */ +let colorConvert; +const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => { + if (colorConvert === undefined) { + colorConvert = require('color-convert'); + } + + const offset = isBackground ? 
10 : 0; + const styles = {}; + + for (const [sourceSpace, suite] of Object.entries(colorConvert)) { + const name = sourceSpace === 'ansi16' ? 'ansi' : sourceSpace; + if (sourceSpace === targetSpace) { + styles[name] = wrap(identity, offset); + } else if (typeof suite === 'object') { + styles[name] = wrap(suite[targetSpace], offset); + } + } + + return styles; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + + // Bright color + blackBright: [90, 39], + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Alias bright black as gray (and grey) + styles.color.gray = styles.color.blackBright; + styles.bgColor.bgGray = styles.bgColor.bgBlackBright; + styles.color.grey = styles.color.blackBright; + styles.bgColor.bgGrey = styles.bgColor.bgBlackBright; + + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; 
+ + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + } + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false)); + setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true)); + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..7539328 --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "4.3.0", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "funding": "https://github.com/chalk/ansi-styles?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^2.0.1" + }, + "devDependencies": { + "@types/color-convert": "^1.9.0", + "ava": "^2.3.0", + "svg-term-cli": "^2.1.1", + "tsd": "^0.11.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..24883de --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,152 @@ +# ansi-styles [![Build 
Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + +## Install + +``` +$ npm install ansi-styles +``` + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. 
+ +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `blackBright` (alias: `gray`, `grey`) +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` (alias: `bgGray`, `bgGrey`) +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. 
+ +The following color spaces from `color-convert` are supported: + +- `rgb` +- `hex` +- `keyword` +- `hsl` +- `hsv` +- `hwb` +- `ansi` +- `ansi256` + +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + +## For enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 0000000..983fbe8 --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 0000000..91fa5b6 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. 
+ +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 0000000..089117b --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. + * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. 
+ * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. + * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 0000000..1a24e2a --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/node_modules/async/CHANGELOG.md b/node_modules/async/CHANGELOG.md new file mode 100644 index 0000000..8a9a1bf --- /dev/null +++ b/node_modules/async/CHANGELOG.md @@ -0,0 +1,348 @@ +# v3.2.4 +- Fix a bug in `priorityQueue` where it didn't wait for the result. 
(#1725) +- Fix a bug where `unshiftAsync` was included in `priorityQueue`. (#1790) + +# v3.2.3 +- Fix bugs in comment parsing in `autoInject`. (#1767, #1780) + +# v3.2.2 +- Fix potential prototype pollution exploit + +# v3.2.1 +- Use `queueMicrotask` if available to the environment (#1761) +- Minor perf improvement in `priorityQueue` (#1727) +- More examples in documentation (#1726) +- Various doc fixes (#1708, #1712, #1717, #1740, #1739, #1749, #1756) +- Improved test coverage (#1754) + +# v3.2.0 +- Fix a bug in Safari related to overwriting `func.name` +- Remove built-in browserify configuration (#1653) +- Varios doc fixes (#1688, #1703, #1704) + +# v3.1.1 +- Allow redefining `name` property on wrapped functions. + +# v3.1.0 + +- Added `q.pushAsync` and `q.unshiftAsync`, analagous to `q.push` and `q.unshift`, except they always do not accept a callback, and reject if processing the task errors. (#1659) +- Promises returned from `q.push` and `q.unshift` when a callback is not passed now resolve even if an error ocurred. (#1659) +- Fixed a parsing bug in `autoInject` with complicated function bodies (#1663) +- Added ES6+ configuration for Browserify bundlers (#1653) +- Various doc fixes (#1664, #1658, #1665, #1652) + +# v3.0.1 + +## Bug fixes +- Fixed a regression where arrays passed to `queue` and `cargo` would be completely flattened. (#1645) +- Clarified Async's browser support (#1643) + + +# v3.0.0 + +The `async`/`await` release! + +There are a lot of new features and subtle breaking changes in this major version, but the biggest feature is that most Async methods return a Promise if you omit the callback, meaning you can `await` them from within an `async` function. + +```js +const results = await async.mapLimit(urls, 5, async url => { + const resp = await fetch(url) + return resp.body +}) +``` + +## Breaking Changes +- Most Async methods return a Promise when the final callback is omitted, making them `await`-able! 
(#1572) +- We are now making heavy use of ES2015 features, this means we have dropped out-of-the-box support for Node 4 and earlier, and many old versions of browsers. (#1541, #1553) +- In `queue`, `priorityQueue`, `cargo` and `cargoQueue`, the "event"-style methods, like `q.drain` and `q.saturated` are now methods that register a callback, rather than properties you assign a callback to. They are now of the form `q.drain(callback)`. If you do not pass a callback a Promise will be returned for the next occurrence of the event, making them `await`-able, e.g. `await q.drain()`. (#1586, #1641) +- Calling `callback(false)` will cancel an async method, preventing further iteration and callback calls. This is useful for preventing memory leaks when you break out of an async flow by calling an outer callback. (#1064, #1542) +- `during` and `doDuring` have been removed, and instead `whilst`, `doWhilst`, `until` and `doUntil` now have asynchronous `test` functions. (#850, #1557) +- `limits` of less than 1 now cause an error to be thrown in queues and collection methods. (#1249, #1552) +- `memoize` no longer memoizes errors (#1465, #1466) +- `applyEach`/`applyEachSeries` have a simpler interface, to make them more easily type-able. It always returns a function that takes in a single callback argument. If that callback is omitted, a promise is returned, making it awaitable. (#1228, #1640) + +## New Features +- Async generators are now supported in all the Collection methods. (#1560) +- Added `cargoQueue`, a queue with both `concurrency` and `payload` size parameters. (#1567) +- Queue objects returned from `queue` now have a `Symbol.iterator` method, meaning they can be iterated over to inspect the current list of items in the queue. (#1459, #1556) +- A ESM-flavored `async.mjs` is included in the `async` package. This is described in the `package.json` `"module"` field, meaning it should be automatically used by Webpack and other compatible bundlers. 
+ +## Bug fixes +- Better handle arbitrary error objects in `asyncify` (#1568, #1569) + +## Other +- Removed Lodash as a dependency (#1283, #1528) +- Miscellaneous docs fixes (#1393, #1501, #1540, #1543, #1558, #1563, #1564, #1579, #1581) +- Miscellaneous test fixes (#1538) + +------- + +# v2.6.1 +- Updated lodash to prevent `npm audit` warnings. (#1532, #1533) +- Made `async-es` more optimized for webpack users (#1517) +- Fixed a stack overflow with large collections and a synchronous iterator (#1514) +- Various small fixes/chores (#1505, #1511, #1527, #1530) + +# v2.6.0 +- Added missing aliases for many methods. Previously, you could not (e.g.) `require('async/find')` or use `async.anyLimit`. (#1483) +- Improved `queue` performance. (#1448, #1454) +- Add missing sourcemap (#1452, #1453) +- Various doc updates (#1448, #1471, #1483) + +# v2.5.0 +- Added `concatLimit`, the `Limit` equivalent of [`concat`](https://caolan.github.io/async/docs.html#concat) ([#1426](https://github.com/caolan/async/issues/1426), [#1430](https://github.com/caolan/async/pull/1430)) +- `concat` improvements: it now preserves order, handles falsy values and the `iteratee` callback takes a variable number of arguments ([#1437](https://github.com/caolan/async/issues/1437), [#1436](https://github.com/caolan/async/pull/1436)) +- Fixed an issue in `queue` where there was a size discrepancy between `workersList().length` and `running()` ([#1428](https://github.com/caolan/async/issues/1428), [#1429](https://github.com/caolan/async/pull/1429)) +- Various doc fixes ([#1422](https://github.com/caolan/async/issues/1422), [#1424](https://github.com/caolan/async/pull/1424)) + +# v2.4.1 +- Fixed a bug preventing functions wrapped with `timeout()` from being re-used. ([#1418](https://github.com/caolan/async/issues/1418), [#1419](https://github.com/caolan/async/issues/1419)) + +# v2.4.0 +- Added `tryEach`, for running async functions in parallel, where you only expect one to succeed. 
([#1365](https://github.com/caolan/async/issues/1365), [#687](https://github.com/caolan/async/issues/687)) +- Improved performance, most notably in `parallel` and `waterfall` ([#1395](https://github.com/caolan/async/issues/1395)) +- Added `queue.remove()`, for removing items in a `queue` ([#1397](https://github.com/caolan/async/issues/1397), [#1391](https://github.com/caolan/async/issues/1391)) +- Fixed using `eval`, preventing Async from running in pages with Content Security Policy ([#1404](https://github.com/caolan/async/issues/1404), [#1403](https://github.com/caolan/async/issues/1403)) +- Fixed errors thrown in an `asyncify`ed function's callback being caught by the underlying Promise ([#1408](https://github.com/caolan/async/issues/1408)) +- Fixed timing of `queue.empty()` ([#1367](https://github.com/caolan/async/issues/1367)) +- Various doc fixes ([#1314](https://github.com/caolan/async/issues/1314), [#1394](https://github.com/caolan/async/issues/1394), [#1412](https://github.com/caolan/async/issues/1412)) + +# v2.3.0 +- Added support for ES2017 `async` functions. Wherever you can pass a Node-style/CPS function that uses a callback, you can also pass an `async` function. Previously, you had to wrap `async` functions with `asyncify`. The caveat is that it will only work if `async` functions are supported natively in your environment, transpiled implementations can't be detected. 
([#1386](https://github.com/caolan/async/issues/1386), [#1390](https://github.com/caolan/async/issues/1390)) +- Small doc fix ([#1392](https://github.com/caolan/async/issues/1392)) + +# v2.2.0 +- Added `groupBy`, and the `Series`/`Limit` equivalents, analogous to [`_.groupBy`](http://lodash.com/docs#groupBy) ([#1364](https://github.com/caolan/async/issues/1364)) +- Fixed `transform` bug when `callback` was not passed ([#1381](https://github.com/caolan/async/issues/1381)) +- Added note about `reflect` to `parallel` docs ([#1385](https://github.com/caolan/async/issues/1385)) + +# v2.1.5 +- Fix `auto` bug when function names collided with Array.prototype ([#1358](https://github.com/caolan/async/issues/1358)) +- Improve some error messages ([#1349](https://github.com/caolan/async/issues/1349)) +- Avoid stack overflow case in queue +- Fixed an issue in `some`, `every` and `find` where processing would continue after the result was determined. +- Cleanup implementations of `some`, `every` and `find` + +# v2.1.3 +- Make bundle size smaller +- Create optimized hotpath for `filter` in array case. + +# v2.1.2 +- Fixed a stackoverflow bug with `detect`, `some`, `every` on large inputs ([#1293](https://github.com/caolan/async/issues/1293)). 
+ +# v2.1.0 + +- `retry` and `retryable` now support an optional `errorFilter` function that determines if the `task` should retry on the error ([#1256](https://github.com/caolan/async/issues/1256), [#1261](https://github.com/caolan/async/issues/1261)) +- Optimized array iteration in `race`, `cargo`, `queue`, and `priorityQueue` ([#1253](https://github.com/caolan/async/issues/1253)) +- Added alias documentation to doc site ([#1251](https://github.com/caolan/async/issues/1251), [#1254](https://github.com/caolan/async/issues/1254)) +- Added [BootStrap scrollspy](http://getbootstrap.com/javascript/#scrollspy) to docs to highlight in the sidebar the current method being viewed ([#1289](https://github.com/caolan/async/issues/1289), [#1300](https://github.com/caolan/async/issues/1300)) +- Various minor doc fixes ([#1263](https://github.com/caolan/async/issues/1263), [#1264](https://github.com/caolan/async/issues/1264), [#1271](https://github.com/caolan/async/issues/1271), [#1278](https://github.com/caolan/async/issues/1278), [#1280](https://github.com/caolan/async/issues/1280), [#1282](https://github.com/caolan/async/issues/1282), [#1302](https://github.com/caolan/async/issues/1302)) + +# v2.0.1 + +- Significantly optimized all iteration based collection methods such as `each`, `map`, `filter`, etc ([#1245](https://github.com/caolan/async/issues/1245), [#1246](https://github.com/caolan/async/issues/1246), [#1247](https://github.com/caolan/async/issues/1247)). + +# v2.0.0 + +Lots of changes here! + +First and foremost, we have a slick new [site for docs](https://caolan.github.io/async/). Special thanks to [**@hargasinski**](https://github.com/hargasinski) for his work converting our old docs to `jsdoc` format and implementing the new website. Also huge ups to [**@ivanseidel**](https://github.com/ivanseidel) for designing our new logo. It was a long process for both of these tasks, but I think these changes turned out extraordinary well. 
+ +The biggest feature is modularization. You can now `require("async/series")` to only require the `series` function. Every Async library function is available this way. You still can `require("async")` to require the entire library, like you could do before. + +We also provide Async as a collection of ES2015 modules. You can now `import {each} from 'async-es'` or `import waterfall from 'async-es/waterfall'`. If you are using only a few Async functions, and are using a ES bundler such as Rollup, this can significantly lower your build size. + +Major thanks to [**@Kikobeats**](github.com/Kikobeats), [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for doing the majority of the modularization work, as well as [**@jdalton**](github.com/jdalton) and [**@Rich-Harris**](github.com/Rich-Harris) for advisory work on the general modularization strategy. + +Another one of the general themes of the 2.0 release is standardization of what an "async" function is. We are now more strictly following the node-style continuation passing style. That is, an async function is a function that: + +1. Takes a variable number of arguments +2. The last argument is always a callback +3. The callback can accept any number of arguments +4. The first argument passed to the callback will be treated as an error result, if the argument is truthy +5. Any number of result arguments can be passed after the "error" argument +6. The callback is called once and exactly once, either on the same tick or later tick of the JavaScript event loop. + +There were several cases where Async accepted some functions that did not strictly have these properties, most notably `auto`, `every`, `some`, `filter`, `reject` and `detect`. + +Another theme is performance. We have eliminated internal deferrals in all cases where they make sense. For example, in `waterfall` and `auto`, there was a `setImmediate` between each task -- these deferrals have been removed. 
A `setImmediate` call can add up to 1ms of delay. This might not seem like a lot, but it can add up if you are using many Async functions in the course of processing a HTTP request, for example. Nearly all asynchronous functions that do I/O already have some sort of deferral built in, so the extra deferral is unnecessary. The trade-off of this change is removing our built-in stack-overflow defense. Many synchronous callback calls in series can quickly overflow the JS call stack. If you do have a function that is sometimes synchronous (calling its callback on the same tick), and are running into stack overflows, wrap it with `async.ensureAsync()`. + +Another big performance win has been re-implementing `queue`, `cargo`, and `priorityQueue` with [doubly linked lists](https://en.wikipedia.org/wiki/Doubly_linked_list) instead of arrays. This has lead to queues being an order of [magnitude faster on large sets of tasks](https://github.com/caolan/async/pull/1205). + +## New Features + +- Async is now modularized. Individual functions can be `require()`d from the main package. (`require('async/auto')`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Async is also available as a collection of ES2015 modules in the new `async-es` package. (`import {forEachSeries} from 'async-es'`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Added `race`, analogous to `Promise.race()`. It will run an array of async tasks in parallel and will call its callback with the result of the first task to respond. ([#568](https://github.com/caolan/async/issues/568), [#1038](https://github.com/caolan/async/issues/1038)) +- Collection methods now accept ES2015 iterators. Maps, Sets, and anything that implements the iterator spec can now be passed directly to `each`, `map`, `parallel`, etc.. 
([#579](https://github.com/caolan/async/issues/579), [#839](https://github.com/caolan/async/issues/839), [#1074](https://github.com/caolan/async/issues/1074)) +- Added `mapValues`, for mapping over the properties of an object and returning an object with the same keys. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- Added `timeout`, a wrapper for an async function that will make the task time-out after the specified time. ([#1007](https://github.com/caolan/async/issues/1007), [#1027](https://github.com/caolan/async/issues/1027)) +- Added `reflect` and `reflectAll`, analagous to [`Promise.reflect()`](http://bluebirdjs.com/docs/api/reflect.html), a wrapper for async tasks that always succeeds, by gathering results and errors into an object. ([#942](https://github.com/caolan/async/issues/942), [#1012](https://github.com/caolan/async/issues/1012), [#1095](https://github.com/caolan/async/issues/1095)) +- `constant` supports dynamic arguments -- it will now always use its last argument as the callback. ([#1016](https://github.com/caolan/async/issues/1016), [#1052](https://github.com/caolan/async/issues/1052)) +- `setImmediate` and `nextTick` now support arguments to partially apply to the deferred function, like the node-native versions do. ([#940](https://github.com/caolan/async/issues/940), [#1053](https://github.com/caolan/async/issues/1053)) +- `auto` now supports resolving cyclic dependencies using [Kahn's algorithm](https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm) ([#1140](https://github.com/caolan/async/issues/1140)). +- Added `autoInject`, a relative of `auto` that automatically spreads a task's dependencies as arguments to the task function. 
([#608](https://github.com/caolan/async/issues/608), [#1055](https://github.com/caolan/async/issues/1055), [#1099](https://github.com/caolan/async/issues/1099), [#1100](https://github.com/caolan/async/issues/1100)) +- You can now limit the concurrency of `auto` tasks. ([#635](https://github.com/caolan/async/issues/635), [#637](https://github.com/caolan/async/issues/637)) +- Added `retryable`, a relative of `retry` that wraps an async function, making it retry when called. ([#1058](https://github.com/caolan/async/issues/1058)) +- `retry` now supports specifying a function that determines the next time interval, useful for exponential backoff, logging and other retry strategies. ([#1161](https://github.com/caolan/async/issues/1161)) +- `retry` will now pass all of the arguments the task function was resolved with to the callback ([#1231](https://github.com/caolan/async/issues/1231)). +- Added `q.unsaturated` -- callback called when a `queue`'s number of running workers falls below a threshold. ([#868](https://github.com/caolan/async/issues/868), [#1030](https://github.com/caolan/async/issues/1030), [#1033](https://github.com/caolan/async/issues/1033), [#1034](https://github.com/caolan/async/issues/1034)) +- Added `q.error` -- a callback called whenever a `queue` task calls its callback with an error. ([#1170](https://github.com/caolan/async/issues/1170)) +- `applyEach` and `applyEachSeries` now pass results to the final callback. ([#1088](https://github.com/caolan/async/issues/1088)) + +## Breaking changes + +- Calling a callback more than once is considered an error, and an error will be thrown. This had an explicit breaking change in `waterfall`. If you were relying on this behavior, you should more accurately represent your control flow as an event emitter or stream. 
([#814](https://github.com/caolan/async/issues/814), [#815](https://github.com/caolan/async/issues/815), [#1048](https://github.com/caolan/async/issues/1048), [#1050](https://github.com/caolan/async/issues/1050)) +- `auto` task functions now always take the callback as the last argument. If a task has dependencies, the `results` object will be passed as the first argument. To migrate old task functions, wrap them with [`_.flip`](https://lodash.com/docs#flip) ([#1036](https://github.com/caolan/async/issues/1036), [#1042](https://github.com/caolan/async/issues/1042)) +- Internal `setImmediate` calls have been refactored away. This may make existing flows vulnerable to stack overflows if you use many synchronous functions in series. Use `ensureAsync` to work around this. ([#696](https://github.com/caolan/async/issues/696), [#704](https://github.com/caolan/async/issues/704), [#1049](https://github.com/caolan/async/issues/1049), [#1050](https://github.com/caolan/async/issues/1050)) +- `map` used to return an object when iterating over an object. `map` now always returns an array, like in other libraries. The previous object behavior has been split out into `mapValues`. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- `filter`, `reject`, `some`, `every`, `detect` and their families like `{METHOD}Series` and `{METHOD}Limit` now expect an error as the first callback argument, rather than just a simple boolean. Pass `null` as the first argument, or use `fs.access` instead of `fs.exists`. ([#118](https://github.com/caolan/async/issues/118), [#774](https://github.com/caolan/async/issues/774), [#1028](https://github.com/caolan/async/issues/1028), [#1041](https://github.com/caolan/async/issues/1041)) +- `{METHOD}` and `{METHOD}Series` are now implemented in terms of `{METHOD}Limit`. 
This is a major internal simplification, and is not expected to cause many problems, but it does subtly affect how functions execute internally. ([#778](https://github.com/caolan/async/issues/778), [#847](https://github.com/caolan/async/issues/847)) +- `retry`'s callback is now optional. Previously, omitting the callback would partially apply the function, meaning it could be passed directly as a task to `series` or `auto`. The partially applied "control-flow" behavior has been separated out into `retryable`. ([#1054](https://github.com/caolan/async/issues/1054), [#1058](https://github.com/caolan/async/issues/1058)) +- The test function for `whilst`, `until`, and `during` used to be passed non-error args from the iteratee function's callback, but this led to weirdness where the first call of the test function would be passed no args. We have made it so the test function is never passed extra arguments, and only the `doWhilst`, `doUntil`, and `doDuring` functions pass iteratee callback arguments to the test function ([#1217](https://github.com/caolan/async/issues/1217), [#1224](https://github.com/caolan/async/issues/1224)) +- The `q.tasks` array has been renamed `q._tasks` and is now implemented as a doubly linked list (DLL). Any code that used to interact with this array will need to be updated to either use the provided helpers or support DLLs ([#1205](https://github.com/caolan/async/issues/1205)). +- The timing of the `q.saturated()` callback in a `queue` has been modified to better reflect when tasks pushed to the queue will start queueing. 
([#724](https://github.com/caolan/async/issues/724), [#1078](https://github.com/caolan/async/issues/1078)) +- Removed `iterator` method in favour of [ES2015 iterator protocol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators ) which natively supports arrays ([#1237](https://github.com/caolan/async/issues/1237)) +- Dropped support for Component, Jam, SPM, and Volo ([#1175](https://github.com/caolan/async/issues/1175), #[#176](https://github.com/caolan/async/issues/176)) + +## Bug Fixes + +- Improved handling of no dependency cases in `auto` & `autoInject` ([#1147](https://github.com/caolan/async/issues/1147)). +- Fixed a bug where the callback generated by `asyncify` with `Promises` could resolve twice ([#1197](https://github.com/caolan/async/issues/1197)). +- Fixed several documented optional callbacks not actually being optional ([#1223](https://github.com/caolan/async/issues/1223)). + +## Other + +- Added `someSeries` and `everySeries` for symmetry, as well as a complete set of `any`/`anyLimit`/`anySeries` and `all`/`/allLmit`/`allSeries` aliases. +- Added `find` as an alias for `detect. (as well as `findLimit` and `findSeries`). 
+- Various doc fixes ([#1005](https://github.com/caolan/async/issues/1005), [#1008](https://github.com/caolan/async/issues/1008), [#1010](https://github.com/caolan/async/issues/1010), [#1015](https://github.com/caolan/async/issues/1015), [#1021](https://github.com/caolan/async/issues/1021), [#1037](https://github.com/caolan/async/issues/1037), [#1039](https://github.com/caolan/async/issues/1039), [#1051](https://github.com/caolan/async/issues/1051), [#1102](https://github.com/caolan/async/issues/1102), [#1107](https://github.com/caolan/async/issues/1107), [#1121](https://github.com/caolan/async/issues/1121), [#1123](https://github.com/caolan/async/issues/1123), [#1129](https://github.com/caolan/async/issues/1129), [#1135](https://github.com/caolan/async/issues/1135), [#1138](https://github.com/caolan/async/issues/1138), [#1141](https://github.com/caolan/async/issues/1141), [#1153](https://github.com/caolan/async/issues/1153), [#1216](https://github.com/caolan/async/issues/1216), [#1217](https://github.com/caolan/async/issues/1217), [#1232](https://github.com/caolan/async/issues/1232), [#1233](https://github.com/caolan/async/issues/1233), [#1236](https://github.com/caolan/async/issues/1236), [#1238](https://github.com/caolan/async/issues/1238)) + +Thank you [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for taking the lead on version 2 of async. 
+ +------------------------------------------ + +# v1.5.2 +- Allow using `"constructor"` as an argument in `memoize` ([#998](https://github.com/caolan/async/issues/998)) +- Give a better error messsage when `auto` dependency checking fails ([#994](https://github.com/caolan/async/issues/994)) +- Various doc updates ([#936](https://github.com/caolan/async/issues/936), [#956](https://github.com/caolan/async/issues/956), [#979](https://github.com/caolan/async/issues/979), [#1002](https://github.com/caolan/async/issues/1002)) + +# v1.5.1 +- Fix issue with `pause` in `queue` with concurrency enabled ([#946](https://github.com/caolan/async/issues/946)) +- `while` and `until` now pass the final result to callback ([#963](https://github.com/caolan/async/issues/963)) +- `auto` will properly handle concurrency when there is no callback ([#966](https://github.com/caolan/async/issues/966)) +- `auto` will now properly stop execution when an error occurs ([#988](https://github.com/caolan/async/issues/988), [#993](https://github.com/caolan/async/issues/993)) +- Various doc fixes ([#971](https://github.com/caolan/async/issues/971), [#980](https://github.com/caolan/async/issues/980)) + +# v1.5.0 + +- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) ([#892](https://github.com/caolan/async/issues/892)) +- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. ([#873](https://github.com/caolan/async/issues/873)) +- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks ([#637](https://github.com/caolan/async/issues/637)) +- Added `queue#workersList()`, to retrieve the list of currently running tasks. 
([#891](https://github.com/caolan/async/issues/891)) +- Various code simplifications ([#896](https://github.com/caolan/async/issues/896), [#904](https://github.com/caolan/async/issues/904)) +- Various doc fixes :scroll: ([#890](https://github.com/caolan/async/issues/890), [#894](https://github.com/caolan/async/issues/894), [#903](https://github.com/caolan/async/issues/903), [#905](https://github.com/caolan/async/issues/905), [#912](https://github.com/caolan/async/issues/912)) + +# v1.4.2 + +- Ensure coverage files don't get published on npm ([#879](https://github.com/caolan/async/issues/879)) + +# v1.4.1 + +- Add in overlooked `detectLimit` method ([#866](https://github.com/caolan/async/issues/866)) +- Removed unnecessary files from npm releases ([#861](https://github.com/caolan/async/issues/861)) +- Removed usage of a reserved word to prevent :boom: in older environments ([#870](https://github.com/caolan/async/issues/870)) + +# v1.4.0 + +- `asyncify` now supports promises ([#840](https://github.com/caolan/async/issues/840)) +- Added `Limit` versions of `filter` and `reject` ([#836](https://github.com/caolan/async/issues/836)) +- Add `Limit` versions of `detect`, `some` and `every` ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- `some`, `every` and `detect` now short circuit early ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- Improve detection of the global object ([#804](https://github.com/caolan/async/issues/804)), enabling use in WebWorkers +- `whilst` now called with arguments from iterator ([#823](https://github.com/caolan/async/issues/823)) +- `during` now gets called with arguments from iterator ([#824](https://github.com/caolan/async/issues/824)) +- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0)) + + +# v1.3.0 + +New Features: +- Added `constant` +- Added 
`asyncify`/`wrapSync` for making sync functions work with callbacks. ([#671](https://github.com/caolan/async/issues/671), [#806](https://github.com/caolan/async/issues/806)) +- Added `during` and `doDuring`, which are like `whilst` with an async truth test. ([#800](https://github.com/caolan/async/issues/800)) +- `retry` now accepts an `interval` parameter to specify a delay between retries. ([#793](https://github.com/caolan/async/issues/793)) +- `async` should work better in Web Workers due to better `root` detection ([#804](https://github.com/caolan/async/issues/804)) +- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` ([#642](https://github.com/caolan/async/issues/642)) +- Various internal updates ([#786](https://github.com/caolan/async/issues/786), [#801](https://github.com/caolan/async/issues/801), [#802](https://github.com/caolan/async/issues/802), [#803](https://github.com/caolan/async/issues/803)) +- Various doc fixes ([#790](https://github.com/caolan/async/issues/790), [#794](https://github.com/caolan/async/issues/794)) + +Bug Fixes: +- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. ([#740](https://github.com/caolan/async/issues/740), [#744](https://github.com/caolan/async/issues/744), [#783](https://github.com/caolan/async/issues/783)) + + +# v1.2.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.2.0 + +New Features: + +- Added `timesLimit` ([#743](https://github.com/caolan/async/issues/743)) +- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. 
([#747](https://github.com/caolan/async/issues/747), [#772](https://github.com/caolan/async/issues/772)) + +Bug Fixes: + +- Fixed a regression in `each` and family with empty arrays that have additional properties. ([#775](https://github.com/caolan/async/issues/775), [#777](https://github.com/caolan/async/issues/777)) + + +# v1.1.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.1.0 + +New Features: + +- `cargo` now supports all of the same methods and event callbacks as `queue`. +- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. ([#769](https://github.com/caolan/async/issues/769)) +- Optimized `map`, `eachOf`, and `waterfall` families of functions +- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array ([#667](https://github.com/caolan/async/issues/667)). +- The callback is now optional for the composed results of `compose` and `seq`. 
([#618](https://github.com/caolan/async/issues/618)) +- Reduced file size by 4kb, (minified version by 1kb) +- Added code coverage through `nyc` and `coveralls` ([#768](https://github.com/caolan/async/issues/768)) + +Bug Fixes: + +- `forever` will no longer stack overflow with a synchronous iterator ([#622](https://github.com/caolan/async/issues/622)) +- `eachLimit` and other limit functions will stop iterating once an error occurs ([#754](https://github.com/caolan/async/issues/754)) +- Always pass `null` in callbacks when there is no error ([#439](https://github.com/caolan/async/issues/439)) +- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue ([#668](https://github.com/caolan/async/issues/668)) +- `each` and family will properly handle an empty array ([#578](https://github.com/caolan/async/issues/578)) +- `eachSeries` and family will finish if the underlying array is modified during execution ([#557](https://github.com/caolan/async/issues/557)) +- `queue` will throw if a non-function is passed to `q.push()` ([#593](https://github.com/caolan/async/issues/593)) +- Doc fixes ([#629](https://github.com/caolan/async/issues/629), [#766](https://github.com/caolan/async/issues/766)) + + +# v1.0.0 + +No known breaking changes, we are simply complying with semver from here on out. + +Changes: + +- Start using a changelog! 
+- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) ([#168](https://github.com/caolan/async/issues/168) [#704](https://github.com/caolan/async/issues/704) [#321](https://github.com/caolan/async/issues/321)) +- Detect deadlocks in `auto` ([#663](https://github.com/caolan/async/issues/663)) +- Better support for require.js ([#527](https://github.com/caolan/async/issues/527)) +- Throw if queue created with concurrency `0` ([#714](https://github.com/caolan/async/issues/714)) +- Fix unneeded iteration in `queue.resume()` ([#758](https://github.com/caolan/async/issues/758)) +- Guard against timer mocking overriding `setImmediate` ([#609](https://github.com/caolan/async/issues/609) [#611](https://github.com/caolan/async/issues/611)) +- Miscellaneous doc fixes ([#542](https://github.com/caolan/async/issues/542) [#596](https://github.com/caolan/async/issues/596) [#615](https://github.com/caolan/async/issues/615) [#628](https://github.com/caolan/async/issues/628) [#631](https://github.com/caolan/async/issues/631) [#690](https://github.com/caolan/async/issues/690) [#729](https://github.com/caolan/async/issues/729)) +- Use single noop function internally ([#546](https://github.com/caolan/async/issues/546)) +- Optimize internal `_each`, `_map` and `_keys` functions. 
diff --git a/node_modules/async/LICENSE b/node_modules/async/LICENSE new file mode 100644 index 0000000..b18aed6 --- /dev/null +++ b/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010-2018 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/async/README.md b/node_modules/async/README.md new file mode 100644 index 0000000..77f645e --- /dev/null +++ b/node_modules/async/README.md @@ -0,0 +1,59 @@ +![Async Logo](https://raw.githubusercontent.com/caolan/async/master/logo/async-logo_readme.jpg) + +![Github Actions CI status](https://github.com/caolan/async/actions/workflows/ci.yml/badge.svg) +[![NPM version](https://img.shields.io/npm/v/async.svg)](https://www.npmjs.com/package/async) +[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master) +[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![jsDelivr Hits](https://data.jsdelivr.com/v1/package/npm/async/badge?style=rounded)](https://www.jsdelivr.com/package/npm/async) + + + +Async is a utility module which provides straight-forward, powerful functions for working with [asynchronous JavaScript](http://caolan.github.io/async/v3/global.html). Although originally designed for use with [Node.js](https://nodejs.org/) and installable via `npm i async`, it can also be used directly in the browser. A ESM/MJS version is included in the main `async` package that should automatically be used with compatible bundlers such as Webpack and Rollup. + +A pure ESM version of Async is available as [`async-es`](https://www.npmjs.com/package/async-es). + +For Documentation, visit + +*For Async v1.5.x documentation, go [HERE](https://github.com/caolan/async/blob/v1.5.2/README.md)* + + +```javascript +// for use with Node-style callbacks... 
+var async = require("async"); + +var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; +var configs = {}; + +async.forEachOf(obj, (value, key, callback) => { + fs.readFile(__dirname + value, "utf8", (err, data) => { + if (err) return callback(err); + try { + configs[key] = JSON.parse(data); + } catch (e) { + return callback(e); + } + callback(); + }); +}, err => { + if (err) console.error(err.message); + // configs is now a map of JSON data + doSomethingWith(configs); +}); +``` + +```javascript +var async = require("async"); + +// ...or ES2017 async functions +async.mapLimit(urls, 5, async function(url) { + const response = await fetch(url) + return response.body +}, (err, results) => { + if (err) throw err + // results is now an array of the response bodies + console.log(results) +}) +``` diff --git a/node_modules/async/all.js b/node_modules/async/all.js new file mode 100644 index 0000000..148db68 --- /dev/null +++ b/node_modules/async/all.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. 
+ * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } 
+ * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allLimit.js b/node_modules/async/allLimit.js new file mode 100644 index 0000000..25b2c08 --- /dev/null +++ b/node_modules/async/allLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allSeries.js b/node_modules/async/allSeries.js new file mode 100644 index 0000000..147c3dc --- /dev/null +++ b/node_modules/async/allSeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/any.js b/node_modules/async/any.js new file mode 100644 index 0000000..2046cf6 --- /dev/null +++ b/node_modules/async/any.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in 
the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anyLimit.js b/node_modules/async/anyLimit.js new file mode 100644 index 0000000..c8a295a --- /dev/null +++ b/node_modules/async/anyLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. 
+ * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anySeries.js b/node_modules/async/anySeries.js new file mode 100644 index 0000000..ee0654b --- /dev/null +++ b/node_modules/async/anySeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. 
+ * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/apply.js b/node_modules/async/apply.js new file mode 100644 index 0000000..5246833 --- /dev/null +++ b/node_modules/async/apply.js @@ -0,0 +1,55 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn, ...args) { + return (...callArgs) => fn(...args, ...callArgs); +}; + +module.exports = exports["default"]; /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. 
+ * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ \ No newline at end of file diff --git a/node_modules/async/applyEach.js b/node_modules/async/applyEach.js new file mode 100644 index 0000000..b08c670 --- /dev/null +++ b/node_modules/async/applyEach.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. 
+ * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +exports.default = (0, _applyEach2.default)(_map2.default); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/applyEachSeries.js b/node_modules/async/applyEachSeries.js new file mode 100644 index 0000000..6a19ca3 --- /dev/null +++ b/node_modules/async/applyEachSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _mapSeries = require('./mapSeries.js'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. 
+ * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ +exports.default = (0, _applyEach2.default)(_mapSeries2.default); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/asyncify.js b/node_modules/async/asyncify.js new file mode 100644 index 0000000..3c3bf88 --- /dev/null +++ b/node_modules/async/asyncify.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. 
+ * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? 
err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/auto.js b/node_modules/async/auto.js new file mode 100644 index 0000000..c4a85d4 --- /dev/null +++ b/node_modules/async/auto.js @@ -0,0 +1,333 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = auto; + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. 
Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return; + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while (readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + 
+ taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = (0, _onlyOnce2.default)((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return; + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return; + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = (0, _wrapAsync2.default)(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error('async.auto cannot execute tasks due to a recursive dependency'); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/autoInject.js b/node_modules/async/autoInject.js new file mode 
100644 index 0000000..393baad --- /dev/null +++ b/node_modules/async/autoInject.js @@ -0,0 +1,182 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = autoInject; + +var _auto = require('./auto.js'); + +var _auto2 = _interopRequireDefault(_auto); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index + 1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = endIndex === -1 ? string.length : endIndex; + } else if (endBlockComment !== -1 && string[index] === '/' && string[index + 1] === '*') { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src); + let [, args] = match; + return args.replace(/\s/g, '').split(FN_ARG_SPLIT).map(arg => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. 
Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... 
+ * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = (0, _wrapAsync.isAsync)(taskFn); + var hasNoDeps = !fnIsAsync && taskFn.length === 1 || fnIsAsync && taskFn.length === 0; + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + (0, _wrapAsync2.default)(taskFn)(...newArgs); + } + }); + + return (0, _auto2.default)(newTasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/bower.json b/node_modules/async/bower.json new file mode 100644 index 0000000..390c650 --- /dev/null +++ b/node_modules/async/bower.json @@ -0,0 +1,17 @@ +{ + "name": "async", + "main": "dist/async.js", + "ignore": [ + "bower_components", + "lib", + "test", + "node_modules", + "perf", + "support", + "**/.*", + "*.config.js", + "*.json", + "index.js", + "Makefile" + ] +} diff --git a/node_modules/async/cargo.js b/node_modules/async/cargo.js new file mode 100644 index 0000000..aa385f8 --- /dev/null +++ b/node_modules/async/cargo.js @@ -0,0 +1,63 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = cargo; + +var _queue = 
require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/concatLimit.js b/node_modules/async/concatLimit.js new file mode 100644 index 0000000..3d170f1 --- /dev/null +++ b/node_modules/async/concatLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/concatSeries.js b/node_modules/async/concatSeries.js new file mode 100644 index 0000000..84add3b --- /dev/null +++ b/node_modules/async/concatSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var 
_concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/constant.js b/node_modules/async/constant.js new file mode 100644 index 0000000..0759653 --- /dev/null +++ b/node_modules/async/constant.js @@ -0,0 +1,55 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (...args) { + return function (...ignoredArgs /*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +}; + +module.exports = exports["default"]; /** + * Returns a function that when called, calls-back with the values provided. 
+ * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ \ No newline at end of file diff --git a/node_modules/async/detect.js b/node_modules/async/detect.js new file mode 100644 index 0000000..05b2e5c --- /dev/null +++ b/node_modules/async/detect.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. 
That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = 
await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/detectLimit.js b/node_modules/async/detectLimit.js new file mode 100644 index 0000000..db6961e --- /dev/null +++ b/node_modules/async/detectLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/detectSeries.js b/node_modules/async/detectSeries.js new file mode 100644 index 0000000..b9131b4 --- /dev/null +++ b/node_modules/async/detectSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. 
+ * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/dir.js b/node_modules/async/dir.js new file mode 100644 index 0000000..950d0a2 --- /dev/null +++ b/node_modules/async/dir.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. 
+ * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +exports.default = (0, _consoleFunc2.default)('dir'); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/dist/async.js b/node_modules/async/dist/async.js new file mode 100644 index 0000000..8d5e782 --- /dev/null +++ b/node_modules/async/dist/async.js @@ -0,0 +1,6059 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (factory((global.async = {}))); +}(this, (function (exports) { 'use strict'; + + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. 
+ * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } + + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } + + /* istanbul ignore file */ + + var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + + function fallback(fn) { + setTimeout(fn, 0); + } + + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } + + var _defer; + + if (hasQueueMicrotask) { + _defer = queueMicrotask; + } else if (hasSetImmediate) { + _defer = setImmediate; + } else if (hasNextTick) { + _defer = process.nextTick; + } else { + _defer = fallback; + } + + var setImmediate$1 = wrap(_defer); + + /** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. 
+ * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } + + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); + } + + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } + + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } + + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } + + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } + + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; + } + + // conditionally promisify a function. 
+ // only return a promise if a callback is omitted + function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable + } + + function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } + + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } + + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } + + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; + + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } + + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } + + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? 
{value: coll[i], key: i} : null; + } + } + + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } + + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; + } + + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); + } + + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } + + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + 
//console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); + } + + var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; + }; + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); + } + + var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); + + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } + } + + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); + } + + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. 
+ * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ + function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); + } + + var eachOf$1 = awaitify(eachOf, 3); + + /** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. 
The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. 
+ * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) + } + var map$1 = awaitify(map, 3); + + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. 
The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach$1 = applyEach(map$1); + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) + } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); + + /** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + } + var mapSeries$1 = awaitify(mapSeries, 3); + + /** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. 
It takes no args, other than + * a callback. + */ + var applyEachSeries = applyEach(mapSeries$1); + + const PROMISE_SYMBOL = Symbol('promiseCallback'); + + function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback + } + + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. 
The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + 
+ function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] + } + + var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + + function 
stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; + } + + function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); + } + + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. 
+ * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. 
+ * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); + } + + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. + class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && 
this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } + } + + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; + } + + function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? 
rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, 
callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). + if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + 
Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; + } + + /** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. 
+ * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. + * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); + } + var reduce$1 = awaitify(reduce, 4); + + /** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. 
+ * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; + } + + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. 
+ * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } + + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); + + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. 
The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results 
=> { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ + function concat(coll, iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) + } + var concat$1 = awaitify(concat, 3); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ + function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) + } + var concatSeries$1 = awaitify(concatSeries, 3); + + /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... 
+ * }, callback); + */ + function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } + + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } + + /** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + } + var detect$1 = awaitify(detect, 3); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) + } + var detectLimit$1 = awaitify(detectLimit, 4); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) + } + + var detectSeries$1 = awaitify(detectSeries, 3); + + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) + } + + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); + + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. 
+ * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); + } + + var doWhilst$1 = awaitify(doWhilst, 3); + + /** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. 
Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } + + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } + + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt 
could have been deleted + * } + * } + * + */ + function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + + var each = awaitify(eachLimit, 3); + + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + var eachLimit$2 = awaitify(eachLimit$1, 4); + + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. 
+ * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) + } + var eachSeries$1 = awaitify(eachSeries, 3); + + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! 
+ * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; + } + + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function every(coll, iteratee, callback) { + return 
_createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); + + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) + } + var everyLimit$1 = awaitify(everyLimit, 4); + + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); + + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); + } + + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); + } + + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); + } + + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. 
+ * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) + } + var filter$1 = awaitify(filter, 3); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. 
+ * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ + function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) + } + var filterSeries$1 = awaitify(filterSeries, 3); + + /** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); + } + var forever$1 = awaitify(forever, 2); + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. 
+ * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); + } + + var groupByLimit$1 = awaitify(groupByLimit, 4); + + /** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. 
+ * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 
'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) + } + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. 
Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } + + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ + var log = consoleFunc('log'); + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. 
`result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); + } + + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + + /** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. 
+ * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) + } + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) + } + + /** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + } + + /* istanbul ignore file */ + + /** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. 
+ * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ + var _defer$1; + + if (hasNextTick) { + _defer$1 = process.nextTick; + } else if (hasSetImmediate) { + _defer$1 = setImmediate; + } else { + _defer$1 = fallback; + } + + var nextTick = wrap(_defer$1); + + var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); + + /** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. 
+ * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function parallel$1(tasks, callback) { + return parallel(eachOf$1, tasks, callback); + } + + /** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. 
+ * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ + function parallelLimit(tasks, limit, callback) { + return parallel(eachOfLimit(limit), tasks, callback); + } + + /** + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. 
Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. 
+ * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + + /** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. 
+ * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ + function queue$1 (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); + } + + // Binary min-heap implementation used for priority queue. + // Implementation is stable, i.e. 
push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } + } + + function leftChi(i) { + return (i<<1)+1; + } + + function parent(i) { + return ((i+1)>>1)-1; + } + + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } + + /** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. 
+ * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
+ */ + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue$1(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; + } + + /** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } + } + + var race$1 = awaitify(race, 2); + + /** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); + } + + /** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); + } + + /** + * A helper function that wraps an array or an object of functions with `reflect`. 
+ * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... + * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; + } + + function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + 
cb(err, !v); + }); + }, callback); + } + + /** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + 
* + */ + function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) + } + var reject$2 = awaitify(reject$1, 3); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) + } + var rejectLimit$1 = awaitify(rejectLimit, 4); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) + } + var rejectSeries$1 = awaitify(rejectSeries, 3); + + function constant$1(value) { + return function () { + return value; + } + } + + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. 
Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; + + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; + + if 
(arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] + } + + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } + } + + /** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). 
+ * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. + * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); + } + + /** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. 
+ * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + 
* }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); + } + + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. 
+ * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], 
fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) + } + var some$1 = awaitify(some, 3); + + /** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) + } + var someLimit$1 = awaitify(someLimit, 4); + + /** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + } + var someSeries$1 = awaitify(someSeries, 3); + + /** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). 
+ * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } + } + var sortBy$1 = awaitify(sortBy, 3); + + /** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... 
+ * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); + } + + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + */ + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); + } + + /** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) + } + + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] + } + + /** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ + function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); + } + + var tryEach$1 = awaitify(tryEach); + + /** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. 
+ * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } + + /** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); + + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); + } + + /** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ + function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); + } + + var waterfall$1 = awaitify(waterfall); + + /** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. 
The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. 
+ * + * @typedef {Function} AsyncFunction + * @static + */ + + var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: 
eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; + + exports.default = index; + exports.apply = apply; + exports.applyEach = applyEach$1; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo; + exports.cargoQueue = cargo$1; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant; + exports.detect = detect$1; + exports.detectLimit = detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.each = each; + exports.eachLimit = eachLimit$2; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$2; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.forever = forever$1; + exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel$1; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue$1; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + 
exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$2; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = concatSeries$1; + exports.forEach = each; + exports.forEachSeries = eachSeries$1; + exports.forEachLimit = eachLimit$2; + exports.forEachOf = eachOf$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachOfLimit = eachOfLimit$2; + exports.inject = reduce$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.wrapSync = asyncify; + exports.during = whilst$1; + exports.doDuring = doWhilst$1; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); diff --git a/node_modules/async/dist/async.min.js b/node_modules/async/dist/async.min.js new file mode 100644 index 0000000..a12963b --- /dev/null +++ b/node_modules/async/dist/async.min.js @@ -0,0 +1 @@ +(function(e,t){"object"==typeof 
exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t(e.async={})})(this,function(e){'use strict';function t(e,...t){return(...n)=>e(...t,...n)}function n(e){return function(...t){var n=t.pop();return e.call(this,t,n)}}function a(e){setTimeout(e,0)}function i(e){return(t,...n)=>e(()=>t(...n))}function r(e){return u(e)?function(...t){const n=t.pop(),a=e.apply(this,t);return s(a,n)}:n(function(t,n){var a;try{a=e.apply(this,t)}catch(t){return n(t)}return a&&"function"==typeof a.then?s(a,n):void n(null,a)})}function s(e,t){return e.then(e=>{l(t,null,e)},e=>{l(t,e&&e.message?e:new Error(e))})}function l(e,t,n){try{e(t,n)}catch(e){_e(t=>{throw t},e)}}function u(e){return"AsyncFunction"===e[Symbol.toStringTag]}function d(e){return"AsyncGenerator"===e[Symbol.toStringTag]}function p(e){return"function"==typeof e[Symbol.asyncIterator]}function c(e){if("function"!=typeof e)throw new Error("expected a function");return u(e)?r(e):e}function o(e,t=e.length){if(!t)throw new Error("arity is undefined");return function(...n){return"function"==typeof n[t-1]?e.apply(this,n):new Promise((a,i)=>{n[t-1]=(e,...t)=>e?i(e):void a(1{c(e).apply(i,n.concat(t))},a)});return a}}function f(e,t,n,a){t=t||[];var i=[],r=0,s=c(n);return e(t,(e,t,n)=>{var a=r++;s(e,(e,t)=>{i[a]=t,n(e)})},e=>{a(e,i)})}function y(e){return e&&"number"==typeof e.length&&0<=e.length&&0==e.length%1}function m(e){function t(...t){if(null!==e){var n=e;e=null,n.apply(this,t)}}return Object.assign(t,e),t}function g(e){return e[Symbol.iterator]&&e[Symbol.iterator]()}function k(e){var t=-1,n=e.length;return function(){return++t=t||d||l||(d=!0,e.next().then(({value:e,done:t})=>{if(!(u||l))return d=!1,t?(l=!0,void(0>=p&&a(null))):void(p++,n(e,c,r),c++,i())}).catch(s))}function r(e,t){return p-=1,u?void 0:e?s(e):!1===e?(l=!0,void(u=!0)):t===be||l&&0>=p?(l=!0,a(null)):void i()}function s(e){u||(d=!1,l=!0,a(e))}let l=!1,u=!1,d=!1,p=0,c=0;i()}function O(e,t,n){function 
a(e,t){!1===e&&(l=!0);!0===l||(e?n(e):(++r===s||t===be)&&n(null))}n=m(n);var i=0,r=0,{length:s}=e,l=!1;for(0===s&&n(null);i{t=e,n=a}),e}function A(e,t,n){function a(e,t){g.push(()=>l(e,t))}function i(){if(!h){if(0===g.length&&0===o)return n(null,p);for(;g.length&&oe()),i()}function l(e,t){if(!f){var a=L((t,...a)=>{if(o--,!1===t)return void(h=!0);if(2>a.length&&([a]=a),t){var i={};if(Object.keys(p).forEach(e=>{i[e]=p[e]}),i[e]=a,f=!0,y=Object.create(null),h)return;n(t,i)}else p[e]=a,s(e)});o++;var i=c(t[t.length-1]);1{const i=e[a];Array.isArray(i)&&0<=i.indexOf(t)&&n.push(a)}),n}"number"!=typeof t&&(n=t,t=null),n=m(n||b());var d=Object.keys(e).length;if(!d)return n(null);t||(t=d);var p={},o=0,h=!1,f=!1,y=Object.create(null),g=[],k=[],v={};return Object.keys(e).forEach(t=>{var n=e[t];if(!Array.isArray(n))return a(t,[n]),void k.push(t);var i=n.slice(0,n.length-1),s=i.length;return 0===s?(a(t,n),void k.push(t)):void(v[t]=s,i.forEach(l=>{if(!e[l])throw new Error("async.auto task `"+t+"` has a non-existent dependency `"+l+"` in "+i.join(", "));r(l,()=>{s--,0===s&&a(t,n)})}))}),function(){for(var e,t=0;k.length;)e=k.pop(),t++,u(e).forEach(e=>{0==--v[e]&&k.push(e)});if(t!==d)throw new Error("async.auto cannot execute tasks due to a recursive dependency")}(),i(),n[Ce]}function I(e){let t="",n=0,a=e.indexOf("*/");for(;ne.replace(Ne,"").trim())}function j(e,t){var n={};return Object.keys(e).forEach(t=>{function a(e,t){var n=i.map(t=>e[t]);n.push(t),c(r)(...n)}var i,r=e[t],s=u(r),l=!s&&1===r.length||s&&0===r.length;if(Array.isArray(r))i=[...r],r=i.pop(),n[t]=i.concat(0{r(e,n),t(...a)};f[e].push(n)}function r(e,t){return e?t?void(f[e]=f[e].filter(e=>e!==t)):f[e]=[]:Object.keys(f).forEach(e=>f[e]=[])}function s(e,...t){f[e].forEach(e=>e(...t))}function l(e,t,n,a){function i(e,...t){return e?n?s(e):r():1>=t.length?r(t[0]):void r(t)}if(null!=a&&"function"!=typeof a)throw new Error("task callback must be a function");k.started=!0;var 
r,s,l=k._createTaskItem(e,n?i:a||i);if(t?k._tasks.unshift(l):k._tasks.push(l),y||(y=!0,_e(()=>{y=!1,k.process()})),n||!a)return new Promise((e,t)=>{r=e,s=t})}function u(e){return function(t,...n){o-=1;for(var a=0,r=e.length;as("drain")),!0)}if(null==t)t=1;else if(0===t)throw new RangeError("Concurrency must not be zero");var p=c(e),o=0,h=[];const f={error:[],drain:[],saturated:[],unsaturated:[],empty:[]};var y=!1;const m=e=>t=>t?void(r(e),a(e,t)):new Promise((t,n)=>{i(e,(e,a)=>e?n(e):void t(a))});var g=!1,k={_tasks:new Ve,_createTaskItem(e,t){return{data:e,callback:t}},*[Symbol.iterator](){yield*k._tasks[Symbol.iterator]()},concurrency:t,payload:n,buffer:t/4,started:!1,paused:!1,push(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!1,!1,t)):l(e,!1,!1,t)},pushAsync(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!1,!0,t)):l(e,!1,!0,t)},kill(){r(),k._tasks.empty()},unshift(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!0,!1,t)):l(e,!0,!1,t)},unshiftAsync(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!0,!0,t)):l(e,!0,!0,t)},remove(e){k._tasks.remove(e)},process(){var e=Math.min;if(!g){for(g=!0;!k.paused&&o{t.apply(n,e.concat((e,...t)=>{a(e,t)}))},(e,t)=>a(e,...t)),a[Ce]}}function P(...e){return C(...e.reverse())}function R(...e){return function(...t){var n=t.pop();return n(null,...e)}}function z(e,t){return(n,a,i,r)=>{var s,l=!1;const u=c(i);n(a,(n,a,i)=>{u(n,(a,r)=>a||!1===a?i(a):e(r)&&!s?(l=!0,s=t(!0,n),i(null,be)):void i())},e=>e?r(e):void r(null,l?s:t(!1)))}}function N(e){return(t,...n)=>c(t)(...n,(t,...n)=>{"object"==typeof console&&(t?console.error&&console.error(t):console[e]&&n.forEach(t=>console[e](t)))})}function V(e,t,n){const a=c(t);return Xe(e,(...e)=>{const t=e.pop();a(...e,(e,n)=>t(e,!n))},n)}function Y(e){return(t,n,a)=>e(t,a)}function q(e){return u(e)?e:function(...t){var n=t.pop(),a=!0;t.push((...e)=>{a?_e(()=>n(...e)):n(...e)}),e.apply(this,t),a=!1}}function D(e,t,n,a){var 
r=Array(t.length);e(t,(e,t,a)=>{n(e,(e,n)=>{r[t]=!!n,a(e)})},e=>{if(e)return a(e);for(var n=[],s=0;s{n(e,(n,r)=>n?a(n):void(r&&i.push({index:t,value:e}),a(n)))},e=>e?a(e):void a(null,i.sort((e,t)=>e.index-t.index).map(e=>e.value)))}function U(e,t,n,a){var i=y(t)?D:Q;return i(e,t,c(n),a)}function G(e,t,n){return ut(e,1/0,t,n)}function W(e,t,n){return ut(e,1,t,n)}function H(e,t,n){return pt(e,1/0,t,n)}function J(e,t,n){return pt(e,1,t,n)}function K(e,t=e=>e){var a=Object.create(null),r=Object.create(null),s=c(e),l=n((e,n)=>{var u=t(...e);u in a?_e(()=>n(null,...a[u])):u in r?r[u].push(n):(r[u]=[n],s(...e,(e,...t)=>{e||(a[u]=t);var n=r[u];delete r[u];for(var s=0,d=n.length;s{n(e[0],t)},t,1)}function ee(e){return(e<<1)+1}function te(e){return(e+1>>1)-1}function ne(e,t){return e.priority===t.priority?e.pushCount({data:e,priority:t})):{data:e,priority:t}}var a=$(e,t),{push:i,pushAsync:r}=a;return a._tasks=new ht,a._createTaskItem=({data:e,priority:t},n)=>({data:e,priority:t,callback:n}),a.push=function(e,t=0,a){return i(n(e,t),a)},a.pushAsync=function(e,t=0,a){return r(n(e,t),a)},delete a.unshift,delete a.unshiftAsync,a}function ie(e,t,n,a){var i=[...e].reverse();return qe(i,t,n,a)}function re(e){var t=c(e);return n(function(e,n){return e.push((e,...t)=>{let a={};if(e&&(a.error=e),0=t.length&&([i]=t),a.value=i}n(null,a)}),t.apply(this,e)})}function se(e){var t;return Array.isArray(e)?t=e.map(re):(t={},Object.keys(e).forEach(n=>{t[n]=re.call(this,e[n])})),t}function le(e,t,n,a){const i=c(n);return U(e,t,(e,t)=>{i(e,(e,n)=>{t(e,!n)})},a)}function ue(e){return function(){return e}}function de(e,t,n){function a(){r((e,...t)=>{!1===e||(e&&s++arguments.length&&"function"==typeof e?(n=t||b(),t=e):(pe(i,e),n=n||b()),"function"!=typeof t)throw new Error("Invalid arguments for async.retry");var r=c(t),s=1;return a(),n[Ce]}function pe(e,n){if("object"==typeof n)e.times=+n.times||kt,e.intervalFunc="function"==typeof 
n.interval?n.interval:ue(+n.interval||vt),e.errorFilter=n.errorFilter;else if("number"==typeof n||"string"==typeof n)e.times=+n||kt;else throw new Error("Invalid arguments for async.retry")}function ce(e,t){t||(t=e,e=null);let a=e&&e.arity||t.length;u(t)&&(a+=1);var i=c(t);return n((t,n)=>{function r(e){i(...t,e)}return(t.length{var s,l=!1;n.push((...e)=>{l||(r(...e),clearTimeout(s))}),s=setTimeout(function(){var t=e.name||"anonymous",n=new Error("Callback function \""+t+"\" timed out.");n.code="ETIMEDOUT",a&&(n.info=a),l=!0,r(n)},t),i(...n)})}function fe(e){for(var t=Array(e);e--;)t[e]=e;return t}function ye(e,t,n,a){var i=c(n);return De(fe(e),t,i,a)}function me(e,t,n){return ye(e,1/0,t,n)}function ge(e,t,n){return ye(e,1,t,n)}function ke(e,t,n,a){3>=arguments.length&&"function"==typeof t&&(a=n,n=t,t=Array.isArray(e)?[]:{}),a=m(a||b());var i=c(n);return Me(e,(e,n,a)=>{i(t,e,n,a)},e=>a(e,t)),a[Ce]}function ve(e){return(...t)=>(e.unmemoized||e)(...t)}function Se(e,t,n){const a=c(e);return _t(e=>a((t,n)=>e(t,!n)),t,n)}var xe,Le="function"==typeof queueMicrotask&&queueMicrotask,Ee="function"==typeof setImmediate&&setImmediate,Oe="object"==typeof process&&"function"==typeof process.nextTick;xe=Le?queueMicrotask:Ee?setImmediate:Oe?process.nextTick:a;var _e=i(xe);const be={};var Ae=e=>(t,n,a)=>{function i(e,t){if(!u)if(c-=1,e)l=!0,a(e);else if(!1===e)l=!0,u=!0;else{if(t===be||l&&0>=c)return l=!0,a(null);o||r()}}function r(){for(o=!0;c=c&&a(null));c+=1,n(t.value,t.key,L(i))}o=!1}if(a=m(a),0>=e)throw new RangeError("concurrency limit cannot be less than 1");if(!t)return a(null);if(d(t))return E(t,e,n,a);if(p(t))return E(t[Symbol.asyncIterator](),e,n,a);var s=x(t),l=!1,u=!1,c=0,o=!1;r()},Ie=o(function(e,t,n,a){return Ae(t)(e,c(n),a)},4),Me=o(function(e,t,n){var a=y(e)?O:_;return a(e,c(t),n)},3),je=o(function(e,t,n){return f(Me,e,t,n)},3),we=h(je),Be=o(function(e,t,n){return Ie(e,1,t,n)},3),Te=o(function(e,t,n){return f(Be,e,t,n)},3),Fe=h(Te);const 
Ce=Symbol("promiseCallback");var Pe=/^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/,Re=/^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/,ze=/,/,Ne=/(=.+)?(\s*)$/;class Ve{constructor(){this.head=this.tail=null,this.length=0}removeLink(e){return e.prev?e.prev.next=e.next:this.head=e.next,e.next?e.next.prev=e.prev:this.tail=e.prev,e.prev=e.next=null,this.length-=1,e}empty(){for(;this.head;)this.shift();return this}insertAfter(e,t){t.prev=e,t.next=e.next,e.next?e.next.prev=t:this.tail=t,e.next=t,this.length+=1}insertBefore(e,t){t.prev=e.prev,t.next=e,e.prev?e.prev.next=t:this.head=t,e.prev=t,this.length+=1}unshift(e){this.head?this.insertBefore(this.head,e):w(this,e)}push(e){this.tail?this.insertAfter(this.tail,e):w(this,e)}shift(){return this.head&&this.removeLink(this.head)}pop(){return this.tail&&this.removeLink(this.tail)}toArray(){return[...this]}*[Symbol.iterator](){for(var e=this.head;e;)yield e.data,e=e.next}remove(e){for(var t=this.head;t;){var{next:n}=t;e(t)&&this.removeLink(t),t=n}return this}}var Ye,qe=o(function(e,t,n,a){a=m(a);var r=c(n);return Be(e,(e,n,a)=>{r(t,e,(e,n)=>{t=n,a(e)})},e=>a(e,t))},4),De=o(function(e,t,n,a){return f(Ae(t),e,n,a)},4),Qe=o(function(e,t,n,a){var i=c(n);return De(e,t,(e,t)=>{i(e,(e,...n)=>e?t(e):t(e,n))},(e,t)=>{for(var n=[],r=0;re,(e,t)=>t)(Me,e,t,n)},3),He=o(function(e,t,n,a){return z(e=>e,(e,t)=>t)(Ae(t),e,n,a)},4),Je=o(function(e,t,n){return z(e=>e,(e,t)=>t)(Ae(1),e,t,n)},3),Ke=N("dir"),Xe=o(function(e,t,n){function a(e,...t){return e?n(e):void(!1===e||(r=t,l(...t,i)))}function i(e,t){return e?n(e):!1===e?void 0:t?void s(a):n(null,...r)}n=L(n);var r,s=c(e),l=c(t);return i(null,!0)},3),Ze=o(function(e,t,n){return Me(e,Y(c(t)),n)},3),$e=o(function(e,t,n,a){return Ae(t)(e,Y(c(n)),a)},4),et=o(function(e,t,n){return $e(e,1,t,n)},3),tt=o(function(e,t,n){return z(e=>!e,e=>!e)(Me,e,t,n)},3),nt=o(function(e,t,n,a){return z(e=>!e,e=>!e)(Ae(t),e,n,a)},4),at=o(function(e,t,n){return 
z(e=>!e,e=>!e)(Be,e,t,n)},3),it=o(function(e,t,n){return U(Me,e,t,n)},3),rt=o(function(e,t,n,a){return U(Ae(t),e,n,a)},4),st=o(function(e,t,n){return U(Be,e,t,n)},3),lt=o(function(e,t){function n(e){return e?a(e):void(!1===e||i(n))}var a=L(t),i=c(q(e));return n()},2),ut=o(function(e,t,n,a){var i=c(n);return De(e,t,(e,t)=>{i(e,(n,a)=>n?t(n):t(n,{key:a,val:e}))},(e,t)=>{for(var n={},{hasOwnProperty:r}=Object.prototype,s=0;s{r(e,t,(e,a)=>e?n(e):void(i[t]=a,n(e)))},e=>a(e,i))},4);Ye=Oe?process.nextTick:Ee?setImmediate:a;var ct=i(Ye),ot=o((e,t,n)=>{var a=y(t)?[]:{};e(t,(e,t,n)=>{c(e)((e,...i)=>{2>i.length&&([i]=i),a[t]=i,n(e)})},e=>n(e,a))},3);class ht{constructor(){this.heap=[],this.pushCount=Number.MIN_SAFE_INTEGER}get length(){return this.heap.length}empty(){return this.heap=[],this}percUp(e){for(let n;0e)(Me,e,t,n)},3),xt=o(function(e,t,n,a){return z(Boolean,e=>e)(Ae(t),e,n,a)},4),Lt=o(function(e,t,n){return z(Boolean,e=>e)(Be,e,t,n)},3),Et=o(function(e,t,n){function a(e,t){var n=e.criteria,a=t.criteria;return na?1:0}var i=c(t);return je(e,(e,t)=>{i(e,(n,a)=>n?t(n):void t(n,{value:e,criteria:a}))},(e,t)=>e?n(e):void n(null,t.sort(a).map(e=>e.value)))},3),Ot=o(function(e,t){var n,a=null;return et(e,(e,t)=>{c(e)((e,...i)=>!1===e?t(e):void(2>i.length?[n]=i:n=i,a=e,t(e?null:{})))},()=>t(a,n))}),_t=o(function(e,t,n){function a(e,...t){if(e)return n(e);l=t;!1===e||s(i)}function i(e,t){return e?n(e):!1===e?void 0:t?void r(a):n(null,...l)}n=L(n);var r=c(t),s=c(e),l=[];return s(i)},3),bt=o(function(e,t){function n(t){var n=c(e[i++]);n(...t,L(a))}function a(a,...r){return!1===a?void 0:a||i===e.length?t(a,...r):void n(r)}if(t=m(t),!Array.isArray(e))return t(new Error("First argument to waterfall must be an array of functions"));if(!e.length)return t();var 
i=0;n([])});e.default={apply:t,applyEach:we,applyEachSeries:Fe,asyncify:r,auto:A,autoInject:j,cargo:T,cargoQueue:F,compose:P,concat:Ue,concatLimit:Qe,concatSeries:Ge,constant:R,detect:We,detectLimit:He,detectSeries:Je,dir:Ke,doUntil:V,doWhilst:Xe,each:Ze,eachLimit:$e,eachOf:Me,eachOfLimit:Ie,eachOfSeries:Be,eachSeries:et,ensureAsync:q,every:tt,everyLimit:nt,everySeries:at,filter:it,filterLimit:rt,filterSeries:st,forever:lt,groupBy:G,groupByLimit:ut,groupBySeries:W,log:dt,map:je,mapLimit:De,mapSeries:Te,mapValues:H,mapValuesLimit:pt,mapValuesSeries:J,memoize:K,nextTick:ct,parallel:X,parallelLimit:Z,priorityQueue:ae,queue:$,race:ft,reduce:qe,reduceRight:ie,reflect:re,reflectAll:se,reject:yt,rejectLimit:mt,rejectSeries:gt,retry:de,retryable:ce,seq:C,series:oe,setImmediate:_e,some:St,someLimit:xt,someSeries:Lt,sortBy:Et,timeout:he,times:me,timesLimit:ye,timesSeries:ge,transform:ke,tryEach:Ot,unmemoize:ve,until:Se,waterfall:bt,whilst:_t,all:tt,allLimit:nt,allSeries:at,any:St,anyLimit:xt,anySeries:Lt,find:We,findLimit:He,findSeries:Je,flatMap:Ue,flatMapLimit:Qe,flatMapSeries:Ge,forEach:Ze,forEachSeries:et,forEachLimit:$e,forEachOf:Me,forEachOfSeries:Be,forEachOfLimit:Ie,inject:qe,foldl:qe,foldr:ie,select:it,selectLimit:rt,selectSeries:st,wrapSync:r,during:_t,doDuring:Xe},e.apply=t,e.applyEach=we,e.applyEachSeries=Fe,e.asyncify=r,e.auto=A,e.autoInject=j,e.cargo=T,e.cargoQueue=F,e.compose=P,e.concat=Ue,e.concatLimit=Qe,e.concatSeries=Ge,e.constant=R,e.detect=We,e.detectLimit=He,e.detectSeries=Je,e.dir=Ke,e.doUntil=V,e.doWhilst=Xe,e.each=Ze,e.eachLimit=$e,e.eachOf=Me,e.eachOfLimit=Ie,e.eachOfSeries=Be,e.eachSeries=et,e.ensureAsync=q,e.every=tt,e.everyLimit=nt,e.everySeries=at,e.filter=it,e.filterLimit=rt,e.filterSeries=st,e.forever=lt,e.groupBy=G,e.groupByLimit=ut,e.groupBySeries=W,e.log=dt,e.map=je,e.mapLimit=De,e.mapSeries=Te,e.mapValues=H,e.mapValuesLimit=pt,e.mapValuesSeries=J,e.memoize=K,e.nextTick=ct,e.parallel=X,e.parallelLimit=Z,e.priorityQueue=ae,e.queue=$,e.race=ft
,e.reduce=qe,e.reduceRight=ie,e.reflect=re,e.reflectAll=se,e.reject=yt,e.rejectLimit=mt,e.rejectSeries=gt,e.retry=de,e.retryable=ce,e.seq=C,e.series=oe,e.setImmediate=_e,e.some=St,e.someLimit=xt,e.someSeries=Lt,e.sortBy=Et,e.timeout=he,e.times=me,e.timesLimit=ye,e.timesSeries=ge,e.transform=ke,e.tryEach=Ot,e.unmemoize=ve,e.until=Se,e.waterfall=bt,e.whilst=_t,e.all=tt,e.allLimit=nt,e.allSeries=at,e.any=St,e.anyLimit=xt,e.anySeries=Lt,e.find=We,e.findLimit=He,e.findSeries=Je,e.flatMap=Ue,e.flatMapLimit=Qe,e.flatMapSeries=Ge,e.forEach=Ze,e.forEachSeries=et,e.forEachLimit=$e,e.forEachOf=Me,e.forEachOfSeries=Be,e.forEachOfLimit=Ie,e.inject=qe,e.foldl=qe,e.foldr=ie,e.select=it,e.selectLimit=rt,e.selectSeries=st,e.wrapSync=r,e.during=_t,e.doDuring=Xe,Object.defineProperty(e,"__esModule",{value:!0})}); \ No newline at end of file diff --git a/node_modules/async/dist/async.mjs b/node_modules/async/dist/async.mjs new file mode 100644 index 0000000..d0cd59d --- /dev/null +++ b/node_modules/async/dist/async.mjs @@ -0,0 +1,5947 @@ +/** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. 
+ * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ +function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); +} + +function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +} + +/* istanbul ignore file */ + +var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer; + +if (hasQueueMicrotask) { + _defer = queueMicrotask; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else if (hasNextTick) { + _defer = process.nextTick; +} else { + _defer = fallback; +} + +var setImmediate$1 = wrap(_defer); + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. 
+ * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } +} + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; +} + +// conditionally promisify a function. 
+// only return a promise if a callback is omitted +function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable +} + +function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +} + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} + +function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; +} + +// A temporary value used to identify if the loop should be broken. +// See #1064, #1293 +const breakLoop = {}; + +function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper +} + +function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +} + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? 
{value: coll[i], key: i} : null; + } +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } +} + +function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; +} + +function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); +} + +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done 
iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} + +var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); +} + +var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). 
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); +} + +var eachOf$1 = awaitify(eachOf, 3); + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. 
The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. 
+ * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) +} +var map$1 = awaitify(map, 3); + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. 
The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +var applyEach$1 = applyEach(map$1); + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) +} +var eachOfSeries$1 = awaitify(eachOfSeries, 3); + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) +} +var mapSeries$1 = awaitify(mapSeries, 3); + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. 
It takes no args, other than + * a callback. + */ +var applyEachSeries = applyEach(mapSeries$1); + +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback +} + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. 
The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + 
+ function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] +} + +var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { 
+ let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. 
+ * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. 
+ * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); +} + +// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation +// used for queues. This implementation assumes that the node provided by the user can be modified +// to adjust the next and last properties. We implement only the minimal functionality +// for queue support. +class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && 
this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} + +function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? 
rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, 
callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). + if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + 
Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; +} + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. 
+ * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. + * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +var reduce$1 = awaitify(reduce, 4); + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. 
+ * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; +} + +/** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. 
+ * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ +function compose(...args) { + return seq(...args.reverse()); +} + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) +} +var mapLimit$1 = awaitify(mapLimit, 4); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +var concatLimit$1 = awaitify(concatLimit, 4); + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. 
+ * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such 
file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) +} +var concat$1 = awaitify(concat, 3); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) +} +var concatSeries$1 = awaitify(concatSeries, 3); + +/** + * Returns a function that when called, calls-back with the values provided. 
+ * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ +function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +} + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; +} + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. 
That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = 
await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) +} +var detect$1 = awaitify(detect, 3); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) +} +var detectLimit$1 = awaitify(detectLimit, 4); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. 
+ * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) +} + +var detectSeries$1 = awaitify(detectSeries, 3); + +function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) +} + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. 
+ * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +var dir = consoleFunc('dir'); + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +var doWhilst$1 = awaitify(doWhilst, 3); + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); +} + +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt 
could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); +} + +var each = awaitify(eachLimit, 3); + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); +} +var eachLimit$2 = awaitify(eachLimit$1, 4); + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. 
+ * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) +} +var eachSeries$1 = awaitify(eachSeries, 3); + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! 
+ * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return 
_createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) +} +var every$1 = awaitify(every, 3); + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) +} +var everyLimit$1 = awaitify(everyLimit, 4); + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) +} +var everySeries$1 = awaitify(everySeries, 3); + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); +} + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. 
Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) +} +var filter$1 = awaitify(filter, 3); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. 
+ * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ +function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) +} +var filterLimit$1 = awaitify(filterLimit, 4); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) +} +var filterSeries$1 = awaitify(filterSeries, 3); + +/** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ +function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); +} +var forever$1 = awaitify(forever, 2); + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. 
+ * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ +function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); +} + +var groupByLimit$1 = awaitify(groupByLimit, 4); + +/** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order.
/**
 * Returns a new object, where each value corresponds to an array of items,
 * from `coll`, that returned the corresponding key. That is, the keys of the
 * object correspond to the values passed to the `iteratee` callback.
 *
 * Note: since the `iteratee` is applied to each item in parallel, there is
 * no guarantee that the `iteratee` functions will complete in order. However,
 * the values for each key in the `result` will be in the same order as the
 * original `coll`. For Objects, the values will roughly be in the order of
 * the original Objects' keys (but this can vary across JavaScript engines).
 *
 * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
 * @param {AsyncFunction} iteratee - An async function to apply to each item
 * in `coll`. Should complete with a `key` to group the value under.
 * Invoked with (value, callback).
 * @param {Function} [callback] - Invoked with (err, result); `result` is an
 * object whose properties are arrays of values which returned the
 * corresponding key.
 * @returns {Promise} a promise, if no callback is passed
 */
function groupBy (coll, iteratee, callback) {
    return groupByLimit$1(coll, Infinity, iteratee, callback)
}

/**
 * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a
 * single async operation at a time.
 *
 * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
 * @param {AsyncFunction} iteratee - An async function to apply to each item
 * in `coll`. Should complete with a `key` to group the value under.
 * Invoked with (value, callback).
 * @param {Function} [callback] - Invoked with (err, result); `result` is an
 * object whose properties are arrays of values which returned the
 * corresponding key.
 * @returns {Promise} a promise, if no callback is passed
 */
function groupBySeries (coll, iteratee, callback) {
    return groupByLimit$1(coll, 1, iteratee, callback)
}

/**
 * Logs the result of an `async` function to the `console`. Only works in
 * Node.js or in browsers that support `console.log` and `console.error`.
 * If multiple arguments are returned from the async function, `console.log`
 * is called on each argument in order.
 *
 * @param {AsyncFunction} function - The function you want to eventually
 * apply all arguments to.
 * @param {...*} arguments... - Any number of arguments to apply to the function.
 */
var log = consoleFunc('log');
`result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). 
/**
 * A relative of [`map`]{@link module:Collections.map}, designed for use with
 * objects.
 *
 * Produces a new Object by mapping each value of `obj` through the
 * `iteratee` function. The `iteratee` is called with each `value` and `key`
 * from `obj` and a callback for when it has finished processing. Each of
 * these callbacks takes two arguments: an `error`, and the transformed item
 * from `obj`. If `iteratee` passes an error to its callback, the main
 * `callback` (for the `mapValues` function) is immediately called with the
 * error.
 *
 * Note: the order of the keys in the result is not guaranteed. The keys will
 * be roughly in the order they complete (but this is very engine-specific).
 *
 * @param {Object} obj - A collection to iterate over.
 * @param {AsyncFunction} iteratee - A function to apply to each value and
 * key in `coll`. Should complete with the transformed value as its result.
 * Invoked with (value, key, callback).
 * @param {Function} [callback] - Invoked with (err, result); `result` is a
 * new object with each key from `obj` mapped to its transformed value.
 * @returns {Promise} a promise, if no callback is passed
 */
function mapValues(obj, iteratee, callback) {
    return mapValuesLimit$1(obj, Infinity, iteratee, callback)
}

/**
 * The same as [`mapValues`]{@link module:Collections.mapValues} but runs
 * only a single async operation at a time.
 *
 * @param {Object} obj - A collection to iterate over.
 * @param {AsyncFunction} iteratee - A function to apply to each value and
 * key in `coll`. Should complete with the transformed value as its result.
 * Invoked with (value, key, callback).
 * @param {Function} [callback] - Invoked with (err, result); `result` is a
 * new object with each key from `obj` mapped to its transformed value.
 * @returns {Promise} a promise, if no callback is passed
 */
function mapValuesSeries(obj, iteratee, callback) {
    return mapValuesLimit$1(obj, 1, iteratee, callback)
}
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} + +/* istanbul ignore file */ + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. 
+ * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer$1; + +if (hasNextTick) { + _defer$1 = process.nextTick; +} else if (hasSetImmediate) { + _defer$1 = setImmediate; +} else { + _defer$1 = fallback; +} + +var nextTick = wrap(_defer$1); + +var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. 
/**
 * Run the `tasks` collection of functions in parallel, without waiting until
 * the previous function has completed. If any of the functions pass an error
 * to its callback, the main `callback` is immediately called with the value
 * of the error. Once the `tasks` have completed, the results are passed to
 * the final `callback` as an array.
 *
 * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about
 * parallel execution of code. If your tasks do not use any timers or perform
 * any I/O, they will actually be executed in series. JavaScript remains
 * single-threaded.
 *
 * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the
 * execution of other tasks when a task fails.
 *
 * It is also possible to use an object instead of an array. Each property
 * will be run as a function and the results will be passed to the final
 * `callback` as an object instead of an array.
 *
 * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of
 * [async functions]{@link AsyncFunction} to run. Each async function can
 * complete with any number of optional `result` values.
 * @param {Function} [callback] - An optional callback to run once all the
 * functions have completed successfully. Gets a results array (or object)
 * containing all the result arguments passed to the task callbacks.
 * Invoked with (err, results).
 * @returns {Promise} a promise, if a callback is not passed
 */
function parallel$1(tasks, callback) {
    return parallel(eachOf$1, tasks, callback);
}

/**
 * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a
 * maximum of `limit` async operations at a time.
 *
 * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of
 * [async functions]{@link AsyncFunction} to run. Each async function can
 * complete with any number of optional `result` values.
 * @param {number} limit - The maximum number of async operations at a time.
 * @param {Function} [callback] - An optional callback to run once all the
 * functions have completed successfully. Invoked with (err, results).
 * @returns {Promise} a promise, if a callback is not passed
 */
function parallelLimit(tasks, limit, callback) {
    return parallel(eachOfLimit(limit), tasks, callback);
}
/**
 * A queue of tasks for the worker function to complete.
 * @typedef {Iterable} QueueObject
 * @memberOf module:ControlFlow
 * @property {Function} length - number of items waiting to be processed.
 * @property {boolean} started - whether any items have been pushed.
 * @property {Function} running - number of items currently being processed.
 * @property {Function} workersList - array of items currently being processed.
 * @property {Function} idle - true when nothing is waiting or running.
 * @property {number} concurrency - how many workers run in parallel; may be
 * changed after the queue is created to alter concurrency on-the-fly.
 * @property {number} payload - how many items are passed to the worker at a
 * time (only applies to [cargo]{@link module:ControlFlow.cargo} objects).
 * @property {AsyncFunction} push - add a task (or array of tasks); the
 * callback fires once the worker finishes each task.
 * @property {AsyncFunction} unshift - add a task to the front of the queue.
 * @property {AsyncFunction} pushAsync - like `push`, but returns a promise
 * that rejects on error.
 * @property {AsyncFunction} unshiftAsync - like `unshift`, but returns a
 * promise that rejects on error.
 * @property {Function} remove - remove queued items matching a test function
 * of the form `function ({data, priority}) {}` returning a Boolean.
 * @property {Function} saturated - sets a callback (or returns a promise)
 * for when running workers hit the `concurrency` limit.
 * @property {Function} unsaturated - sets a callback (or returns a promise)
 * for when running workers drop below the `concurrency` & `buffer` limits.
 * @property {number} buffer - minimum threshold to be considered unsaturated.
 * @property {Function} empty - sets a callback (or returns a promise) for
 * when the last queued item is handed to a worker.
 * @property {Function} drain - sets a callback (or returns a promise) for
 * when the last item has returned from the worker.
 * @property {Function} error - sets a callback `function(error, task)` (or
 * returns a promise) for task errors.
 * @property {boolean} paused - whether the queue is paused.
 * @property {Function} pause - pause processing until `resume()` is called.
 * @property {Function} resume - resume processing of queued tasks.
 * @property {Function} kill - remove the `drain` callback and empty the
 * queue, forcing it idle; push no more tasks after calling this.
 */

/**
 * Creates a `queue` object with the specified `concurrency`. Tasks added to
 * the `queue` are processed in parallel (up to the `concurrency` limit). If
 * all `worker`s are in progress, the task is queued until one becomes
 * available. Once a `worker` completes a `task`, that `task`'s callback is
 * called.
 *
 * @param {AsyncFunction} worker - An async function for processing a queued
 * task. To handle errors from an individual task, pass a callback to
 * `q.push()`. Invoked with (task, callback).
 * @param {number} [concurrency=1] - how many `worker` functions run in
 * parallel; throws if `0`.
 * @returns {module:ControlFlow.QueueObject} A queue object to manage the
 * tasks.
 */
function queue$1 (worker, concurrency) {
    var asyncWorker = wrapAsync(worker);
    // delegate to the generic queue with a payload of one task per call
    return queue((batch, cb) => {
        asyncWorker(batch[0], cb);
    }, concurrency, 1);
}
push time is considered for equal priorities +class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } +} + +function leftChi(i) { + return (i<<1)+1; +} + +function parent(i) { + return ((i+1)>>1)-1; +} + +function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } +} + +/** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. 
/**
 * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are
 * assigned a priority and completed in ascending priority order.
 *
 * @param {AsyncFunction} worker - An async function for processing a queued
 * task. To handle errors from an individual task, pass a callback to
 * `q.push()`. Invoked with (task, callback).
 * @param {number} concurrency - how many `worker` functions run in parallel;
 * defaults to `1` if omitted, throws if `0`.
 * @returns {module:ControlFlow.QueueObject} A priorityQueue object. There
 * are three differences from `queue` objects:
 * * `push(task, priority, [callback])` - `priority` should be a number; an
 *   array of `tasks` all receive the same priority.
 * * `pushAsync(task, priority, [callback])` - like `push`, but returns a
 *   promise that rejects on error.
 * * The `unshift` and `unshiftAsync` methods were removed.
 */
function priorityQueue(worker, concurrency) {
    // start from a normal queue and retrofit priority behaviour
    var q = queue$1(worker, concurrency);

    var {push, pushAsync} = q;

    // replace the FIFO task list with a stable min-heap keyed on priority
    q._tasks = new Heap();
    q._createTaskItem = ({data, priority}, callback) => ({data, priority, callback});

    function withPriority(tasks, priority) {
        if (!Array.isArray(tasks)) {
            return {data: tasks, priority};
        }
        return tasks.map(data => ({data, priority}));
    }

    // override push/pushAsync to accept a second `priority` parameter
    q.push = function(data, priority = 0, callback) {
        return push(withPriority(data, priority), callback);
    };

    q.pushAsync = function(data, priority = 0, callback) {
        return pushAsync(withPriority(data, priority), callback);
    };

    // unshift makes no sense on a priority-ordered queue
    delete q.unshift;
    delete q.unshiftAsync;

    return q;
}
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } +} + +var race$1 = awaitify(race, 2); + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); +} + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} + +/** + * A helper function that wraps an array or an object of functions with `reflect`. 
+ * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... + * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; +} + +function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, 
!v); + }); + }, callback); +} + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ 
+function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) +} +var reject$2 = awaitify(reject$1, 3); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) +} +var rejectLimit$1 = awaitify(rejectLimit, 4); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) +} +var rejectSeries$1 = awaitify(rejectSeries, 3); + +function constant$1(value) { + return function () { + return value; + } +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). 
+ * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 
'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). 
+ * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. + * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); +} + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. 
+ * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + 
* }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); +} + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. 
+ * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], 
fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) +} +var some$1 = awaitify(some, 3); + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) +} +var someLimit$1 = awaitify(someLimit, 4); + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) +} +var someSeries$1 = awaitify(someSeries, 3); + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). 
+ * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } +} +var sortBy$1 = awaitify(sortBy, 3); + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... 
+ * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} + +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); +} + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) +} + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] +} + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +var tryEach$1 = awaitify(tryEach); + +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. 
+ * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +var whilst$1 = awaitify(whilst, 3); + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); +} + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +var waterfall$1 = awaitify(waterfall); + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. 
The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. 
+ * + * @typedef {Function} AsyncFunction + * @static + */ + +var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: 
eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 +}; + +export default index; +export { apply, applyEach$1 as applyEach, applyEachSeries, asyncify, auto, autoInject, cargo, cargo$1 as cargoQueue, compose, concat$1 as concat, concatLimit$1 as concatLimit, concatSeries$1 as concatSeries, constant, detect$1 as detect, detectLimit$1 as detectLimit, detectSeries$1 as detectSeries, dir, doUntil, doWhilst$1 as doWhilst, each, eachLimit$2 as eachLimit, eachOf$1 as eachOf, eachOfLimit$2 as eachOfLimit, eachOfSeries$1 as eachOfSeries, eachSeries$1 as eachSeries, ensureAsync, every$1 as every, everyLimit$1 as everyLimit, everySeries$1 as everySeries, filter$1 as filter, filterLimit$1 as filterLimit, filterSeries$1 as filterSeries, forever$1 as forever, groupBy, groupByLimit$1 as groupByLimit, groupBySeries, log, map$1 as map, mapLimit$1 as mapLimit, mapSeries$1 as mapSeries, mapValues, mapValuesLimit$1 as mapValuesLimit, mapValuesSeries, memoize, nextTick, parallel$1 as parallel, parallelLimit, priorityQueue, queue$1 as queue, race$1 as race, reduce$1 as reduce, reduceRight, reflect, reflectAll, reject$2 as reject, rejectLimit$1 as rejectLimit, rejectSeries$1 as rejectSeries, retry, retryable, seq, series, setImmediate$1 as setImmediate, some$1 as some, someLimit$1 as someLimit, someSeries$1 as someSeries, sortBy$1 as sortBy, timeout, times, timesLimit, timesSeries, transform, tryEach$1 as tryEach, unmemoize, until, waterfall$1 as waterfall, whilst$1 as whilst, every$1 as all, everyLimit$1 as allLimit, everySeries$1 as allSeries, some$1 as any, someLimit$1 as anyLimit, someSeries$1 as anySeries, detect$1 as find, detectLimit$1 as findLimit, detectSeries$1 as findSeries, concat$1 as flatMap, concatLimit$1 as flatMapLimit, concatSeries$1 as flatMapSeries, each as forEach, eachSeries$1 as forEachSeries, 
eachLimit$2 as forEachLimit, eachOf$1 as forEachOf, eachOfSeries$1 as forEachOfSeries, eachOfLimit$2 as forEachOfLimit, reduce$1 as inject, reduce$1 as foldl, reduceRight as foldr, filter$1 as select, filterLimit$1 as selectLimit, filterSeries$1 as selectSeries, asyncify as wrapSync, whilst$1 as during, doWhilst$1 as doDuring }; diff --git a/node_modules/async/doDuring.js b/node_modules/async/doDuring.js new file mode 100644 index 0000000..4c98e9e --- /dev/null +++ b/node_modules/async/doDuring.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. 
+ * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/doUntil.js b/node_modules/async/doUntil.js new file mode 100644 index 0000000..8aa0935 --- /dev/null +++ b/node_modules/async/doUntil.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = doUntil; + +var _doWhilst = require('./doWhilst.js'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). 
+ * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _doWhilst2.default)(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb(err, !truth)); + }, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/doWhilst.js b/node_modules/async/doWhilst.js new file mode 100644 index 0000000..4c98e9e --- /dev/null +++ b/node_modules/async/doWhilst.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. 
+ * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/during.js b/node_modules/async/during.js new file mode 100644 index 0000000..32a4776 --- /dev/null +++ b/node_modules/async/during.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = 
_interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports['default']; \ No 
newline at end of file diff --git a/node_modules/async/each.js b/node_modules/async/each.js new file mode 100644 index 0000000..405d495 --- /dev/null +++ b/node_modules/async/each.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt 
could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachLimit.js b/node_modules/async/eachLimit.js new file mode 100644 index 0000000..5f3d009 --- /dev/null +++ b/node_modules/async/eachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOf.js b/node_modules/async/eachOf.js new file mode 100644 index 0000000..c22614f --- /dev/null +++ b/node_modules/async/eachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfLimit.js b/node_modules/async/eachOfLimit.js new file mode 100644 index 0000000..e9fc4db --- /dev/null +++ b/node_modules/async/eachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfSeries.js b/node_modules/async/eachOfSeries.js new file mode 100644 index 0000000..cfb0f33 --- /dev/null +++ b/node_modules/async/eachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. 
+ * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachSeries.js b/node_modules/async/eachSeries.js new file mode 100644 index 0000000..d674d0c --- /dev/null +++ b/node_modules/async/eachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. 
+ + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/ensureAsync.js b/node_modules/async/ensureAsync.js new file mode 100644 index 0000000..ad8beb5 --- /dev/null +++ b/node_modules/async/ensureAsync.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = ensureAsync; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. 
ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if ((0, _wrapAsync.isAsync)(fn)) return fn; + return function (...args /*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + (0, _setImmediate2.default)(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/every.js b/node_modules/async/every.js new file mode 100644 index 0000000..148db68 --- /dev/null +++ b/node_modules/async/every.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var 
_awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * 
.then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everyLimit.js b/node_modules/async/everyLimit.js new file mode 100644 index 0000000..25b2c08 --- /dev/null +++ b/node_modules/async/everyLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everySeries.js b/node_modules/async/everySeries.js new file mode 100644 index 0000000..147c3dc --- /dev/null +++ b/node_modules/async/everySeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && 
obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filter.js b/node_modules/async/filter.js new file mode 100644 index 0000000..303dc1f --- /dev/null +++ b/node_modules/async/filter.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. 
This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing 
files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterLimit.js b/node_modules/async/filterLimit.js new file mode 100644 index 0000000..89e55f5 --- /dev/null +++ b/node_modules/async/filterLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterSeries.js b/node_modules/async/filterSeries.js new file mode 100644 index 0000000..a045e52 --- /dev/null +++ b/node_modules/async/filterSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/find.js b/node_modules/async/find.js new file mode 100644 index 0000000..05b2e5c --- /dev/null +++ b/node_modules/async/find.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findLimit.js b/node_modules/async/findLimit.js new file mode 100644 index 
0000000..db6961e --- /dev/null +++ b/node_modules/async/findLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findSeries.js b/node_modules/async/findSeries.js new file mode 100644 index 0000000..b9131b4 --- /dev/null +++ b/node_modules/async/findSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. 
Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMap.js b/node_modules/async/flatMap.js new file mode 100644 index 0000000..8eed1ac --- /dev/null +++ b/node_modules/async/flatMap.js @@ -0,0 +1,115 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * 
} + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMapLimit.js b/node_modules/async/flatMapLimit.js new file mode 100644 index 0000000..3d170f1 --- /dev/null +++ b/node_modules/async/flatMapLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMapSeries.js b/node_modules/async/flatMapSeries.js new file mode 100644 index 0000000..84add3b --- /dev/null +++ b/node_modules/async/flatMapSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. 
Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldl.js b/node_modules/async/foldl.js new file mode 100644 index 0000000..56e2db8 --- /dev/null +++ b/node_modules/async/foldl.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); 
+ * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldr.js b/node_modules/async/foldr.js new file mode 100644 index 0000000..bee5391 --- /dev/null +++ b/node_modules/async/foldr.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. 
+ * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEach.js b/node_modules/async/forEach.js new file mode 100644 index 0000000..405d495 --- /dev/null +++ b/node_modules/async/forEach.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt 
could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachLimit.js b/node_modules/async/forEachLimit.js new file mode 100644 index 0000000..5f3d009 --- /dev/null +++ b/node_modules/async/forEachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOf.js b/node_modules/async/forEachOf.js new file mode 100644 index 0000000..c22614f --- /dev/null +++ b/node_modules/async/forEachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? 
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfLimit.js b/node_modules/async/forEachOfLimit.js new file mode 100644 index 0000000..e9fc4db --- /dev/null +++ b/node_modules/async/forEachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfSeries.js b/node_modules/async/forEachOfSeries.js new file mode 100644 index 0000000..cfb0f33 --- /dev/null +++ b/node_modules/async/forEachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachSeries.js b/node_modules/async/forEachSeries.js new file mode 100644 index 0000000..d674d0c --- /dev/null +++ b/node_modules/async/forEachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forever.js b/node_modules/async/forever.js new file mode 100644 index 0000000..2c8d5b8 --- /dev/null +++ b/node_modules/async/forever.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _ensureAsync = require('./ensureAsync.js'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). 
+ * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ +function forever(fn, errback) { + var done = (0, _onlyOnce2.default)(errback); + var task = (0, _wrapAsync2.default)((0, _ensureAsync2.default)(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); +} +exports.default = (0, _awaitify2.default)(forever, 2); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBy.js b/node_modules/async/groupBy.js new file mode 100644 index 0000000..6bb52aa --- /dev/null +++ b/node_modules/async/groupBy.js @@ -0,0 +1,108 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBy; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. 
For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 
'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function groupBy(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupByLimit.js b/node_modules/async/groupByLimit.js new file mode 100644 index 0000000..5766d6e --- /dev/null +++ b/node_modules/async/groupByLimit.js @@ -0,0 +1,71 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ +function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, { key, val }); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var { hasOwnProperty } = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var { key } = mapResults[i]; + var { val } = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); +} + +exports.default = (0, _awaitify2.default)(groupByLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBySeries.js b/node_modules/async/groupBySeries.js new file mode 100644 index 0000000..6056743 --- /dev/null +++ 
b/node_modules/async/groupBySeries.js @@ -0,0 +1,36 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBySeries; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ +function groupBySeries(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/index.js b/node_modules/async/index.js new file mode 100644 index 0000000..ce647d5 --- /dev/null +++ b/node_modules/async/index.js @@ -0,0 +1,588 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.doDuring = exports.during = exports.wrapSync = undefined; +exports.selectSeries = exports.selectLimit = exports.select = exports.foldr = exports.foldl = exports.inject = exports.forEachOfLimit = exports.forEachOfSeries = exports.forEachOf = exports.forEachLimit = exports.forEachSeries = exports.forEach = exports.flatMapSeries = exports.flatMapLimit = exports.flatMap = exports.findSeries = exports.findLimit = exports.find = exports.anySeries = exports.anyLimit = exports.any = exports.allSeries = exports.allLimit = exports.all = exports.whilst = exports.waterfall = exports.until = exports.unmemoize = exports.tryEach = exports.transform = exports.timesSeries = exports.timesLimit = exports.times = exports.timeout = exports.sortBy = exports.someSeries = exports.someLimit = exports.some = exports.setImmediate = exports.series = exports.seq = exports.retryable = exports.retry = exports.rejectSeries = exports.rejectLimit = exports.reject = exports.reflectAll = exports.reflect = exports.reduceRight = exports.reduce = exports.race = exports.queue = exports.priorityQueue = exports.parallelLimit = exports.parallel = exports.nextTick = exports.memoize = exports.mapValuesSeries = exports.mapValuesLimit = exports.mapValues = exports.mapSeries = exports.mapLimit = exports.map = exports.log = exports.groupBySeries = exports.groupByLimit = exports.groupBy = exports.forever = exports.filterSeries = exports.filterLimit = exports.filter = exports.everySeries = 
exports.everyLimit = exports.every = exports.ensureAsync = exports.eachSeries = exports.eachOfSeries = exports.eachOfLimit = exports.eachOf = exports.eachLimit = exports.each = exports.doWhilst = exports.doUntil = exports.dir = exports.detectSeries = exports.detectLimit = exports.detect = exports.constant = exports.concatSeries = exports.concatLimit = exports.concat = exports.compose = exports.cargoQueue = exports.cargo = exports.autoInject = exports.auto = exports.asyncify = exports.applyEachSeries = exports.applyEach = exports.apply = undefined; + +var _apply = require('./apply'); + +var _apply2 = _interopRequireDefault(_apply); + +var _applyEach = require('./applyEach'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _applyEachSeries = require('./applyEachSeries'); + +var _applyEachSeries2 = _interopRequireDefault(_applyEachSeries); + +var _asyncify = require('./asyncify'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +var _auto = require('./auto'); + +var _auto2 = _interopRequireDefault(_auto); + +var _autoInject = require('./autoInject'); + +var _autoInject2 = _interopRequireDefault(_autoInject); + +var _cargo = require('./cargo'); + +var _cargo2 = _interopRequireDefault(_cargo); + +var _cargoQueue = require('./cargoQueue'); + +var _cargoQueue2 = _interopRequireDefault(_cargoQueue); + +var _compose = require('./compose'); + +var _compose2 = _interopRequireDefault(_compose); + +var _concat = require('./concat'); + +var _concat2 = _interopRequireDefault(_concat); + +var _concatLimit = require('./concatLimit'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _concatSeries = require('./concatSeries'); + +var _concatSeries2 = _interopRequireDefault(_concatSeries); + +var _constant = require('./constant'); + +var _constant2 = _interopRequireDefault(_constant); + +var _detect = require('./detect'); + +var _detect2 = _interopRequireDefault(_detect); + +var _detectLimit = require('./detectLimit'); + +var _detectLimit2 = 
_interopRequireDefault(_detectLimit); + +var _detectSeries = require('./detectSeries'); + +var _detectSeries2 = _interopRequireDefault(_detectSeries); + +var _dir = require('./dir'); + +var _dir2 = _interopRequireDefault(_dir); + +var _doUntil = require('./doUntil'); + +var _doUntil2 = _interopRequireDefault(_doUntil); + +var _doWhilst = require('./doWhilst'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _each = require('./each'); + +var _each2 = _interopRequireDefault(_each); + +var _eachLimit = require('./eachLimit'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _eachOf = require('./eachOf'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _eachOfLimit = require('./eachOfLimit'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _eachOfSeries = require('./eachOfSeries'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _eachSeries = require('./eachSeries'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _ensureAsync = require('./ensureAsync'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _every = require('./every'); + +var _every2 = _interopRequireDefault(_every); + +var _everyLimit = require('./everyLimit'); + +var _everyLimit2 = _interopRequireDefault(_everyLimit); + +var _everySeries = require('./everySeries'); + +var _everySeries2 = _interopRequireDefault(_everySeries); + +var _filter = require('./filter'); + +var _filter2 = _interopRequireDefault(_filter); + +var _filterLimit = require('./filterLimit'); + +var _filterLimit2 = _interopRequireDefault(_filterLimit); + +var _filterSeries = require('./filterSeries'); + +var _filterSeries2 = _interopRequireDefault(_filterSeries); + +var _forever = require('./forever'); + +var _forever2 = _interopRequireDefault(_forever); + +var _groupBy = require('./groupBy'); + +var _groupBy2 = _interopRequireDefault(_groupBy); + +var _groupByLimit = require('./groupByLimit'); + +var _groupByLimit2 = 
_interopRequireDefault(_groupByLimit); + +var _groupBySeries = require('./groupBySeries'); + +var _groupBySeries2 = _interopRequireDefault(_groupBySeries); + +var _log = require('./log'); + +var _log2 = _interopRequireDefault(_log); + +var _map = require('./map'); + +var _map2 = _interopRequireDefault(_map); + +var _mapLimit = require('./mapLimit'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _mapSeries = require('./mapSeries'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +var _mapValues = require('./mapValues'); + +var _mapValues2 = _interopRequireDefault(_mapValues); + +var _mapValuesLimit = require('./mapValuesLimit'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +var _mapValuesSeries = require('./mapValuesSeries'); + +var _mapValuesSeries2 = _interopRequireDefault(_mapValuesSeries); + +var _memoize = require('./memoize'); + +var _memoize2 = _interopRequireDefault(_memoize); + +var _nextTick = require('./nextTick'); + +var _nextTick2 = _interopRequireDefault(_nextTick); + +var _parallel = require('./parallel'); + +var _parallel2 = _interopRequireDefault(_parallel); + +var _parallelLimit = require('./parallelLimit'); + +var _parallelLimit2 = _interopRequireDefault(_parallelLimit); + +var _priorityQueue = require('./priorityQueue'); + +var _priorityQueue2 = _interopRequireDefault(_priorityQueue); + +var _queue = require('./queue'); + +var _queue2 = _interopRequireDefault(_queue); + +var _race = require('./race'); + +var _race2 = _interopRequireDefault(_race); + +var _reduce = require('./reduce'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _reduceRight = require('./reduceRight'); + +var _reduceRight2 = _interopRequireDefault(_reduceRight); + +var _reflect = require('./reflect'); + +var _reflect2 = _interopRequireDefault(_reflect); + +var _reflectAll = require('./reflectAll'); + +var _reflectAll2 = _interopRequireDefault(_reflectAll); + +var _reject = require('./reject'); + +var _reject2 = 
_interopRequireDefault(_reject); + +var _rejectLimit = require('./rejectLimit'); + +var _rejectLimit2 = _interopRequireDefault(_rejectLimit); + +var _rejectSeries = require('./rejectSeries'); + +var _rejectSeries2 = _interopRequireDefault(_rejectSeries); + +var _retry = require('./retry'); + +var _retry2 = _interopRequireDefault(_retry); + +var _retryable = require('./retryable'); + +var _retryable2 = _interopRequireDefault(_retryable); + +var _seq = require('./seq'); + +var _seq2 = _interopRequireDefault(_seq); + +var _series = require('./series'); + +var _series2 = _interopRequireDefault(_series); + +var _setImmediate = require('./setImmediate'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _some = require('./some'); + +var _some2 = _interopRequireDefault(_some); + +var _someLimit = require('./someLimit'); + +var _someLimit2 = _interopRequireDefault(_someLimit); + +var _someSeries = require('./someSeries'); + +var _someSeries2 = _interopRequireDefault(_someSeries); + +var _sortBy = require('./sortBy'); + +var _sortBy2 = _interopRequireDefault(_sortBy); + +var _timeout = require('./timeout'); + +var _timeout2 = _interopRequireDefault(_timeout); + +var _times = require('./times'); + +var _times2 = _interopRequireDefault(_times); + +var _timesLimit = require('./timesLimit'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +var _timesSeries = require('./timesSeries'); + +var _timesSeries2 = _interopRequireDefault(_timesSeries); + +var _transform = require('./transform'); + +var _transform2 = _interopRequireDefault(_transform); + +var _tryEach = require('./tryEach'); + +var _tryEach2 = _interopRequireDefault(_tryEach); + +var _unmemoize = require('./unmemoize'); + +var _unmemoize2 = _interopRequireDefault(_unmemoize); + +var _until = require('./until'); + +var _until2 = _interopRequireDefault(_until); + +var _waterfall = require('./waterfall'); + +var _waterfall2 = _interopRequireDefault(_waterfall); + +var _whilst = 
require('./whilst'); + +var _whilst2 = _interopRequireDefault(_whilst); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. 
Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + +/** + * Async is a utility module which provides straight-forward, powerful functions + * for working with asynchronous JavaScript. Although originally designed for + * use with [Node.js](http://nodejs.org) and installable via + * `npm install --save async`, it can also be used directly in the browser. + * @module async + * @see AsyncFunction + */ + +/** + * A collection of `async` functions for manipulating collections, such as + * arrays and objects. + * @module Collections + */ + +/** + * A collection of `async` functions for controlling the flow through a script. + * @module ControlFlow + */ + +/** + * A collection of `async` utility functions. 
+ * @module Utils + */ + +exports.default = { + apply: _apply2.default, + applyEach: _applyEach2.default, + applyEachSeries: _applyEachSeries2.default, + asyncify: _asyncify2.default, + auto: _auto2.default, + autoInject: _autoInject2.default, + cargo: _cargo2.default, + cargoQueue: _cargoQueue2.default, + compose: _compose2.default, + concat: _concat2.default, + concatLimit: _concatLimit2.default, + concatSeries: _concatSeries2.default, + constant: _constant2.default, + detect: _detect2.default, + detectLimit: _detectLimit2.default, + detectSeries: _detectSeries2.default, + dir: _dir2.default, + doUntil: _doUntil2.default, + doWhilst: _doWhilst2.default, + each: _each2.default, + eachLimit: _eachLimit2.default, + eachOf: _eachOf2.default, + eachOfLimit: _eachOfLimit2.default, + eachOfSeries: _eachOfSeries2.default, + eachSeries: _eachSeries2.default, + ensureAsync: _ensureAsync2.default, + every: _every2.default, + everyLimit: _everyLimit2.default, + everySeries: _everySeries2.default, + filter: _filter2.default, + filterLimit: _filterLimit2.default, + filterSeries: _filterSeries2.default, + forever: _forever2.default, + groupBy: _groupBy2.default, + groupByLimit: _groupByLimit2.default, + groupBySeries: _groupBySeries2.default, + log: _log2.default, + map: _map2.default, + mapLimit: _mapLimit2.default, + mapSeries: _mapSeries2.default, + mapValues: _mapValues2.default, + mapValuesLimit: _mapValuesLimit2.default, + mapValuesSeries: _mapValuesSeries2.default, + memoize: _memoize2.default, + nextTick: _nextTick2.default, + parallel: _parallel2.default, + parallelLimit: _parallelLimit2.default, + priorityQueue: _priorityQueue2.default, + queue: _queue2.default, + race: _race2.default, + reduce: _reduce2.default, + reduceRight: _reduceRight2.default, + reflect: _reflect2.default, + reflectAll: _reflectAll2.default, + reject: _reject2.default, + rejectLimit: _rejectLimit2.default, + rejectSeries: _rejectSeries2.default, + retry: _retry2.default, + retryable: 
_retryable2.default, + seq: _seq2.default, + series: _series2.default, + setImmediate: _setImmediate2.default, + some: _some2.default, + someLimit: _someLimit2.default, + someSeries: _someSeries2.default, + sortBy: _sortBy2.default, + timeout: _timeout2.default, + times: _times2.default, + timesLimit: _timesLimit2.default, + timesSeries: _timesSeries2.default, + transform: _transform2.default, + tryEach: _tryEach2.default, + unmemoize: _unmemoize2.default, + until: _until2.default, + waterfall: _waterfall2.default, + whilst: _whilst2.default, + + // aliases + all: _every2.default, + allLimit: _everyLimit2.default, + allSeries: _everySeries2.default, + any: _some2.default, + anyLimit: _someLimit2.default, + anySeries: _someSeries2.default, + find: _detect2.default, + findLimit: _detectLimit2.default, + findSeries: _detectSeries2.default, + flatMap: _concat2.default, + flatMapLimit: _concatLimit2.default, + flatMapSeries: _concatSeries2.default, + forEach: _each2.default, + forEachSeries: _eachSeries2.default, + forEachLimit: _eachLimit2.default, + forEachOf: _eachOf2.default, + forEachOfSeries: _eachOfSeries2.default, + forEachOfLimit: _eachOfLimit2.default, + inject: _reduce2.default, + foldl: _reduce2.default, + foldr: _reduceRight2.default, + select: _filter2.default, + selectLimit: _filterLimit2.default, + selectSeries: _filterSeries2.default, + wrapSync: _asyncify2.default, + during: _whilst2.default, + doDuring: _doWhilst2.default +}; +exports.apply = _apply2.default; +exports.applyEach = _applyEach2.default; +exports.applyEachSeries = _applyEachSeries2.default; +exports.asyncify = _asyncify2.default; +exports.auto = _auto2.default; +exports.autoInject = _autoInject2.default; +exports.cargo = _cargo2.default; +exports.cargoQueue = _cargoQueue2.default; +exports.compose = _compose2.default; +exports.concat = _concat2.default; +exports.concatLimit = _concatLimit2.default; +exports.concatSeries = _concatSeries2.default; +exports.constant = _constant2.default; 
+exports.detect = _detect2.default; +exports.detectLimit = _detectLimit2.default; +exports.detectSeries = _detectSeries2.default; +exports.dir = _dir2.default; +exports.doUntil = _doUntil2.default; +exports.doWhilst = _doWhilst2.default; +exports.each = _each2.default; +exports.eachLimit = _eachLimit2.default; +exports.eachOf = _eachOf2.default; +exports.eachOfLimit = _eachOfLimit2.default; +exports.eachOfSeries = _eachOfSeries2.default; +exports.eachSeries = _eachSeries2.default; +exports.ensureAsync = _ensureAsync2.default; +exports.every = _every2.default; +exports.everyLimit = _everyLimit2.default; +exports.everySeries = _everySeries2.default; +exports.filter = _filter2.default; +exports.filterLimit = _filterLimit2.default; +exports.filterSeries = _filterSeries2.default; +exports.forever = _forever2.default; +exports.groupBy = _groupBy2.default; +exports.groupByLimit = _groupByLimit2.default; +exports.groupBySeries = _groupBySeries2.default; +exports.log = _log2.default; +exports.map = _map2.default; +exports.mapLimit = _mapLimit2.default; +exports.mapSeries = _mapSeries2.default; +exports.mapValues = _mapValues2.default; +exports.mapValuesLimit = _mapValuesLimit2.default; +exports.mapValuesSeries = _mapValuesSeries2.default; +exports.memoize = _memoize2.default; +exports.nextTick = _nextTick2.default; +exports.parallel = _parallel2.default; +exports.parallelLimit = _parallelLimit2.default; +exports.priorityQueue = _priorityQueue2.default; +exports.queue = _queue2.default; +exports.race = _race2.default; +exports.reduce = _reduce2.default; +exports.reduceRight = _reduceRight2.default; +exports.reflect = _reflect2.default; +exports.reflectAll = _reflectAll2.default; +exports.reject = _reject2.default; +exports.rejectLimit = _rejectLimit2.default; +exports.rejectSeries = _rejectSeries2.default; +exports.retry = _retry2.default; +exports.retryable = _retryable2.default; +exports.seq = _seq2.default; +exports.series = _series2.default; +exports.setImmediate = 
_setImmediate2.default; +exports.some = _some2.default; +exports.someLimit = _someLimit2.default; +exports.someSeries = _someSeries2.default; +exports.sortBy = _sortBy2.default; +exports.timeout = _timeout2.default; +exports.times = _times2.default; +exports.timesLimit = _timesLimit2.default; +exports.timesSeries = _timesSeries2.default; +exports.transform = _transform2.default; +exports.tryEach = _tryEach2.default; +exports.unmemoize = _unmemoize2.default; +exports.until = _until2.default; +exports.waterfall = _waterfall2.default; +exports.whilst = _whilst2.default; +exports.all = _every2.default; +exports.allLimit = _everyLimit2.default; +exports.allSeries = _everySeries2.default; +exports.any = _some2.default; +exports.anyLimit = _someLimit2.default; +exports.anySeries = _someSeries2.default; +exports.find = _detect2.default; +exports.findLimit = _detectLimit2.default; +exports.findSeries = _detectSeries2.default; +exports.flatMap = _concat2.default; +exports.flatMapLimit = _concatLimit2.default; +exports.flatMapSeries = _concatSeries2.default; +exports.forEach = _each2.default; +exports.forEachSeries = _eachSeries2.default; +exports.forEachLimit = _eachLimit2.default; +exports.forEachOf = _eachOf2.default; +exports.forEachOfSeries = _eachOfSeries2.default; +exports.forEachOfLimit = _eachOfLimit2.default; +exports.inject = _reduce2.default; +exports.foldl = _reduce2.default; +exports.foldr = _reduceRight2.default; +exports.select = _filter2.default; +exports.selectLimit = _filterLimit2.default; +exports.selectSeries = _filterSeries2.default; +exports.wrapSync = _asyncify2.default; +exports.during = _whilst2.default; +exports.doDuring = _doWhilst2.default; \ No newline at end of file diff --git a/node_modules/async/inject.js b/node_modules/async/inject.js new file mode 100644 index 0000000..56e2db8 --- /dev/null +++ b/node_modules/async/inject.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var 
_eachOfSeries = require('./eachOfSeries.js');

var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries);

var _once = require('./internal/once.js');

var _once2 = _interopRequireDefault(_once);

var _wrapAsync = require('./internal/wrapAsync.js');

var _wrapAsync2 = _interopRequireDefault(_wrapAsync);

var _awaitify = require('./internal/awaitify.js');

var _awaitify2 = _interopRequireDefault(_awaitify);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/**
 * Reduces `coll` into a single value using an async `iteratee` to produce
 * each successive step. `memo` is the initial state of the reduction. This
 * function only operates in series; if each step does not truly need to be
 * async, a parallel `map` followed by a plain `Array.prototype.reduce` is
 * usually faster.
 *
 * @name reduce
 * @static
 * @memberOf module:Collections
 * @method
 * @alias inject
 * @alias foldl
 * @category Collection
 * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
 * @param {*} memo - The initial state of the reduction.
 * @param {AsyncFunction} iteratee - Applied to each item to produce the next
 * state of the reduction. Invoked with (memo, item, callback); completing
 * with an error stops the reduction and reports it to `callback`.
 * @param {Function} [callback] - Called when all items are processed.
 * Invoked with (err, result) where `result` is the final reduced value.
 * @returns {Promise} a promise, if no callback is passed
 */
function reduce(coll, memo, iteratee, callback) {
    callback = (0, _once2.default)(callback);
    const step = (0, _wrapAsync2.default)(iteratee);
    // Strictly serial walk: each step sees the accumulator produced by the
    // previous one, and the final accumulator is handed to the callback.
    return (0, _eachOfSeries2.default)(coll, (item, key, next) => {
        step(memo, item, (err, nextMemo) => {
            memo = nextMemo;
            next(err);
        });
    }, err => callback(err, memo));
}
exports.default = (0, _awaitify2.default)(reduce, 4);
module.exports = exports['default'];
+class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next;else this.head = node.next; + if (node.next) node.next.prev = node.prev;else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty() { + while (this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode;else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode;else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node);else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node);else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator]() { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove(testFn) { + var curr = this.head; + while (curr) { + var { next } = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +exports.default = DLL; +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/Heap.js b/node_modules/async/internal/Heap.js new file mode 100644 index 0000000..80762fe --- /dev/null +++ b/node_modules/async/internal/Heap.js @@ -0,0 +1,120 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// Binary min-heap implementation used for priority queue. 
// Implementation is stable: ties on `priority` are broken by insertion
// order via a monotonically increasing pushCount stamped onto each node.
class Heap {
    constructor() {
        this.heap = [];
        this.pushCount = Number.MIN_SAFE_INTEGER;
    }

    get length() {
        return this.heap.length;
    }

    empty() {
        this.heap = [];
        return this;
    }

    // Bubble the node at `index` up while it is smaller than its parent.
    percUp(index) {
        while (index > 0) {
            const p = parentIndex(index);
            if (!isSmaller(this.heap[index], this.heap[p])) break;
            [this.heap[index], this.heap[p]] = [this.heap[p], this.heap[index]];
            index = p;
        }
    }

    // Sink the node at `index` down below its smallest child.
    percDown(index) {
        for (;;) {
            let child = leftChild(index);
            if (child >= this.heap.length) break;
            if (child + 1 < this.heap.length && isSmaller(this.heap[child + 1], this.heap[child])) {
                child += 1; // right child is the smaller of the two
            }
            if (isSmaller(this.heap[index], this.heap[child])) break;
            [this.heap[index], this.heap[child]] = [this.heap[child], this.heap[index]];
            index = child;
        }
    }

    push(node) {
        node.pushCount = ++this.pushCount; // tie-breaker for equal priorities
        this.heap.push(node);
        this.percUp(this.heap.length - 1);
    }

    // NOTE: intentionally behaves like push; kept for API parity with the
    // list-backed queue.
    unshift(node) {
        return this.heap.push(node);
    }

    // Remove and return the smallest node.
    shift() {
        const [top] = this.heap;

        this.heap[0] = this.heap[this.heap.length - 1];
        this.heap.pop();
        this.percDown(0);

        return top;
    }

    toArray() {
        return [...this];
    }

    *[Symbol.iterator]() {
        for (const node of this.heap) {
            yield node.data;
        }
    }

    // Drop every node matching `testFn` (compacting in place), then
    // restore the heap property bottom-up.
    remove(testFn) {
        let kept = 0;
        for (let i = 0; i < this.heap.length; i++) {
            if (!testFn(this.heap[i])) {
                this.heap[kept] = this.heap[i];
                kept++;
            }
        }

        this.heap.splice(kept);

        for (let i = parentIndex(this.heap.length - 1); i >= 0; i--) {
            this.percDown(i);
        }

        return this;
    }
}

function leftChild(i) {
    return (i << 1) + 1;
}

function parentIndex(i) {
    return ((i + 1) >> 1) - 1;
}

// Order by priority first, then by insertion order for stability.
function isSmaller(x, y) {
    if (x.priority !== y.priority) {
        return x.priority < y.priority;
    }
    return x.pushCount < y.pushCount;
}

/* CommonJS export, guarded so the class also parses/loads as plain ESM */
if (typeof module === 'object' && module.exports) {
    exports.default = Heap;
    module.exports = exports["default"];
}
a/node_modules/async/internal/applyEach.js b/node_modules/async/internal/applyEach.js new file mode 100644 index 0000000..a3f4ef1 --- /dev/null +++ b/node_modules/async/internal/applyEach.js @@ -0,0 +1,29 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = (0, _awaitify2.default)(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + (0, _wrapAsync2.default)(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +}; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/asyncEachOfLimit.js b/node_modules/async/internal/asyncEachOfLimit.js new file mode 100644 index 0000000..bba74c7 --- /dev/null +++ b/node_modules/async/internal/asyncEachOfLimit.js @@ -0,0 +1,75 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncEachOfLimit; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return; + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({ value, done: iterDone }) => { + //console.log('got value', value) + if (canceled || done) return; + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return; + if (err) return handleError(err); + + if (err === false) { + done = true; + canceled = true; + return; + } + + if (result === _breakLoop2.default || done && running <= 0) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return; + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/awaitify.js b/node_modules/async/internal/awaitify.js new file mode 100644 index 0000000..7b36f1a --- /dev/null +++ b/node_modules/async/internal/awaitify.js @@ -0,0 +1,27 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = awaitify; +// conditionally promisify a function. 
// only return a promise if a callback is omitted
function awaitify(asyncFn, arity = asyncFn.length) {
    if (!arity) throw new Error('arity is undefined');

    function awaitable(...args) {
        // Callback supplied in the final slot: behave like a plain
        // callback-style call and forward everything untouched.
        if (typeof args[arity - 1] === 'function') {
            return asyncFn.apply(this, args);
        }

        // No callback: synthesize one wired to a Promise. A single callback
        // result resolves to the bare value; multiple results resolve to an
        // array of them.
        return new Promise((resolve, reject) => {
            args[arity - 1] = (err, ...cbArgs) => {
                if (err) return reject(err);
                resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]);
            };
            asyncFn.apply(this, args);
        });
    }

    return awaitable;
}
/* CommonJS export, guarded so the function also parses/loads as plain ESM */
if (typeof module === 'object' && module.exports) {
    module.exports = exports['default'];
}
obj : { default: obj }; } + +function consoleFunc(name) { + return (fn, ...args) => (0, _wrapAsync2.default)(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { + /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/createTester.js b/node_modules/async/internal/createTester.js new file mode 100644 index 0000000..7b2d734 --- /dev/null +++ b/node_modules/async/internal/createTester.js @@ -0,0 +1,40 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _createTester; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = (0, _wrapAsync2.default)(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, _breakLoop2.default); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? 
testResult : getResult(false)); + }); + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/eachOfLimit.js b/node_modules/async/internal/eachOfLimit.js new file mode 100644 index 0000000..fc26b20 --- /dev/null +++ b/node_modules/async/internal/eachOfLimit.js @@ -0,0 +1,90 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _iterator = require('./iterator.js'); + +var _iterator2 = _interopRequireDefault(_iterator); + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./wrapAsync.js'); + +var _asyncEachOfLimit = require('./asyncEachOfLimit.js'); + +var _asyncEachOfLimit2 = _interopRequireDefault(_asyncEachOfLimit); + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +exports.default = limit => { + return (obj, iteratee, callback) => { + callback = (0, _once2.default)(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1'); + } + if (!obj) { + return callback(null); + } + if ((0, _wrapAsync.isAsyncGenerator)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj, limit, iteratee, callback); + } + if ((0, _wrapAsync.isAsyncIterable)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj[Symbol.asyncIterator](), limit, iteratee, callback); + } + var nextElem = (0, _iterator2.default)(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return; + running -= 1; + if (err) { + done = true; + callback(err); + } else if (err === false) { + done = true; + canceled = true; + } else if (value === _breakLoop2.default || done && running <= 0) { + done = true; + return callback(null); + } else if (!looping) { + replenish(); + } + } + + function replenish() { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, (0, _onlyOnce2.default)(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/filter.js b/node_modules/async/internal/filter.js new file mode 100644 index 0000000..aef2b9d --- /dev/null +++ b/node_modules/async/internal/filter.js @@ -0,0 +1,55 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _filter; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({ index, value: x }); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results.sort((a, b) => a.index - b.index).map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = (0, _isArrayLike2.default)(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, (0, _wrapAsync2.default)(iteratee), callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/getIterator.js b/node_modules/async/internal/getIterator.js new file mode 100644 index 0000000..830a545 --- /dev/null +++ b/node_modules/async/internal/getIterator.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +}; + +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/initialParams.js b/node_modules/async/internal/initialParams.js new file mode 100644 index 0000000..245378c --- /dev/null +++ b/node_modules/async/internal/initialParams.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn) { + 
return function (...args /*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +}; + +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/isArrayLike.js b/node_modules/async/internal/isArrayLike.js new file mode 100644 index 0000000..ce07670 --- /dev/null +++ b/node_modules/async/internal/isArrayLike.js @@ -0,0 +1,10 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isArrayLike; +function isArrayLike(value) { + return value && typeof value.length === 'number' && value.length >= 0 && value.length % 1 === 0; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/iterator.js b/node_modules/async/internal/iterator.js new file mode 100644 index 0000000..90b0223 --- /dev/null +++ b/node_modules/async/internal/iterator.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = createIterator; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _getIterator = require('./getIterator.js'); + +var _getIterator2 = _interopRequireDefault(_getIterator); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? { value: coll[i], key: i } : null; + }; +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) return null; + i++; + return { value: item.value, key: i }; + }; +} + +function createObjectIterator(obj) { + var okeys = obj ? 
Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? { value: obj[key], key } : null; + }; +} + +function createIterator(coll) { + if ((0, _isArrayLike2.default)(coll)) { + return createArrayIterator(coll); + } + + var iterator = (0, _getIterator2.default)(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/map.js b/node_modules/async/internal/map.js new file mode 100644 index 0000000..af3fd09 --- /dev/null +++ b/node_modules/async/internal/map.js @@ -0,0 +1,30 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _asyncMap; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/once.js b/node_modules/async/internal/once.js new file mode 100644 index 0000000..49f3727 --- /dev/null +++ b/node_modules/async/internal/once.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = once; +function once(fn) { + function wrapper(...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/onlyOnce.js b/node_modules/async/internal/onlyOnce.js new file mode 100644 index 0000000..6ad721b --- /dev/null +++ b/node_modules/async/internal/onlyOnce.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = onlyOnce; +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/parallel.js b/node_modules/async/internal/parallel.js new file mode 100644 index 0000000..75741bb --- /dev/null +++ b/node_modules/async/internal/parallel.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = 
_interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +exports.default = (0, _awaitify2.default)((eachfn, tasks, callback) => { + var results = (0, _isArrayLike2.default)(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/promiseCallback.js b/node_modules/async/internal/promiseCallback.js new file mode 100644 index 0000000..17a8301 --- /dev/null +++ b/node_modules/async/internal/promiseCallback.js @@ -0,0 +1,23 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback() { + let resolve, reject; + function callback(err, ...args) { + if (err) return reject(err); + resolve(args.length > 1 ? 
args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, reject = rej; + }); + + return callback; +} + +exports.promiseCallback = promiseCallback; +exports.PROMISE_SYMBOL = PROMISE_SYMBOL; \ No newline at end of file diff --git a/node_modules/async/internal/queue.js b/node_modules/async/internal/queue.js new file mode 100644 index 0000000..cbc590d --- /dev/null +++ b/node_modules/async/internal/queue.js @@ -0,0 +1,294 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = queue; + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _setImmediate = require('./setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _DoublyLinkedList = require('./DoublyLinkedList.js'); + +var _DoublyLinkedList2 = _interopRequireDefault(_DoublyLinkedList); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } else if (concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = (0, _wrapAsync2.default)(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on(event, handler) { + events[event].push(handler); + } + + function once(event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off(event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []); + if (!handler) return events[event] = []; + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger(event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback(err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res(); + if (args.length <= 1) return res(args[0]); + res(args); + } + + var item = q._createTaskItem(data, rejectOnError ? 
promiseCallback : callback || promiseCallback); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + (0, _setImmediate2.default)(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= q.concurrency - q.buffer) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + (0, _setImmediate2.default)(() => trigger('drain')); + return true; + } + return false; + } + + const eventMethod = name => handler => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err); + resolve(data); + }); + }); + } + off(name); + on(name, handler); + }; + + var isProcessing = false; + var q = { + _tasks: new _DoublyLinkedList2.default(), + _createTaskItem(data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator]() { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, false, callback)); + } + return _insert(data, false, false, callback); + }, + pushAsync(data, callback) { + 
if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, true, callback)); + } + return _insert(data, false, true, callback); + }, + kill() { + off(); + q._tasks.empty(); + }, + unshift(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, false, callback)); + } + return _insert(data, true, false, callback); + }, + unshiftAsync(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, true, callback)); + } + return _insert(data, true, true, callback); + }, + remove(testFn) { + q._tasks.remove(testFn); + }, + process() { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). + if (isProcessing) { + return; + } + isProcessing = true; + while (!q.paused && numRunning < q.concurrency && q._tasks.length) { + var tasks = [], + data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = (0, _onlyOnce2.default)(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length() { + return q._tasks.length; + }, + running() { + return numRunning; + }, + workersList() { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause() { + q.paused = true; + }, + resume() { + if (q.paused === false) { + return; + } + q.paused = false; + (0, _setImmediate2.default)(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: 
eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + } + }); + return q; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/range.js b/node_modules/async/internal/range.js new file mode 100644 index 0000000..6680e64 --- /dev/null +++ b/node_modules/async/internal/range.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = range; +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/reject.js b/node_modules/async/internal/reject.js new file mode 100644 index 0000000..7388ef4 --- /dev/null +++ b/node_modules/async/internal/reject.js @@ -0,0 +1,26 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reject; + +var _filter = require('./filter.js'); + +var _filter2 = _interopRequireDefault(_filter); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function reject(eachfn, arr, _iteratee, callback) { + const iteratee = (0, _wrapAsync2.default)(_iteratee); + return (0, _filter2.default)(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/setImmediate.js b/node_modules/async/internal/setImmediate.js new file mode 100644 index 0000000..513efd1 --- /dev/null +++ b/node_modules/async/internal/setImmediate.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.fallback = fallback; +exports.wrap = wrap; +/* istanbul ignore file */ + +var hasQueueMicrotask = exports.hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = exports.hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = exports.hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer; + +if (hasQueueMicrotask) { + _defer = queueMicrotask; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else if (hasNextTick) { + _defer = process.nextTick; +} else { + _defer = fallback; +} + +exports.default = wrap(_defer); \ No newline at end of file diff --git a/node_modules/async/internal/withoutIndex.js b/node_modules/async/internal/withoutIndex.js new file mode 100644 index 0000000..ec45fa3 --- /dev/null +++ b/node_modules/async/internal/withoutIndex.js @@ -0,0 +1,10 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _withoutIndex; +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} +module.exports = exports["default"]; \ No newline at end of file diff --git 
a/node_modules/async/internal/wrapAsync.js b/node_modules/async/internal/wrapAsync.js new file mode 100644 index 0000000..ad4d619 --- /dev/null +++ b/node_modules/async/internal/wrapAsync.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isAsyncIterable = exports.isAsyncGenerator = exports.isAsync = undefined; + +var _asyncify = require('../asyncify.js'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function'); + return isAsync(asyncFn) ? (0, _asyncify2.default)(asyncFn) : asyncFn; +} + +exports.default = wrapAsync; +exports.isAsync = isAsync; +exports.isAsyncGenerator = isAsyncGenerator; +exports.isAsyncIterable = isAsyncIterable; \ No newline at end of file diff --git a/node_modules/async/log.js b/node_modules/async/log.js new file mode 100644 index 0000000..8fc1ed5 --- /dev/null +++ b/node_modules/async/log.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. 
+ * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ +exports.default = (0, _consoleFunc2.default)('log'); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/map.js b/node_modules/async/map.js new file mode 100644 index 0000000..ec4135d --- /dev/null +++ b/node_modules/async/map.js @@ -0,0 +1,142 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. 
However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. 
+ * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. 
+ * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map(coll, iteratee, callback) { + return (0, _map3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(map, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapLimit.js b/node_modules/async/mapLimit.js new file mode 100644 index 0000000..b5e461c --- /dev/null +++ b/node_modules/async/mapLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit(coll, limit, iteratee, callback) { + return (0, _map3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapSeries.js b/node_modules/async/mapSeries.js new file mode 100644 index 0000000..91f36bf --- /dev/null +++ b/node_modules/async/mapSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. 
Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries(coll, iteratee, callback) { + return (0, _map3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValues.js b/node_modules/async/mapValues.js new file mode 100644 index 0000000..00da926 --- /dev/null +++ b/node_modules/async/mapValues.js @@ -0,0 +1,152 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValues; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. 
+ * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function mapValues(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesLimit.js b/node_modules/async/mapValuesLimit.js new file mode 100644 index 0000000..93066ee --- /dev/null +++ b/node_modules/async/mapValuesLimit.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = (0, _once2.default)(callback); + var newObj = {}; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfLimit2.default)(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +exports.default = (0, _awaitify2.default)(mapValuesLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesSeries.js b/node_modules/async/mapValuesSeries.js new file mode 100644 index 0000000..560058a --- /dev/null +++ b/node_modules/async/mapValuesSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValuesSeries; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = 
_interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesSeries(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/memoize.js b/node_modules/async/memoize.js new file mode 100644 index 0000000..6003e41 --- /dev/null +++ b/node_modules/async/memoize.js @@ -0,0 +1,91 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = memoize; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = (0, _wrapAsync2.default)(fn); + var memoized = (0, _initialParams2.default)((args, callback) => { + var key = hasher(...args); + if (key in memo) { + (0, _setImmediate2.default)(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/nextTick.js b/node_modules/async/nextTick.js new file mode 100644 index 0000000..e6d321b --- /dev/null +++ b/node_modules/async/nextTick.js @@ -0,0 +1,52 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. 
+ * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer; /* istanbul ignore file */ + + +if (_setImmediate.hasNextTick) { + _defer = process.nextTick; +} else if (_setImmediate.hasSetImmediate) { + _defer = setImmediate; +} else { + _defer = _setImmediate.fallback; +} + +exports.default = (0, _setImmediate.wrap)(_defer); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/package.json b/node_modules/async/package.json new file mode 100644 index 0000000..9c464bc --- /dev/null +++ b/node_modules/async/package.json @@ -0,0 +1,75 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "3.2.4", + "main": "dist/async.js", + "author": "Caolan McMahon", + "homepage": "https://caolan.github.io/async/", + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "bugs": { + "url": "https://github.com/caolan/async/issues" + }, + "keywords": [ + "async", + "callback", + "module", + "utility" + ], + "devDependencies": { + "@babel/eslint-parser": "^7.16.5", + "babel-core": "^6.26.3", + "babel-minify": "^0.5.0", + "babel-plugin-add-module-exports": "^1.0.4", + "babel-plugin-istanbul": "^6.1.1", + "babel-plugin-syntax-async-generators": "^6.13.0", + "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2", + 
"babel-preset-es2015": "^6.3.13", + "babel-preset-es2017": "^6.22.0", + "babel-register": "^6.26.0", + "babelify": "^10.0.0", + "benchmark": "^2.1.1", + "bluebird": "^3.4.6", + "browserify": "^17.0.0", + "chai": "^4.2.0", + "cheerio": "^0.22.0", + "es6-promise": "^4.2.8", + "eslint": "^8.6.0", + "eslint-plugin-prefer-arrow": "^1.2.3", + "fs-extra": "^10.0.0", + "jsdoc": "^3.6.2", + "karma": "^6.3.12", + "karma-browserify": "^8.1.0", + "karma-firefox-launcher": "^2.1.2", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.0", + "karma-safari-launcher": "^1.0.0", + "mocha": "^6.1.4", + "native-promise-only": "^0.8.0-a", + "nyc": "^15.1.0", + "rollup": "^2.66.1", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-npm": "^2.0.0", + "rsvp": "^4.8.5", + "semver": "^7.3.5", + "yargs": "^17.3.1" + }, + "scripts": { + "coverage": "nyc npm run mocha-node-test -- --grep @nycinvalid --invert", + "jsdoc": "jsdoc -c ./support/jsdoc/jsdoc.json && node support/jsdoc/jsdoc-fix-html.js", + "lint": "eslint --fix .", + "mocha-browser-test": "karma start", + "mocha-node-test": "mocha", + "mocha-test": "npm run mocha-node-test && npm run mocha-browser-test", + "test": "npm run lint && npm run mocha-node-test" + }, + "license": "MIT", + "nyc": { + "exclude": [ + "test" + ] + }, + "module": "dist/async.mjs" +} \ No newline at end of file diff --git a/node_modules/async/parallel.js b/node_modules/async/parallel.js new file mode 100644 index 0000000..76bc624 --- /dev/null +++ b/node_modules/async/parallel.js @@ -0,0 +1,180 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallel; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function parallel(tasks, callback) { + return (0, _parallel3.default)(_eachOf2.default, tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/parallelLimit.js b/node_modules/async/parallelLimit.js new file mode 100644 index 0000000..dbe0bb8 --- /dev/null +++ b/node_modules/async/parallelLimit.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallelLimit; + +var _eachOfLimit = 
require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _parallel = require('./internal/parallel.js'); + +var _parallel2 = _interopRequireDefault(_parallel); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + */ +function parallelLimit(tasks, limit, callback) { + return (0, _parallel2.default)((0, _eachOfLimit2.default)(limit), tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/priorityQueue.js b/node_modules/async/priorityQueue.js new file mode 100644 index 0000000..6006f66 --- /dev/null +++ b/node_modules/async/priorityQueue.js @@ -0,0 +1,86 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + // Start with a normal queue + var q = (0, _queue2.default)(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new _Heap2.default(); + q._createTaskItem = ({ data, priority }, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return { data: tasks, priority }; + } + return tasks.map(data => { + return { data, priority }; + }); + } + + // Override push to accept second parameter representing priority + q.push = function (data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function (data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; +}; + +var _queue = require('./queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _Heap = require('./internal/Heap.js'); + +var _Heap2 = _interopRequireDefault(_Heap); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports['default']; + +/** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. 
+ * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. + */ \ No newline at end of file diff --git a/node_modules/async/queue.js b/node_modules/async/queue.js new file mode 100644 index 0000000..c69becb --- /dev/null +++ b/node_modules/async/queue.js @@ -0,0 +1,167 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + var _worker = (0, _wrapAsync2.default)(worker); + return (0, _queue2.default)((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); +}; + +var _queue = require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +module.exports = exports['default']; + +/** + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. 
+ * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. 
+ * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + +/** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. 
Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ \ No newline at end of file diff --git a/node_modules/async/race.js b/node_modules/async/race.js new file mode 100644 index 0000000..9595d88 --- /dev/null +++ b/node_modules/async/race.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. 
Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + (0, _wrapAsync2.default)(tasks[i])(callback); + } +} + +exports.default = (0, _awaitify2.default)(race, 2); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduce.js b/node_modules/async/reduce.js new file mode 100644 index 0000000..56e2db8 --- /dev/null +++ b/node_modules/async/reduce.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var 
_wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { 
+ * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduceRight.js b/node_modules/async/reduceRight.js new file mode 100644 index 0000000..bee5391 --- /dev/null +++ b/node_modules/async/reduceRight.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. 
+ * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflect.js b/node_modules/async/reflect.js new file mode 100644 index 0000000..297ed79 --- /dev/null +++ b/node_modules/async/reflect.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflect; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... 
+ * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = (0, _wrapAsync2.default)(fn); + return (0, _initialParams2.default)(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0) { + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflectAll.js b/node_modules/async/reflectAll.js new file mode 100644 index 0000000..a862ff0 --- /dev/null +++ b/node_modules/async/reflectAll.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflectAll; + +var _reflect = require('./reflect.js'); + +var _reflect2 = _interopRequireDefault(_reflect); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. 
+ * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... + * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(_reflect2.default); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = _reflect2.default.call(this, tasks[key]); + }); + } + return results; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reject.js b/node_modules/async/reject.js new file mode 100644 index 0000000..cabd96e --- /dev/null +++ b/node_modules/async/reject.js @@ -0,0 +1,87 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var 
_eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async 
() => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function reject(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(reject, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectLimit.js b/node_modules/async/rejectLimit.js new file mode 100644 index 0000000..1a89925 --- /dev/null +++ b/node_modules/async/rejectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit(coll, limit, iteratee, callback) { + return (0, _reject3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectSeries.js b/node_modules/async/rejectSeries.js new file mode 100644 index 0000000..6e1a1c5 --- /dev/null +++ b/node_modules/async/rejectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retry.js b/node_modules/async/retry.js new file mode 100644 index 0000000..dba3030 --- /dev/null +++ b/node_modules/async/retry.js @@ -0,0 +1,159 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retry; + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function constant(value) { + return function () { + return value; + }; +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. 
If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || (0, _promiseCallback.promiseCallback)(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || (0, _promiseCallback.promiseCallback)(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = (0, _wrapAsync2.default)(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return; + if (err && attempt++ < options.times && (typeof options.errorFilter != 'function' || options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + 
callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? t.interval : constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retryable.js b/node_modules/async/retryable.js new file mode 100644 index 0000000..1b1147c --- /dev/null +++ b/node_modules/async/retryable.js @@ -0,0 +1,77 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retryable; + +var _retry = require('./retry.js'); + +var _retry2 = _interopRequireDefault(_retry); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. 
+ * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. + * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable(opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = opts && opts.arity || task.length; + if ((0, _wrapAsync.isAsync)(task)) { + arity += 1; + } + var _task = (0, _wrapAsync2.default)(task); + return (0, _initialParams2.default)((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = (0, _promiseCallback.promiseCallback)(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) (0, _retry2.default)(opts, taskFn, callback);else (0, _retry2.default)(taskFn, callback); + + return callback[_promiseCallback.PROMISE_SYMBOL]; + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/select.js b/node_modules/async/select.js new file mode 100644 index 0000000..303dc1f --- /dev/null +++ b/node_modules/async/select.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = 
require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectLimit.js b/node_modules/async/selectLimit.js new file mode 100644 index 0000000..89e55f5 --- /dev/null +++ b/node_modules/async/selectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + 
+var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectSeries.js b/node_modules/async/selectSeries.js new file mode 100644 index 0000000..a045e52 --- /dev/null +++ b/node_modules/async/selectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/seq.js b/node_modules/async/seq.js new file mode 100644 index 0000000..28c825f --- /dev/null +++ b/node_modules/async/seq.js @@ -0,0 +1,79 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = seq; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. 
+ * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(_wrapAsync2.default); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = (0, _promiseCallback.promiseCallback)(); + } + + (0, _reduce2.default)(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, (err, results) => cb(err, ...results)); + + return cb[_promiseCallback.PROMISE_SYMBOL]; + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/series.js b/node_modules/async/series.js new file mode 100644 index 0000000..56e78f9 --- /dev/null +++ b/node_modules/async/series.js @@ -0,0 +1,186 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = series; + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. 
If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). 
+ * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 
'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return (0, _parallel3.default)(_eachOfSeries2.default, tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/setImmediate.js b/node_modules/async/setImmediate.js new file mode 100644 index 0000000..c712ec3 --- /dev/null +++ b/node_modules/async/setImmediate.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `setImmediate`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. 
+ * + * @name setImmediate + * @static + * @memberOf module:Utils + * @method + * @see [async.nextTick]{@link module:Utils.nextTick} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +exports.default = _setImmediate2.default; +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/some.js b/node_modules/async/some.js new file mode 100644 index 0000000..2046cf6 --- /dev/null +++ b/node_modules/async/some.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. 
+ * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await 
async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someLimit.js b/node_modules/async/someLimit.js new file mode 100644 index 0000000..c8a295a --- /dev/null +++ b/node_modules/async/someLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someSeries.js b/node_modules/async/someSeries.js new file mode 100644 index 0000000..ee0654b --- /dev/null +++ b/node_modules/async/someSeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/sortBy.js b/node_modules/async/sortBy.js new file mode 100644 index 0000000..d17fb6a --- /dev/null +++ b/node_modules/async/sortBy.js @@ -0,0 +1,190 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. 
+ * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy(coll, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _map2.default)(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, { value: x, criteria }); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, + b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } +} +exports.default = (0, _awaitify2.default)(sortBy, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timeout.js b/node_modules/async/timeout.js new file mode 100644 index 0000000..dd58eb3 --- /dev/null +++ b/node_modules/async/timeout.js @@ -0,0 +1,89 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timeout; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. 
+ * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = (0, _wrapAsync2.default)(asyncFn); + + return (0, _initialParams2.default)((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/times.js b/node_modules/async/times.js new file mode 100644 index 
0000000..4484c73 --- /dev/null +++ b/node_modules/async/times.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = times; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesLimit.js b/node_modules/async/timesLimit.js new file mode 100644 index 0000000..9fb0ba3 --- /dev/null +++ b/node_modules/async/timesLimit.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesLimit; + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = 
_interopRequireDefault(_mapLimit); + +var _range = require('./internal/range.js'); + +var _range2 = _interopRequireDefault(_range); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)((0, _range2.default)(count), limit, _iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesSeries.js b/node_modules/async/timesSeries.js new file mode 100644 index 0000000..a10f0cb --- /dev/null +++ b/node_modules/async/timesSeries.js @@ -0,0 +1,32 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesSeries; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. 
+ * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/transform.js b/node_modules/async/transform.js new file mode 100644 index 0000000..75b754e --- /dev/null +++ b/node_modules/async/transform.js @@ -0,0 +1,173 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = transform; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. 
If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform(coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? 
[] : {}; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + (0, _eachOf2.default)(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/tryEach.js b/node_modules/async/tryEach.js new file mode 100644 index 0000000..82fe8ec --- /dev/null +++ b/node_modules/async/tryEach.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachSeries = require('./eachSeries.js'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. 
It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return (0, _eachSeries2.default)(tasks, (task, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +exports.default = (0, _awaitify2.default)(tryEach); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/unmemoize.js b/node_modules/async/unmemoize.js new file mode 100644 index 0000000..47a92b4 --- /dev/null +++ b/node_modules/async/unmemoize.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = unmemoize; +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. 
+ * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/until.js b/node_modules/async/until.js new file mode 100644 index 0000000..3c71e51 --- /dev/null +++ b/node_modules/async/until.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = until; + +var _whilst = require('./whilst.js'); + +var _whilst2 = _interopRequireDefault(_whilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. 
`callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _whilst2.default)(cb => _test((err, truth) => cb(err, !truth)), iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/waterfall.js b/node_modules/async/waterfall.js new file mode 100644 index 0000000..fcd0dc1 --- /dev/null +++ b/node_modules/async/waterfall.js @@ -0,0 +1,105 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. 
+ * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = (0, _wrapAsync2.default)(tasks[taskIndex++]); + task(...args, (0, _onlyOnce2.default)(next)); + } + + function next(err, ...args) { + if (err === 
false) return; + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +exports.default = (0, _awaitify2.default)(waterfall); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/whilst.js b/node_modules/async/whilst.js new file mode 100644 index 0000000..32a4776 --- /dev/null +++ b/node_modules/async/whilst.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/wrapSync.js b/node_modules/async/wrapSync.js new file mode 100644 index 0000000..3c3bf88 --- /dev/null +++ b/node_modules/async/wrapSync.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. 
Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? 
err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. 
+ +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/body-parser/HISTORY.md b/node_modules/body-parser/HISTORY.md new file mode 100644 index 0000000..b892491 --- /dev/null +++ b/node_modules/body-parser/HISTORY.md @@ -0,0 +1,665 @@ +1.20.2 / 2023-02-21 +=================== + + * Fix strict json error message on Node.js 19+ + * deps: content-type@~1.0.5 + - perf: skip value escaping when unnecessary + * deps: raw-body@2.5.2 + +1.20.1 / 2022-10-06 +=================== + + * deps: qs@6.11.0 + * perf: remove unnecessary object clone + +1.20.0 / 2022-04-02 +=================== + + * Fix error message for json parse whitespace in `strict` + * Fix internal error when inflated body exceeds limit + * Prevent loss of 
async hooks context + * Prevent hanging when request already read + * deps: depd@2.0.0 + - Replace internal `eval` usage with `Function` constructor + - Use instance methods on `process` to check for listeners + * deps: http-errors@2.0.0 + - deps: depd@2.0.0 + - deps: statuses@2.0.1 + * deps: on-finished@2.4.1 + * deps: qs@6.10.3 + * deps: raw-body@2.5.1 + - deps: http-errors@2.0.0 + +1.19.2 / 2022-02-15 +=================== + + * deps: bytes@3.1.2 + * deps: qs@6.9.7 + * Fix handling of `__proto__` keys + * deps: raw-body@2.4.3 + - deps: bytes@3.1.2 + +1.19.1 / 2021-12-10 +=================== + + * deps: bytes@3.1.1 + * deps: http-errors@1.8.1 + - deps: inherits@2.0.4 + - deps: toidentifier@1.0.1 + - deps: setprototypeof@1.2.0 + * deps: qs@6.9.6 + * deps: raw-body@2.4.2 + - deps: bytes@3.1.1 + - deps: http-errors@1.8.1 + * deps: safe-buffer@5.2.1 + * deps: type-is@~1.6.18 + +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 
2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 
/ 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 
+=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + 
* deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 +=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard 
for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + 
- empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 +================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 
+================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/node_modules/body-parser/LICENSE b/node_modules/body-parser/LICENSE 
new file mode 100644 index 0000000..386b7b6 --- /dev/null +++ b/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/body-parser/README.md b/node_modules/body-parser/README.md new file mode 100644 index 0000000..38553bf --- /dev/null +++ b/node_modules/body-parser/README.md @@ -0,0 +1,465 @@ +# body-parser + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. 
For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. + +The various errors returned by this module are described in the +[errors section](#errors). 
+ +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). + +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. 
Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. +If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. 
+ +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. 
+ +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). + +Defaults to `true`, but using the default has been deprecated. Please +research into the difference between `qs` and `querystring` and choose the +appropriate setting. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. 
+ +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +## Errors + +The middlewares provided by this module create errors using the +[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors +will typically have a `status`/`statusCode` property that contains the suggested +HTTP response code, an `expose` property to determine if the `message` property +should be displayed to the client, a `type` property to determine the type of +error without matching against the `message`, and a `body` property containing +the read body, if available. + +The following are the common errors created, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`. 
The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `charset` property will be set to the +encoding that is unsupported. + +### entity parse failed + +This error will occur when the request contained an entity that could not be +parsed by the middleware. The `status` property is set to `400`, the `type` +property is set to `'entity.parse.failed'`, and the `body` property is set to +the entity value that failed parsing. + +### entity verify failed + +This error will occur when the request contained an entity that could not be +failed verification by the defined `verify` option. The `status` property is +set to `403`, the `type` property is set to `'entity.verify.failed'`, and the +`body` property is set to the entity value that failed verification. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and `type` property is set to `'request.aborted'`. + +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. This typically occurs when the request is malformed, +typically when the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. 
+ +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### stream is not readable + +This error will occur when the request is no longer readable when this middleware +attempts to read it. This typically means something other than a middleware from +this module read the request body already and the middleware was also configured to +read the same request. The `status` property is set to `500` and the `type` +property is set to `'stream.not.readable'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported. + +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. 
+ +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci +[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[node-version-image]: https://badgen.net/npm/node/body-parser +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/body-parser +[npm-url]: https://npmjs.org/package/body-parser +[npm-version-image]: https://badgen.net/npm/v/body-parser diff --git a/node_modules/body-parser/SECURITY.md b/node_modules/body-parser/SECURITY.md new file mode 100644 index 0000000..9694d42 --- /dev/null +++ b/node_modules/body-parser/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The Express team and community take all security bugs seriously. Thank you +for improving the security of Express. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `body-parser`. This +information can be found in the npm registry using the command +`npm owner ls body-parser`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/expressjs/body-parser/issues) +asking for the current contact information. 
+ +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/node_modules/body-parser/index.js b/node_modules/body-parser/index.js new file mode 100644 index 0000000..bb24d73 --- /dev/null +++ b/node_modules/body-parser/index.js @@ -0,0 +1,156 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. + * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. 
+ * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + // use default type for parsers + var opts = Object.create(options || null, { + type: { + configurable: true, + enumerable: true, + value: undefined, + writable: true + } + }) + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. + * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js new file mode 100644 index 0000000..fce6283 --- /dev/null +++ b/node_modules/body-parser/lib/read.js @@ -0,0 +1,205 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. 
+ * @private + */ + +var createError = require('http-errors') +var destroy = require('destroy') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var unpipe = require('unpipe') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. + * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? 
null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // unpipe from stream and destroy + if (stream !== req) { + unpipe(req) + destroy(stream, true) + } + + // read off entire request + dump(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. 
+ * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} + +/** + * Dump the contents of a request. + * + * @param {object} req + * @param {function} callback + * @api private + */ + +function dump (req, callback) { + if (onFinished.isFinished(req)) { + callback(null) + } else { + onFinished(req, callback) + req.resume() + } +} diff --git a/node_modules/body-parser/lib/types/json.js b/node_modules/body-parser/lib/types/json.js new file mode 100644 index 0000000..59f3f7e --- /dev/null +++ b/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,247 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. 
+ */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. + * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex + +var JSON_SYNTAX_CHAR = '#' +var JSON_SYNTAX_REGEXP = /#+/g + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.slice(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. 
+ * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = '' + + if (index !== -1) { + partial = str.substring(0, index) + JSON_SYNTAX_CHAR + + for (var i = index + 1; i < str.length; i++) { + partial += JSON_SYNTAX_CHAR + } + } + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) { + return str.substring(index, index + placeholder.length) + }), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + var match = FIRST_CHAR_REGEXP.exec(str) + + return match + ? match[1] + : undefined +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. + * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/raw.js b/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 0000000..f5d1b67 --- /dev/null +++ b/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/text.js b/node_modules/body-parser/lib/types/text.js new file mode 100644 index 0000000..083a009 --- /dev/null +++ b/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? 
bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/urlencoded.js b/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 0000000..b2ca8f1 --- /dev/null +++ b/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,284 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. 
+ * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? 
queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: Infinity, + parameterLimit: parameterLimit + }) + } +} + +/** + * Get the charset of a request. 
+ * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. + * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/package.json b/node_modules/body-parser/package.json new file mode 100644 index 0000000..4637304 --- /dev/null +++ b/node_modules/body-parser/package.json @@ -0,0 +1,56 @@ +{ + "name": "body-parser", + "description": "Node.js body parsing middleware", + "version": "1.20.2", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "expressjs/body-parser", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "devDependencies": { + "eslint": "8.34.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-markdown": "3.0.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "methods": "1.1.2", + "mocha": "10.2.0", + "nyc": "15.1.0", + "safe-buffer": "5.2.1", + "supertest": "6.3.3" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "SECURITY.md", + "index.js" + ], + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. 
+ +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if 
(!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? 
expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..a18faa8 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + 
"files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/bytes/History.md b/node_modules/bytes/History.md new file mode 100644 index 0000000..d60ce0e --- /dev/null +++ b/node_modules/bytes/History.md @@ -0,0 +1,97 @@ +3.1.2 / 2022-01-27 +================== + + * Fix return value for un-parsable strings + +3.1.1 / 2021-11-15 +================== + + * Fix "thousandsSeparator" incorrecting formatting fractional part + +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add 
negative support. fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/node_modules/bytes/LICENSE b/node_modules/bytes/LICENSE new file mode 100644 index 0000000..63e95a9 --- /dev/null +++ b/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md new file mode 100644 index 0000000..5790e23 --- /dev/null +++ b/node_modules/bytes/Readme.md @@ -0,0 +1,152 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes(number|string value, [options]): number|string|null + +Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number`|`string` | Number value to format or string value to parse | +| options | `Object` | Conversion options for `format` | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`number`|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes('1KB'); +// output: 1024 +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. 
+ +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `'.'`... Default value to `''`. | +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes.format(1024); +// output: '1KB' + +bytes.format(1000); +// output: '1000B' + +bytes.format(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes.format(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes.format(1024, {unitSeparator: ' '}); +// output: '1 KB' +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. If no unit is given, or `value` +is a number, it is assumed the value is in bytes. + +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. 
+ +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. | + +**Example** + +```js +bytes.parse('1KB'); +// output: 1024 + +bytes.parse('1024'); +// output: 1024 + +bytes.parse(1024); +// output: 1024 +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci +[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/v/bytes +[npm-url]: https://npmjs.org/package/bytes diff --git a/node_modules/bytes/index.js b/node_modules/bytes/index.js new file mode 100644 index 0000000..6f2d0f8 --- /dev/null +++ b/node_modules/bytes/index.js @@ -0,0 +1,170 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. + * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. 
+ * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. + * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. + * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.split('.').map(function (s, i) { + return i === 0 + ? 
s.replace(formatThousandsRegExp, thousandsSeparator) + : s + }).join('.'); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. + * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + if (isNaN(floatValue)) { + return null; + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json new file mode 100644 index 0000000..f2b6a8b --- /dev/null +++ b/node_modules/bytes/package.json @@ -0,0 +1,42 @@ +{ + "name": "bytes", + "description": "Utility to parse a string bytes to bytes and vice-versa", + "version": "3.1.2", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Jed Watson ", + "Théo FIDRY " + ], + "license": "MIT", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "repository": "visionmedia/bytes.js", + "devDependencies": { + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git 
a/node_modules/call-bind/.eslintignore b/node_modules/call-bind/.eslintignore new file mode 100644 index 0000000..404abb2 --- /dev/null +++ b/node_modules/call-bind/.eslintignore @@ -0,0 +1 @@ +coverage/ diff --git a/node_modules/call-bind/.eslintrc b/node_modules/call-bind/.eslintrc new file mode 100644 index 0000000..e5d3c9a --- /dev/null +++ b/node_modules/call-bind/.eslintrc @@ -0,0 +1,17 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "id-length": 0, + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + "no-magic-numbers": 0, + "operator-linebreak": [2, "before"], + }, +} diff --git a/node_modules/call-bind/.github/FUNDING.yml b/node_modules/call-bind/.github/FUNDING.yml new file mode 100644 index 0000000..c70c2ec --- /dev/null +++ b/node_modules/call-bind/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bind +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/call-bind/.nycrc b/node_modules/call-bind/.nycrc new file mode 100644 index 0000000..1826526 --- /dev/null +++ b/node_modules/call-bind/.nycrc @@ -0,0 +1,13 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/call-bind/CHANGELOG.md b/node_modules/call-bind/CHANGELOG.md new file mode 100644 index 
0000000..62a3727 --- /dev/null +++ b/node_modules/call-bind/CHANGELOG.md @@ -0,0 +1,42 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.2](https://github.com/ljharb/call-bind/compare/v1.0.1...v1.0.2) - 2021-01-11 + +### Commits + +- [Fix] properly include the receiver in the bound length [`dbae7bc`](https://github.com/ljharb/call-bind/commit/dbae7bc676c079a0d33c0a43e9ef92cb7b01345d) + +## [v1.0.1](https://github.com/ljharb/call-bind/compare/v1.0.0...v1.0.1) - 2021-01-08 + +### Commits + +- [Tests] migrate tests to Github Actions [`b6db284`](https://github.com/ljharb/call-bind/commit/b6db284c36f8ccd195b88a6764fe84b7223a0da1) +- [meta] do not publish github action workflow files [`ec7fe46`](https://github.com/ljharb/call-bind/commit/ec7fe46e60cfa4764ee943d2755f5e5a366e578e) +- [Fix] preserve original function’s length when possible [`adbceaa`](https://github.com/ljharb/call-bind/commit/adbceaa3cac4b41ea78bb19d7ccdbaaf7e0bdadb) +- [Tests] gather coverage data on every job [`d69e23c`](https://github.com/ljharb/call-bind/commit/d69e23cc65f101ba1d4c19bb07fa8eb0ec624be8) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`2fd3586`](https://github.com/ljharb/call-bind/commit/2fd3586c5d47b335364c14293114c6b625ae1f71) +- [Deps] update `get-intrinsic` [`f23e931`](https://github.com/ljharb/call-bind/commit/f23e9318cc271c2add8bb38cfded85ee7baf8eee) +- [Deps] update `get-intrinsic` [`72d9f44`](https://github.com/ljharb/call-bind/commit/72d9f44e184465ba8dd3fb48260bbcff234985f2) +- [meta] fix FUNDING.yml [`e723573`](https://github.com/ljharb/call-bind/commit/e723573438c5a68dcec31fb5d96ea6b7e4a93be8) +- [eslint] ignore coverage output [`15e76d2`](https://github.com/ljharb/call-bind/commit/15e76d28a5f43e504696401e5b31ebb78ee1b532) +- [meta] add 
Automatic Rebase and Require Allow Edits workflows [`8fa4dab`](https://github.com/ljharb/call-bind/commit/8fa4dabb23ba3dd7bb92c9571c1241c08b56e4b6) + +## v1.0.0 - 2020-10-30 + +### Commits + +- Initial commit [`306cf98`](https://github.com/ljharb/call-bind/commit/306cf98c7ec9e7ef66b653ec152277ac1381eb50) +- Tests [`e10d0bb`](https://github.com/ljharb/call-bind/commit/e10d0bbdadc7a10ecedc9a1c035112d3e368b8df) +- Implementation [`43852ed`](https://github.com/ljharb/call-bind/commit/43852eda0f187327b7fad2423ca972149a52bd65) +- npm init [`408f860`](https://github.com/ljharb/call-bind/commit/408f860b773a2f610805fd3613d0d71bac1b6249) +- [meta] add Automatic Rebase and Require Allow Edits workflows [`fb349b2`](https://github.com/ljharb/call-bind/commit/fb349b2e48defbec8b5ec8a8395cc8f69f220b13) +- [meta] add `auto-changelog` [`c4001fc`](https://github.com/ljharb/call-bind/commit/c4001fc43031799ef908211c98d3b0fb2b60fde4) +- [meta] add "funding"; create `FUNDING.yml` [`d4d6d29`](https://github.com/ljharb/call-bind/commit/d4d6d2974a14bc2e98830468eda7fe6d6a776717) +- [Tests] add `npm run lint` [`dedfb98`](https://github.com/ljharb/call-bind/commit/dedfb98bd0ecefb08ddb9a94061bd10cde4332af) +- Only apps should have lockfiles [`54ac776`](https://github.com/ljharb/call-bind/commit/54ac77653db45a7361dc153d2f478e743f110650) +- [meta] add `safe-publish-latest` [`9ea8e43`](https://github.com/ljharb/call-bind/commit/9ea8e435b950ce9b705559cd651039f9bf40140f) diff --git a/node_modules/call-bind/LICENSE b/node_modules/call-bind/LICENSE new file mode 100644 index 0000000..48f05d0 --- /dev/null +++ b/node_modules/call-bind/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/call-bind/README.md b/node_modules/call-bind/README.md new file mode 100644 index 0000000..53649eb --- /dev/null +++ b/node_modules/call-bind/README.md @@ -0,0 +1,2 @@ +# call-bind +Robustly `.call.bind()` a function. diff --git a/node_modules/call-bind/callBound.js b/node_modules/call-bind/callBound.js new file mode 100644 index 0000000..8374adf --- /dev/null +++ b/node_modules/call-bind/callBound.js @@ -0,0 +1,15 @@ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); + +var callBind = require('./'); + +var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf')); + +module.exports = function callBoundIntrinsic(name, allowMissing) { + var intrinsic = GetIntrinsic(name, !!allowMissing); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBind(intrinsic); + } + return intrinsic; +}; diff --git a/node_modules/call-bind/index.js b/node_modules/call-bind/index.js new file mode 100644 index 0000000..6fa3e4a --- /dev/null +++ b/node_modules/call-bind/index.js @@ -0,0 +1,47 @@ +'use strict'; + +var bind = require('function-bind'); +var GetIntrinsic = require('get-intrinsic'); + +var $apply = GetIntrinsic('%Function.prototype.apply%'); +var $call = 
GetIntrinsic('%Function.prototype.call%'); +var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply); + +var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true); +var $defineProperty = GetIntrinsic('%Object.defineProperty%', true); +var $max = GetIntrinsic('%Math.max%'); + +if ($defineProperty) { + try { + $defineProperty({}, 'a', { value: 1 }); + } catch (e) { + // IE 8 has a broken defineProperty + $defineProperty = null; + } +} + +module.exports = function callBind(originalFunction) { + var func = $reflectApply(bind, $call, arguments); + if ($gOPD && $defineProperty) { + var desc = $gOPD(func, 'length'); + if (desc.configurable) { + // original length, plus the receiver, minus any additional arguments (after the receiver) + $defineProperty( + func, + 'length', + { value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) } + ); + } + } + return func; +}; + +var applyBind = function applyBind() { + return $reflectApply(bind, $apply, arguments); +}; + +if ($defineProperty) { + $defineProperty(module.exports, 'apply', { value: applyBind }); +} else { + module.exports.apply = applyBind; +} diff --git a/node_modules/call-bind/package.json b/node_modules/call-bind/package.json new file mode 100644 index 0000000..4360556 --- /dev/null +++ b/node_modules/call-bind/package.json @@ -0,0 +1,80 @@ +{ + "name": "call-bind", + "version": "1.0.2", + "description": "Robustly `.call.bind()` a function", + "main": "index.js", + "exports": { + ".": [ + { + "default": "./index.js" + }, + "./index.js" + ], + "./callBound": [ + { + "default": "./callBound.js" + }, + "./callBound.js" + ], + "./package.json": "./package.json" + }, + "scripts": { + "prepublish": "safe-publish-latest", + "lint": "eslint --ext=.js,.mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/*'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": 
"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bind.git" + }, + "keywords": [ + "javascript", + "ecmascript", + "es", + "js", + "callbind", + "callbound", + "call", + "bind", + "bound", + "call-bind", + "call-bound", + "function", + "es-abstract" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/call-bind/issues" + }, + "homepage": "https://github.com/ljharb/call-bind#readme", + "devDependencies": { + "@ljharb/eslint-config": "^17.3.0", + "aud": "^1.1.3", + "auto-changelog": "^2.2.1", + "eslint": "^7.17.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^1.1.4", + "tape": "^5.1.1" + }, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + } +} diff --git a/node_modules/call-bind/test/callBound.js b/node_modules/call-bind/test/callBound.js new file mode 100644 index 0000000..209ce3c --- /dev/null +++ b/node_modules/call-bind/test/callBound.js @@ -0,0 +1,55 @@ +'use strict'; + +var test = require('tape'); + +var callBound = require('../callBound'); + +test('callBound', function (t) { + // static primitive + t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself'); + t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself'); + + // static non-function object + t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself'); + t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself'); + t.equal(callBound('Array.constructor'), Array.constructor, 
'Array.constructor yields itself'); + t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself'); + + // static function + t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself'); + t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself'); + + // prototype primitive + t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself'); + t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself'); + + // prototype function + t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not yield itself'); + t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself'); + t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original'); + t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original'); + + t['throws']( + function () { callBound('does not exist'); }, + SyntaxError, + 'nonexistent intrinsic throws' + ); + t['throws']( + function () { callBound('does not exist', true); }, + SyntaxError, + 'allowMissing arg still throws for unknown intrinsic' + ); + + /* globals WeakRef: false */ + t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) { + st['throws']( + function () { callBound('WeakRef'); }, + TypeError, + 'real but absent intrinsic throws' + ); + st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/call-bind/test/index.js b/node_modules/call-bind/test/index.js new file mode 100644 index 0000000..bf6769c --- /dev/null +++ 
b/node_modules/call-bind/test/index.js @@ -0,0 +1,66 @@ +'use strict'; + +var callBind = require('../'); +var bind = require('function-bind'); + +var test = require('tape'); + +/* + * older engines have length nonconfigurable + * in io.js v3, it is configurable except on bound functions, hence the .bind() + */ +var functionsHaveConfigurableLengths = !!( + Object.getOwnPropertyDescriptor + && Object.getOwnPropertyDescriptor(bind.call(function () {}), 'length').configurable +); + +test('callBind', function (t) { + var sentinel = { sentinel: true }; + var func = function (a, b) { + // eslint-disable-next-line no-invalid-this + return [this, a, b]; + }; + t.equal(func.length, 2, 'original function length is 2'); + t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args'); + t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args'); + t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args'); + + var bound = callBind(func); + t.equal(bound.length, func.length + 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with too few args'); + t.deepEqual(bound(1, 2), [1, 2, undefined], 'bound func with right args'); + t.deepEqual(bound(1, 2, 3), [1, 2, 3], 'bound func with too many args'); + + var boundR = callBind(func, sentinel); + t.equal(boundR.length, func.length, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args'); + t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args'); + t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args'); + + var boundArg = callBind(func, sentinel, 1); + t.equal(boundArg.length, func.length - 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + 
t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args'); + t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg'); + t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args'); + + t.test('callBind.apply', function (st) { + var aBound = callBind.apply(func); + st.deepEqual(aBound(sentinel), [sentinel, undefined, undefined], 'apply-bound func with no args'); + st.deepEqual(aBound(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args'); + st.deepEqual(aBound(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args'); + + var aBoundArg = callBind.apply(func); + st.deepEqual(aBoundArg(sentinel, [1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with too many args'); + st.deepEqual(aBoundArg(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args'); + st.deepEqual(aBoundArg(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args'); + + var aBoundR = callBind.apply(func, sentinel); + st.deepEqual(aBoundR([1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with receiver and too many args'); + st.deepEqual(aBoundR([1, 2], 4), [sentinel, 1, 2], 'apply-bound func with receiver and right args'); + st.deepEqual(aBoundR([1], 4), [sentinel, 1, undefined], 'apply-bound func with receiver and too few args'); + + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/cassandra-driver/.eslintignore b/node_modules/cassandra-driver/.eslintignore new file mode 100644 index 0000000..d1d2071 --- /dev/null +++ b/node_modules/cassandra-driver/.eslintignore @@ -0,0 +1,8 @@ +# TypeScript: js files are generated using tsc transpiler +/test/unit/typescript/*.js + +# Integer is part of the Closure Library +/lib/types/integer.js + +# TinkerPop-based files +/lib/datastax/graph/type-serializers.js \ No newline at end of file diff --git a/node_modules/cassandra-driver/.travis.yml 
b/node_modules/cassandra-driver/.travis.yml new file mode 100644 index 0000000..c3de682 --- /dev/null +++ b/node_modules/cassandra-driver/.travis.yml @@ -0,0 +1,44 @@ +language: node_js +sudo: false +cache: + directories: + - node_modules + +jobs: + include: + - stage: "tests" + name: "Run eslint" + script: "npm install -g eslint@4; npm run eslint;" + node_js: "10" + - name: "TypeScript 2.9 generation and compilation tests" + node_js: "10" + script: + - npm install -g typescript@2.9; + - pushd test/unit/typescript/ + - tsc -p . + - node -e "require('./api-generation-test').generate()" > generated.ts + - tsc generated.ts + - name: "Unit tests - Node.js 12" + node_js: "12" + script: "npm test" + - name: "Unit tests w/ latest dependencies - Node.js 12" + node_js: "12" + script: + - rm package-lock.json + - rm -rf node_modules + - npm install + - npm test + - name: "Unit tests - Node.js 10" + node_js: "10" + script: "npm test" + - name: "Unit tests - Node.js 8" + node_js: "8" + script: "npm test" + - name: "TypeScript (latest) generation and compilation tests" + node_js: "10" + script: + - npm install -g typescript; + - pushd test/unit/typescript/ + - tsc -p . 
+ - node -e "require('./api-generation-test').generate()" > generated.ts + - tsc generated.ts diff --git a/node_modules/cassandra-driver/CHANGELOG.md b/node_modules/cassandra-driver/CHANGELOG.md new file mode 100644 index 0000000..263b5fe --- /dev/null +++ b/node_modules/cassandra-driver/CHANGELOG.md @@ -0,0 +1,611 @@ +# ChangeLog - DataStax Node.js Driver + +## 4.6.4 + +2022-07-11 + +### Bug fixes + +- [NODEJS-633] - Mapper error: TypeError: Cannot read property 'executor' of null +- [NODEJS-645] - Travis builds are failing due to TypeScript errors +- [NODEJS-646] - Unable to add address for initially unresolved host +- [NODEJS-647] - Log negotiated SSL/TLS versions when using encrypted connections + +## 4.6.3 + +2021-05-18 + +### Bug fixes + +- [NODEJS-632] - Re-resolve contact points on reconnect when all nodes are unavailable +- [NODEJS-634] - Downgrade protocol versions when encountering versions beyond what we support + +## 4.6.2 + +2021-03-30 + +### Bug fixes + +- [NODEJS-630] - upgraded vulnerable version of adm-zip dependency + +## 4.6.1 + +2020-11-25 + +### Bug fixes + +- [NODEJS-622] - Mapper: Batching with custom mapping does not work + +## 4.6.0 + +2020-08-11 + +### Features + +- [NODEJS-587] - Mapper: Add ability to provide a encoding/decoding functions to ModelOptions.columns +- [NODEJS-607] - Mapper: Add clear error messages when the model can not be found +- [NODEJS-611] - Use "allow list" terminology instead of whitelist + +### Bug fixes + +- [NODEJS-609] - NodeJS driver doesn't support transitional authentication +- [NODEJS-610] - TypeScript error on Type 'ResultSet' must have a '[Symbol.asyncIterator]()' method that returns an + async iterator. 
+ +## 4.5.2 + +2020-05-07 + +### Bug fixes + +- [NODEJS-606] - Graph: Direction serializer does not serialize direction elements + +## 4.5.1 + +2020-04-14 + +### Bug fixes + +- [NODEJS-605] - Graph: TSerializer does not deserialize T elements correctly + +## 4.5.0 + +2020-03-26 + +### Features + +- [NODEJS-489] - Initial DS Graph 6.8 Support +- [NODEJS-482] - Remove unnecessary allocations during GraphExecutionOptions creation +- [NODEJS-564] - Include available dcs in all localDataCenter errors +- [NODEJS-571] - Astra: Validate server certificate CN against the proxy host name +- [NODEJS-573] - Expose a method to build the default load balancing policy with the data center name + +### Bug fixes + +- [NODEJS-594] - Cloud support uses fs.promises which is marked as experimental in Node.js + +## 4.4.0 + +2020-01-15 + +### Features + +- [NODEJS-549] - Unified driver: merge core and DSE drivers into a single package +- [NODEJS-574] - Refactor Driver Internals: modernize codebase and use async functions internally +- [NODEJS-563] - Support paging with async iterator +- [NODEJS-585] - Implement TCP flow control +- [NODEJS-516] - Drop support for Node.js 4 and 6 runtime + +### Bug fixes + +- [NODEJS-583] - Default load balancing policy local hosts cache might exclude hosts that were down +- [NODEJS-584] - Exclude down hosts when preparing on all hosts +- [NODEJS-586] - Encode Date as BigInt when useBigIntAsLong is true + +## 4.3.1 + +2019-11-06 + +### Bug fixes + +- [NODEJS-541] - Deprecated IdempotencyAwareRetryPolicy checks for options resulting in error +- [NODEJS-572] - TypeScript: ExecutionProfile parameters should be marked as optional +- [NODEJS-576] - MutableLong multiplication can cause infinite recursion + +## 4.3.0 + +2019-10-21 + +### Features + +- [NODEJS-503] - DataStax Astra Support +- [NODEJS-562] - TypeScript: Add generics to Mapper + +### Bug fixes + +- [NODEJS-556] - TypeScript: Client.stream typedef has incorrect return type +- [NODEJS-557] - 
TypeScript: DataCollection speculateRetry should be speculativeRetry +- [NODEJS-559] - TypeScript: DataCollection object|map types +- [NODEJS-561] - TypeScript: Missing credentials in ClientOptions + +## 4.2.0 + +2019-09-23 + +### Features + +- [NODEJS-464] - Include TypeScript declaration files on the package +- [NODEJS-512] - Mapper: support selecting the table or view using clustering keys from "order by" +- [NODEJS-514] - Support string input for all numeric types +- [NODEJS-515] - Mapper: support static column updates with only the partition key +- [NODEJS-545] - Introduce 'credentials' client option + +### Bug fixes + +- [NODEJS-509] - Mapper: batch items promise usage can lead to unhandled rejections +- [NODEJS-524] - "RangeError: Index out of range" when fetching tuples +- [NODEJS-527] - Mapper: select query cache key uses order by portion incorrectly +- [NODEJS-529] - Tuple constructor incorrectly handles single element values +- [NODEJS-538] - Mapper query generation error: USING clause is misplaced +- [NODEJS-546] - Mapper result throws when inspected and result is not ROWS + +## 4.1.0 + +2019-04-25 + +### Features + +- [NODEJS-180] - Expose API for parallel execution +- [NODEJS-100] - Provide a way to stream data in +- [NODEJS-491] - Send driver name and driver version in the STARTUP message +- [NODEJS-497] - Add jitter to ExponentialReconnectionPolicy +- [NODEJS-500] - ControlConnection init: Defer host map creation until system tables have been queried +- [NODEJS-501] - Include host_id in host metadata + +### Bug fixes + +- [NODEJS-475] - Buffer out of bounds error when reading empty byte buffers +- [NODEJS-477] - Logged message when preparing times out does not contain host address +- [NODEJS-492] - RequestLogger: Object parameter is not stringified before using substr +- [NODEJS-506] - ControlConnection queries during shutdown can leave active handles + +## 4.0.0 + +2018-12-17 + +### Features + +- [NODEJS-144] - Object Mapper +- [NODEJS-104] - 
Randomize contact points to prevent hotspots +- [NODEJS-192] - Allow raw bytes to be input for all types +- [NODEJS-220] - Slow Query Logger +- [NODEJS-300] - Expose metrics API +- [NODEJS-331] - Bypass retry logic when query is marked as non-idempotent +- [NODEJS-345] - Expose Metadata.checkSchemaAgreement() and ExecutionInfo.isSchemaInAgreement() +- [NODEJS-366] - Latency tracker interface +- [NODEJS-379] - DCAwareRoundRobinPolicy: Throw when local datacenter is not specified +- [NODEJS-387] - Expose optional callbacks in TimeUuid.now() and TimeUuid.fromDate() as an async overload +- [NODEJS-449] - Support new JavaScript primitive type bigint +- [NODEJS-450] - Introduce a wrapper around QueryOptions +- [NODEJS-485] - Remove usedHostsPerRemoteDc from DCAwareRoundRobinPolicy +- [NODEJS-462] - Remove the experimental flag to the Execution Profiles + +### Bug fixes + +- [NODEJS-473] - Zero-length map values: Fix RangeError on empty buffer in Encoder + +## 3.6.0 + +2018-12-04 + +### Features + +- [NODEJS-442] - Parse Virtual Keyspace Metadata +- [NODEJS-443] - Provide a means of sending query to a specific node to facilitate virtual table queries +- [NODEJS-459] - Support for Virtual Tables/System views +- [NODEJS-487] - Deprecate DCAwareRoundRobinPolicy parameter for inter-DC failover + +### Bug fixes + +- [NODEJS-465] - Table metadata fetch fails when using ES2015 Set and C* 3.0+ +- [NODEJS-472] - Connections attempts are made when a new node is bootstrapped even if its marked as "ignored" +- [NODEJS-474] - Retry on current host should be made on a different connection + +## 3.5.0 + +2018-04-17 + +### Features + +- [NODEJS-407] - Add NO\_COMPACT option +- [NODEJS-426] - Log driver version on Client.connect +- [NODEJS-431] - Consider using OPTIONS for heartbeats instead of 'select key from system.local' + +### Bug fixes + +- [NODEJS-412] - Methods to retrieve schema metadata should reject promise when not connected +- [NODEJS-418] - Add jsdoc for each response error 
code +- [NODEJS-428] - Connection associated with reconnect is not fully closed when STARTUP times out on node that is unresponsive +- #271 - Use all columns from peers in ControlConnection +- #276 - Remove buffers `noAssert` argument + + +## 3.4.1 + +2018-02-20 + +### Bug Fixes + +- [NODEJS-429] - Additional validation needed in Encoder.setRoutingKeyFromUser to ensure provided routing key is valid +- [NODEJS-430] - Unexpected error when query execution doesn't include parameters which are part of the partition key + +## 3.4.0 + +2018-02-05 + +### Features + +- [NODEJS-95] - Expose node token and range information +- [NODEJS-335] - Avoid using Object.defineProperty() for type representations +- [NODEJS-344] - Verbose logging on Connection overhead +- [NODEJS-363] - Drop support for Node.js v0.10 and v0.12 +- [NODEJS-378] - DCAwareRoundRobinPolicy: Warn when the local datacenter is not specified +- [NODEJS-388] - Drop support for domains +- [NODEJS-400] - Modify the message for batch log write failures +- [NODEJS-402] - Batch: Use routing key from first statement +- [NODEJS-404] - Use pooling.warmup to true as default +- [NODEJS-405] - Use system.peers in protocol negotiation +- [NODEJS-406] - Use ES2015 - Modernize codebase +- [NODEJS-409] - alreadyExists error does not include keyspace and table attributes +- [NODEJS-417] - Handle network stream backpressure when writing and limit write queue + +### Bug Fixes + +- [NODEJS-390] - Buffer.from() fails in older versions of Node.js v4 (lower than v4.5) +- [NODEJS-403] - NoHostAvailableError does not call super DriverError constructor +- [NODEJS-415] - Incorrect Murmur3 hashing of tokens which byte length satisfy length & 15 >= 12 +- [NODEJS-419] - Token to Replica map can omit replicas for vnodes + + +## 3.3.0 + +2017-09-19 + +### Features + +- [NODEJS-82] - Speculative query retries +- [NODEJS-287] - Provide metrics on the state of connections to Cassandra +- [NODEJS-308] - Add CDC to TableOptionsMetadata and 
TableOptions for Cassandra 3.8+ +- [NODEJS-309] - Allow prepared statements to be prepared on all nodes +- [NODEJS-339] - Avoid using deprecated Buffer constructors +- [NODEJS-343] - Improve performance of Murmur 3 partitioner +- [NODEJS-359] - Add 'applied' to ResultSet, similar to java-drivers ResultSet.wasApplied() +- [NODEJS-375] - Expose optional callbacks Uuid.random() as async overload +- [NODEJS-376] - Stringify tokens once to simplify computations when building token map + +### Bug Fixes + +- [NODEJS-365] - Routing key component length is encoded as int16 instead of uint16 +- [NODEJS-370] - Consistency of trace queries is not configurable +- [NODEJS-373] - Empty string in a map field returned as null on query + +## 3.2.2 + +2017-06-05 + +### Bug Fixes + +- [NODEJS-346] - Shutdown doesn't work if error occurs after control connection initialization +- [NODEJS-347] - Metadata: Schema parser fails to handle index_options null values +- [NODEJS-355] - Domain without dots will not connect +- [NODEJS-358] - TokenAwarePolicy does not take statement keyspace into account +- [NODEJS-360] - ControlConnection: when any of the queries to refresh topology fail it will not attempt to reconnect +- [NODEJS-362] - Driver fails to encode Duration's with large values + +## 3.2.1 + +2017-04-24 + +### Features + +- [NODEJS-332] - Support Duration Type +- [NODEJS-338] - Make protocol negotiation more resilient + +## 3.2.0 + +2017-01-17 + +### Notable Changes + +- Promise support ([#194](https://github.com/datastax/nodejs-driver/pull/194)). +- Timestamp generation: client-side timestamps are generated and sent in the request by default when the +server supports it ([#195](https://github.com/datastax/nodejs-driver/pull/195)). 
+- Added `isIdempotent` query option which is set to `false` by default: future versions of the driver will use this + value to consider whether an execution should be retried or directly rethrown to the consumer without using the retry + policy ([#197](https://github.com/datastax/nodejs-driver/pull/197)). + +### Features + +- [NODEJS-322] - Timestamp Generator Support for providing Client Timestamps Improvement +- [NODEJS-189] - Support promises +- [NODEJS-230] - Expose ResultSet `@@iterator` +- [NODEJS-325] - Add explicit idempotency setting in the query options + +## 3.1.6 + +2016-11-14 + +### Bug Fixes + +- [NODEJS-294] - TokenAwarePolicy: Avoid yielding the primary replica first + +## 3.1.5 + +2016-10-07 + +### Bug Fixes + +- [NODEJS-313] - Client-to-node encryption: mark request as written before invoking socket.write() + +## 3.1.4 + +2016-09-21 + +### Bug Fixes + +- [NODEJS-310] - Reading streaming frames with flags can result in uncaught error + +## 3.1.3 + +2016-08-31 + +### Bug Fixes + +- [NODEJS-303] - Protocol version downgrade fails on OSX and Windows. + +## 3.1.2 + +2016-08-30 + +### Bug Fixes + +- [NODEJS-283] - Possible connection leak if pool is shutting down while core connections are being created. +- [NODEJS-288] - Callback never executed in error on subsequent Client.execute with Client configured with keyspace +that doesn't exist. +- [NODEJS-293] - When client.connect() return error - client.shutdown() not work properly. 
+- [NODEJS-296] - Cannot read property 'consistency' of null, TypeError: Cannot read property 'consistency' of null +- [NODEJS-297] - DCAwareRoundRobinPolicy should make a local reference to host arrays +- [NODEJS-301] - 'Trying to access beyond buffer length' error if Warnings, Custom Payload, or Trace Id present in +non-RESULT response Improvement +- [NODEJS-265] - Remove connection from host pool when closed by server side + +## 3.1.1 + +2016-06-30 + +### Bug Fixes + +- [NODEJS-284] - Driver fails to resolve host names in the local hosts file + +## 3.1.0 + +2016-06-28 + +### Notable Changes + +- Introduced experimental Execution Profiles API ([#156](https://github.com/datastax/nodejs-driver/pull/156)) +- Removed dependency to [async](https://github.com/caolan/async) package ( +[#138](https://github.com/datastax/nodejs-driver/pull/138)). +- Enhanced retry policies: handle client timeouts, connection closed and other errors. New retry decision: try next +host ([#143](https://github.com/datastax/nodejs-driver/pull/143)). 
+ +### Features + +- [NODEJS-261] - Execution profiles +- [NODEJS-105] - New Retry Policy Decision - try next host +- [NODEJS-106] - Don't mark host down while one connection is active +- [NODEJS-107] - Prevent duplicate metadata fetches from control connection and allow disabling schema metadata fetching +- [NODEJS-247] - Schedule idleTimeout before descheduling the previous +- [NODEJS-177] - Use A-record with multiple IPs for contact points +- [NODEJS-201] - Avoid dynamically copying query options properties into users query options +- [NODEJS-236] - Handle empty map values gracefully +- [NODEJS-240] - Replace async dependency +- [NODEJS-242] - Expose default policies and default options +- [NODEJS-248] - Optimize query plan hosts iteration +- [NODEJS-249] - Avoid using Object.defineProperty() in ResultSet constructor +- [NODEJS-251] - Expose onRequestError() method in the RetryPolicy prototype + +### Bug Fixes + +- [NODEJS-246] - InetAddress validation improperly flags IPv4-mapped IPv6 +- [NODEJS-250] - Timeout duration reported in OperationTimedOutError does not consider statement-level options. +- [NODEJS-252] - Prepared statement metadata does not use logged keyspace +- [NODEJS-255] - InetAddress.toString() improperly truncates last group if preceding bytes are 0 for ipv6 addresses +- [NODEJS-257] - Connection wrongly parses IPv6 from Host address +- [NODEJS-273] - readTimeout set to 0 in queryOptions is not used. 
+ +## 3.0.2 + +2016-04-05 + +### Features + +- [NODEJS-228] - Allow setting read timeout at statement level + +### Bug Fixes + +- [NODEJS-159] - Metadata.getTokenToReplicaNetworkMapper does not account for multiple racks in a DC +- [NODEJS-235] - Decoding error can result in callback not executed +- [NODEJS-237] - Timeuuid generation sub-millisecond portion is not guaranteed to be increasing +- [NODEJS-238] - eachRow() retry attempts after read timeout don't execute rowCallback + +## 3.0.1 + +2016-02-08 + +### Features + +- [NODEJS-211] - Pass the authenticator name from the server to the auth provider + +### Bug Fixes + +- [NODEJS-216] - Inet with alpha character is converting the character to 0 + +## 3.0.0 + +2015-12-14 + +### Notable Changes + +- Default consistency changed back to `LOCAL_ONE`. + +### Features + +- [NODEJS-155] - Schedule reconnections using Timers +- [NODEJS-195] - Expose encode()/decode() functions +- [NODEJS-204] - Change default consistency level to LOCAL_ONE +- [NODEJS-198] - Avoid using Function.prototype.bind() for common execution path +- [NODEJS-200] - Use Error.captureStackTrace() only when setting enabled + +### Bug Fixes + +- [NODEJS-193] - BigDecimal.fromString() should throw a TypeError if there is a conversion error +- [NODEJS-197] - Can't parse column type if it contains UDT that is a quoted identifier +- [NODEJS-202] - Support for "custom" types after CASSANDRA-10365 +- [NODEJS-203] - RoundRobinPolicies: Missing return statement when calling callback + +## 3.0.0-rc1 + +2015-11-11 + +### Notable Changes + +- Added support for Cassandra 3.0.0 +- _Breaking_ Changed default consistency to `LOCAL QUORUM` [#103](https://github.com/datastax/nodejs-driver/pull/103) +- _Breaking_ `Aggregate#initCondition` now returns the string representation of the value +[#102](https://github.com/datastax/nodejs-driver/pull/102) +- Manual paging via `ResultSet#nextPage()` and `Client#stream()` throttling 
+[#111](https://github.com/datastax/nodejs-driver/pull/111) + +### Features + +- [NODEJS-186] - Update schema type representation to CQL +- [NODEJS-68] - Manual paging support via nextPage() and client.stream() throttling +- [NODEJS-130] - Add buffer for non-streaming rows messages +- [NODEJS-142] - Frame coalescing on connection +- [NODEJS-169] - Update async dependency +- [NODEJS-178] - Change default consistency level to LOCAL_QUORUM +- [NODEJS-181] - Update default behavior unbound values in prepared statements + +### Bug Fixes + +- [NODEJS-164] - Defunct connection is not properly removed from pool +- [NODEJS-190] - useUndefinedAsUnset should not apply to udt, tuple, set, list and map members. + +## 3.0.0-beta1 + +2015-10-19 + +### Notable Changes + +- Added support for Cassandra 3.0-rc1 +- New index metadata API [#98](https://github.com/datastax/nodejs-driver/pull/98) + +### Features + +- [NODEJS-163] - Process Modernized Schema Tables for C* 3.0 +- [NODEJS-166] - Process Materialized View Metadata +- [NODEJS-170] - Process materialized view events +- [NODEJS-171] - Process changes in 'columns' table in C* 3.0-rc1+ +- [NODEJS-172] - Process crc_check_chance column from 'tables' and 'views' metadata tables +- [NODEJS-182] - Add missing options to table / view metadata +- [NODEJS-183] - Add support for parsing Index metadata + +### Bug Fixes + +- [NODEJS-185] - Metadata fetch of table with ColumnToCollectionType fails + +## 2.2.2 + +2015-10-14 + +### Features + +- [NODEJS-187] - Expose Metadata prototype to be available for _promisification_ + +### Bug Fixes + +- [NODEJS-160] - Error setting routing keys before query execution +- [NODEJS-175] - Select from table after a new field is added to a UDT can result in callback never fired +- [NODEJS-185] - Metadata fetch of table with ColumnToCollectionType fails + +## 2.2.1 + +2015-09-14 + +### Features + +- [NODEJS-162] - Add coordinator of query to error object + +### Bug Fixes + +- [NODEJS-154] - Local datacenter 
could not be determined +- [NODEJS-165] - Driver 2.2 fails to connect under windows server for cassandra 2.1 + +## 2.2.0 + +2015-08-10 + +### Notable Changes + +- **Client**: All requests use `readTimeout` that can be configured in the `socketOptions`, enabled by default to +12secs +- **Client**: Now exposes topology and node status change events: `hostAdd`, `hostRemove`, `hostUp` and `hostDown` + +### Features + +- [NODEJS-140] - WhiteListPolicy +- [NODEJS-114] - Client-Configurable High Level Request Timeout +- [NODEJS-138] - Provide option to open all connections at startup +- [NODEJS-149] - Expose node status and topology changes +- [NODEJS-152] - Enable client read timeout by default + +### Bug Fixes + +- [NODEJS-111] - Connect should callback in error after shutdown +- [NODEJS-151] - 'All host(s) tried for query failed' error immediately after Cassandra node failure +- [NODEJS-156] - RequestHandler retry should not use a new query plan +- [NODEJS-157] - Control connection can fail and not be re-established if it errors on initOnConnection + +## 2.2.0-rc1 + +2015-06-18 + +### Notable Changes + +- Added support for Cassandra 2.2 and native protocol v4 + +### Features + +- [NODEJS-117] - Small int and byte types for C* 2.2 +- [NODEJS-118] - Support new date and time types +- [NODEJS-121] - Distinguish between `NULL` and `UNSET` values in Prepared Statements +- [NODEJS-122] - Add support for client warnings +- [NODEJS-123] - Support Key-value payloads in native protocol v4 +- [NODEJS-124] - Use PK columns from v4 prepared responses +- [NODEJS-125] - Support UDF and Aggregate Function Schema Meta +- [NODEJS-126] - Add client address to query trace +- [NODEJS-129] - Support server error in Startup response for C* 2.1 +- [NODEJS-131] - Handle new C* 2.2 errors + +### Bug Fixes + +- [NODEJS-119] - Rare 'write after end' error encountered while reconnecting with lower protocol version on nodejs 0.10.x +- [NODEJS-120] - Connection 'object is not a function' at 
Connection.handleResult +- [NODEJS-127] - Integer.toBuffer() gives wrong representation for positive numbers with the msb on +- [NODEJS-128] - getPeersSchemaVersions uses system.local instead of system.peers +- [NODEJS-136] - LocalDate fails to parse dates less than -271821-04-20 and greater than 275760-09-13 +- [NODEJS-137] - DriverInternalError - No active connection found +- [NODEJS-139] - Use retry policy defined in the query options +- [NODEJS-141] - Node schema change - keyspace metadata does not exist +- [NODEJS-146] - Unhandled 'error' event caused by RST on Socket on Connection Initialization causes app to terminate diff --git a/node_modules/cassandra-driver/Jenkinsfile b/node_modules/cassandra-driver/Jenkinsfile new file mode 100644 index 0000000..34a5d9a --- /dev/null +++ b/node_modules/cassandra-driver/Jenkinsfile @@ -0,0 +1,648 @@ +#!groovy + + +def initializeEnvironment() { + def nodeVersions = ['8': '8.16.2', '10': '10.17.0', '12': '12.13.0'] + env.DRIVER_DISPLAY_NAME = 'Cassandra Node.js Driver' + env.DRIVER_METRIC_TYPE = 'oss' + if (env.GIT_URL.contains('riptano/nodejs-driver')) { + env.DRIVER_DISPLAY_NAME = 'private ' + env.DRIVER_DISPLAY_NAME + env.DRIVER_METRIC_TYPE = 'oss-private' + } else if (env.GIT_URL.contains('nodejs-dse-driver')) { + env.DRIVER_DISPLAY_NAME = 'DSE Node.js Driver' + env.DRIVER_METRIC_TYPE = 'dse' + } + + env.GIT_SHA = "${env.GIT_COMMIT.take(7)}" + env.GITHUB_PROJECT_URL = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" + env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" + env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" + env.NODEJS_VERSION_FULL = nodeVersions[env.NODEJS_VERSION] + + sh label: 'Assign Node.js global environment', script: '''#!/bin/bash -lex + nodenv versions + echo "Using Node.js runtime ${NODEJS_VERSION} (${NODEJS_VERSION_FULL})" + nodenv global ${NODEJS_VERSION_FULL} + ''' + + sh label: 'Download 
Apache Cassandra or DataStax Enterprise', script: '''#!/bin/bash -lex + . ${CCM_ENVIRONMENT_SHELL} ${CASSANDRA_VERSION} + ''' + + sh label: 'Display Node.js and environment information', script: '''#!/bin/bash -le + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + node --version + npm --version + printenv | sort + ''' +} + +def installDriverAndDependencies() { + sh label: 'Install the driver', script: '''#!/bin/bash -lex + npm install + ''' + + sh label: 'Install driver dependencies', script: '''#!/bin/bash -lex + npm install mocha-jenkins-reporter@0 + npm install kerberos@1 + npm install -g eslint@4 + ''' +} + +def executeLinter() { + sh label: 'Perform static analysis of source code', script: '''#!/bin/bash -lex + npm run eslint + ''' +} + +def executeTests() { + sh label: 'Execute tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + npm run ci_jenkins + ''' +} + +def executeExamples() { + sh label: 'Create CCM cluster for examples', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + ccm create test_samples --dse -v ${CCM_VERSION} -n 1:0 -b -s + ''' + + sh label: 'Execute examples', script: '''#!/bin/bash -lex + set -o allexport + . 
${HOME}/environment.txt + set +o allexport + + ( + cd examples + npm install + node runner.js + ) + ''' + + sh label: 'Clean-up CCM cluster for examples', script: '''#!/bin/bash -lex + ccm remove + ''' +} + +def notifySlack(status = 'started') { + // Set the global pipeline scoped environment (this is above each matrix) + env.BUILD_STATED_SLACK_NOTIFIED = 'true' + + def buildType = 'Commit' + if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { + buildType = "${params.CI_SCHEDULE.toLowerCase().capitalize()}" + } + + def color = 'good' // Green + if (status.equalsIgnoreCase('aborted')) { + color = '808080' // Grey + } else if (status.equalsIgnoreCase('unstable')) { + color = 'warning' // Orange + } else if (status.equalsIgnoreCase('failed')) { + color = 'danger' // Red + } + + def message = """Build ${status} for ${env.DRIVER_DISPLAY_NAME} [${buildType}] +<${env.GITHUB_BRANCH_URL}|${env.BRANCH_NAME}> - <${env.RUN_DISPLAY_URL}|#${env.BUILD_NUMBER}> - <${env.GITHUB_COMMIT_URL}|${env.GIT_SHA}>""" + if (!status.equalsIgnoreCase('Started')) { + message += """ +${status} after ${currentBuild.durationString - ' and counting'}""" + } + +// slackSend color: "${color}", +// channel: "#nodejs-driver-dev-bots", +// message: "${message}" +} + +def submitCIMetrics(buildType) { + long durationMs = currentBuild.duration + long durationSec = durationMs / 1000 + long nowSec = (currentBuild.startTimeInMillis + durationMs) / 1000 + def branchNameNoPeriods = env.BRANCH_NAME.replaceAll('\\.', '_') + def durationMetric = "okr.ci.nodejs.${env.DRIVER_METRIC_TYPE}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" + + timeout(time: 1, unit: 'MINUTES') { + withCredentials([string(credentialsId: 'lab-grafana-address', variable: 'LAB_GRAFANA_ADDRESS'), + string(credentialsId: 'lab-grafana-port', variable: 'LAB_GRAFANA_PORT')]) { + withEnv(["DURATION_METRIC=${durationMetric}"]) { + sh label: 'Send runtime metrics to labgrafana', script: '''#!/bin/bash -lex + echo 
"${DURATION_METRIC}" | nc -q 5 ${LAB_GRAFANA_ADDRESS} ${LAB_GRAFANA_PORT} + ''' + } + } + } +} + +def describePerCommitStage() { + script { + currentBuild.displayName = "Per-Commit" + currentBuild.description = '''Per-Commit build and testing''' + } +} + +def describeScheduledTestingStage() { + script { + def type = params.CI_SCHEDULE.toLowerCase().capitalize() + currentBuild.displayName = "${type} schedule" + currentBuild.description = "${type} scheduled build and testing of all supported Apache Cassandra and DataStax " + + "Enterprise against multiple Node.js runtimes" + } +} + +def describeAdhocTestingStage() { + script { + def serverType = params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[0] + def serverDisplayName = 'Apache Cassandra' + def serverVersion = " v${serverType}" + if (serverType == 'ALL') { + serverDisplayName = "all ${serverDisplayName} and DataStax Enterprise server versions" + serverVersion = '' + } else { + try { + serverVersion = " v${env.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[1]}" + } catch (e) { + ;; // no-op + } + if (serverType == 'ddac') { + serverDisplayName = "DataStax Distribution of ${serverDisplayName}" + } else if (serverType == 'dse') { + serverDisplayName = 'DataStax Enterprise' + } + } + + def nodeJsVersionInformation = "Node.js v${params.ADHOC_BUILD_AND_EXECUTE_TESTS_NODEJS_VERSION}" + if (params.ADHOC_BUILD_AND_EXECUTE_TESTS_NODEJS_VERSION == 'ALL') { + nodeJsVersionInformation = 'all Node.js versions' + } + + def examplesInformation = '' + if (ADHOC_BUILD_AND_EXECUTE_TESTS_EXECUTE_EXAMPLES) { + examplesInformation = ' with examples' + } + + currentBuild.displayName = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION} against ${nodeJsVersionInformation}" + currentBuild.description = "Testing ${serverDisplayName} ${serverVersion} against ${nodeJsVersionInformation}${examplesInformation}" + } +} + +// branch pattern for cron +def branchPatternCron = ~"(master)" + +pipeline { + agent none + + // 
Global pipeline timeout + options { + disableConcurrentBuilds() + timeout(time: 5, unit: 'HOURS') + buildDiscarder(logRotator(artifactNumToKeepStr: '10', // Keep only the last 10 artifacts + numToKeepStr: '50')) // Keep only the last 50 build records + } + + parameters { + choice( + name: 'ADHOC_BUILD_TYPE', + choices: ['BUILD', 'BUILD-AND-EXECUTE-TESTS'], + description: '''

Perform a adhoc build operation

+ + + + + + + + + + + + + + + +
ChoiceDescription
BUILDPerforms a Per-Commit build
BUILD-AND-EXECUTE-TESTSPerforms a build and executes the integration and unit tests
''') + choice( + name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_NODEJS_VERSION', + choices: ['8', '10', '12.13.0', 'ALL'], + description: 'Node.js version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY!') + choice( + name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION', + choices: ['2.1', // Legacy Apache Cassandra + '3.11', // Current Apache Cassandra + '4.0', // Development Apache Cassandra + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Current DataStax Enterprise + 'dse-6.8', // Development DataStax Enterprise + 'ALL'], + description: '''Apache Cassandra and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
2.1Apache Cassandra v2.1.x
3.11Apache Cassandra v3.11.x
4.0Apache Cassandra v4.x (CURRENTLY UNDER DEVELOPMENT)
dse-5.1DataStax Enterprise v5.1.x
dse-6.0DataStax Enterprise v6.0.x
dse-6.7DataStax Enterprise v6.7.x
dse-6.8DataStax Enterprise v6.8.x (CURRENTLY UNDER DEVELOPMENT)
''') + booleanParam( + name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_EXECUTE_EXAMPLES', + defaultValue: false, + description: 'Flag to determine if the examples should be executed for adhoc builds') + booleanParam( + name: 'JUNIT_REPORT_STACK', + defaultValue: true, + description: 'Flag to determine if stack trace should be enabled with test failures for scheduled or adhoc builds') + booleanParam( + name: 'TEST_TRACE', + defaultValue: true, + description: 'Flag to determine if test tracing should be enabled for scheduled or adhoc builds') + choice( + name: 'CI_SCHEDULE', + choices: ['DO-NOT-CHANGE-THIS-SELECTION', 'WEEKNIGHTS'], + description: 'CI testing schedule to execute periodically scheduled builds and tests of the driver (DO NOT CHANGE THIS SELECTION)') + } + + triggers { + parameterizedCron(branchPatternCron.matcher(env.BRANCH_NAME).matches() ? """ + # Every weeknight (Monday - Friday) around 7 PM + H 19 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS + """ : "") + } + + environment { + OS_VERSION = 'ubuntu/bionic64/nodejs-driver' + JUNIT_REPORT_STACK = "${params.JUNIT_REPORT_STACK ? '1' : '0'}" + JUNIT_REPORT_PATH = '.' + TEST_TRACE = "${params.TEST_TRACE ? 
'on' : 'off'}" + SIMULACRON_PATH = '/home/jenkins/simulacron.jar' + CCM_PATH = '/home/jenkins/ccm' + CCM_ENVIRONMENT_SHELL = '/usr/local/bin/ccm_environment.sh' + } + + stages { + stage('Per-Commit') { + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD' } + expression { params.CI_SCHEDULE == 'DO-NOT-CHANGE-THIS-SELECTION' } + not { buildingTag() } + } + } + + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '2.1', // Legacy Apache Cassandra + '3.11', // Current Apache Cassandra + '4.0', // Development Apache Cassandra + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Current DataStax Enterprise + 'dse-6.8' // Development DataStax Enterprise + } + axis { + name 'NODEJS_VERSION' + values '8', '10', '12' + } + } + + excludes { + exclude { + axis { + name 'NODEJS_VERSION' + values '8' + } + axis { + name 'CASSANDRA_VERSION' + values '3.11', '4.0', 'dse-5.1', 'dse-6.8' + } + } + exclude { + axis { + name 'NODEJS_VERSION' + values '10' + } + axis { + name 'CASSANDRA_VERSION' + values '2.1', '4.0', 'dse-5.1', 'dse-6.0', 'dse-6.7' + } + } + exclude { + axis { + name 'NODEJS_VERSION' + values '12' + } + axis { + name 'CASSANDRA_VERSION' + values '2.1', '3.11', 'dse-6.0', 'dse-6.8' + } + } + } + + agent { + label "${OS_VERSION}" + } + + stages { + stage('Initialize-Environment') { + steps { + initializeEnvironment() + script { + if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { + notifySlack() + } + } + } + } + stage('Describe-Build') { + steps { + describePerCommitStage() + } + } + stage('Install-Driver-And-Dependencies') { + steps { + installDriverAndDependencies() + } + } + stage('Execute-Linter') { + steps { + executeLinter() + } + } + stage('Execute-Tests') { + steps { + catchError { // Handle error conditions in the event examples should be executed + executeTests() + } + } + post { + always { + junit testResults: '*.xml' + } + } + } + stage('Execute-Examples') { + 
when { + expression { env.CASSANDRA_VERSION == 'dse-6.7' } + } + steps { + executeExamples() + } + } + } + } + post { + always { + node('master') { + submitCIMetrics('commit') + } + } + aborted { + notifySlack('aborted') + } + success { + notifySlack('completed') + } + unstable { + notifySlack('unstable') + } + failure { + notifySlack('FAILED') + } + } + } + + stage('Scheduled-Testing') { + when { + beforeAgent true + branch pattern: '((\\d+(\\.[\\dx]+)+)|dse|master)', comparator: 'REGEXP' + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD' } + expression { params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION' } + not { buildingTag() } + } + } + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '2.1', // Legacy Apache Cassandra + '3.11', // Current Apache Cassandra + '4.0', // Development Apache Cassandra + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Current DataStax Enterprise + 'dse-6.8' // Development DataStax Enterprise + } + axis { + name 'NODEJS_VERSION' + values '8', '10', '12' + } + } + + excludes { + exclude { + axis { + name 'NODEJS_VERSION' + values '10' + } + axis { + name 'CASSANDRA_VERSION' + values '2.1', '3.11', '4.0', 'dse-5.1', 'dse-6.0', 'dse-6.7' + } + } + } + + agent { + label "${OS_VERSION}" + } + + stages { + stage('Initialize-Environment') { + steps { + initializeEnvironment() + script { + if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { + notifySlack() + } + } + } + } + stage('Describe-Build') { + steps { + describeScheduledTestingStage() + } + } + stage('Install-Driver-And-Dependencies') { + steps { + installDriverAndDependencies() + } + } + stage('Execute-Linter') { + steps { + executeLinter() + } + } + stage('Execute-Tests') { + steps { + catchError { // Handle error conditions in the event examples should be executed + executeTests() + } + } + post { + always { + junit testResults: '*.xml' + } + } + } + stage('Execute-Examples') { + when { + expression { 
env.CASSANDRA_VERSION == 'dse-6.7' } + } + steps { + executeExamples() + } + } + } + } + post { + aborted { + notifySlack('aborted') + } + success { + notifySlack('completed') + } + unstable { + notifySlack('unstable') + } + failure { + notifySlack('FAILED') + } + } + } + + stage('Adhoc-Testing') { + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD-AND-EXECUTE-TESTS' } + not { buildingTag() } + } + } + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '2.1', // Legacy Apache Cassandra + '3.11', // Current Apache Cassandra + '4.0', // Development Apache Cassandra + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Current DataStax Enterprise + 'dse-6.8' // Development DataStax Enterprise + } + axis { + name 'NODEJS_VERSION' + values '8', '10', '12' + } + } + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION ==~ /(ALL|${env.CASSANDRA_VERSION})/ } + expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_NODEJS_VERSION ==~ /(ALL|${env.NODEJS_VERSION})/ } + } + } + + agent { + label "${OS_VERSION}" + } + + stages { + stage('Describe-Build') { + steps { + describeAdhocTestingStage() + } + } + stage('Initialize-Environment') { + steps { + initializeEnvironment() + } + } + stage('Install-Driver-And-Dependencies') { + steps { + installDriverAndDependencies() + } + } + stage('Execute-Linter') { + steps { + executeLinter() + } + } + stage('Execute-Tests') { + steps { + catchError { // Handle error conditions in the event examples should be executed + executeTests() + } + } + post { + always { + junit testResults: '*.xml' + } + } + } + stage('Execute-Examples') { + when { + expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_EXECUTE_EXAMPLES } + } + steps { + executeExamples() + } + } + } + } + } + } +} diff --git a/node_modules/cassandra-driver/LICENSE.txt b/node_modules/cassandra-driver/LICENSE.txt new file mode 100644 index 
0000000..4947287 --- /dev/null +++ b/node_modules/cassandra-driver/LICENSE.txt @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/cassandra-driver/NOTICE.txt b/node_modules/cassandra-driver/NOTICE.txt new file mode 100644 index 0000000..711158c --- /dev/null +++ b/node_modules/cassandra-driver/NOTICE.txt @@ -0,0 +1,4 @@ +Integer provided by The Closure Library from +https://github.com/google/closure-library/ +under the Apache License 2.0. +Copyright (c) 2010 Google Inc. \ No newline at end of file diff --git a/node_modules/cassandra-driver/README.md b/node_modules/cassandra-driver/README.md new file mode 100644 index 0000000..5db9a47 --- /dev/null +++ b/node_modules/cassandra-driver/README.md @@ -0,0 +1,294 @@ +# DataStax Node.js Driver for Apache Cassandra® + +A modern, [feature-rich](#features) and highly tunable Node.js client library for Apache Cassandra and [DSE][dse] using +exclusively Cassandra's binary protocol and Cassandra Query Language. + +## Installation + +```bash +$ npm install cassandra-driver +``` + +[![Build Status](https://api.travis-ci.com/datastax/nodejs-driver.svg?branch=master)](https://travis-ci.org/datastax/nodejs-driver) +[![Build status](https://ci.appveyor.com/api/projects/status/m21t2tfdpmkjex1l/branch/master?svg=true)](https://ci.appveyor.com/project/datastax/nodejs-driver/branch/master) + + +## Features + +- Simple, Prepared, and [Batch][batch] statements +- Asynchronous IO, parallel execution, request pipelining +- [Connection pooling][pooling] +- Auto node discovery +- Automatic reconnection +- Configurable [load balancing][load-balancing] and [retry policies][retry] +- Works with any cluster size +- Built-in [object mapper][doc-mapper] +- Both [promise and callback-based API][doc-promise-callback] +- [Row streaming and pipes](#row-streaming-and-pipes) +- Built-in TypeScript support + +## Documentation + +- [Documentation index][doc-index] +- [CQL types to JavaScript types][doc-datatypes] +- [API docs][doc-api] +- [FAQ][faq] + +## Getting Help + +You can use the [project 
mailing list][mailinglist] or create a ticket on the [Jira issue tracker][jira]. + +## Basic usage + +```javascript +const cassandra = require('cassandra-driver'); + +const client = new cassandra.Client({ + contactPoints: ['h1', 'h2'], + localDataCenter: 'datacenter1', + keyspace: 'ks1' +}); + +const query = 'SELECT name, email FROM users WHERE key = ?'; + +client.execute(query, [ 'someone' ]) + .then(result => console.log('User with email %s', result.rows[0].email)); +``` + +The driver supports both [promises and callbacks][doc-promise-callback] for the asynchronous methods, +you can choose the approach that suits your needs. + +Note that in order to have concise code examples in this documentation, we will use the promise-based API of the +driver along with the `await` keyword. + +If you are using [DataStax Astra][astra] you can configure your client by setting the secure bundle and the + user credentials: + +```javascript +const client = new cassandra.Client({ + cloud: { secureConnectBundle: 'path/to/secure-connect-DATABASE_NAME.zip' }, + credentials: { username: 'user_name', password: 'p@ssword1' } +}); +``` + +### Prepare your queries + +Using prepared statements provides multiple benefits. + +Prepared statements are parsed and prepared on the Cassandra nodes and are ready for future execution. +Also, when preparing, the driver retrieves information about the parameter types which + **allows an accurate mapping between a JavaScript type and a Cassandra type**. + +The driver will prepare the query once on each host and execute the statement with the bound parameters. + +```javascript +// Use query markers (?) and parameters +const query = 'UPDATE users SET birth = ? 
WHERE key=?'; +const params = [ new Date(1942, 10, 1), 'jimi-hendrix' ]; + +// Set the prepare flag in the query options +await client.execute(query, params, { prepare: true }); +console.log('Row updated on the cluster'); +``` + +### Row streaming and pipes + +When using `#eachRow()` and `#stream()` methods, the driver parses each row as soon as it is received, + yielding rows without buffering them. + +```javascript +// Reducing a large result +client.eachRow( + 'SELECT time, val FROM temperature WHERE station_id=', + ['abc'], + (n, row) => { + // The callback will be invoked per each row as soon as they are received + minTemperature = Math.min(row.val, minTemperature); + }, + err => { + // This function will be invoked when all rows where consumed or an error was encountered + } +); +``` + +The `#stream()` method works in the same way but instead of callback it returns a [Readable Streams2][streams2] object + in `objectMode` that emits instances of `Row`. + +It can be **piped** downstream and provides automatic pause/resume logic (it buffers when not read). + +```javascript +client.stream('SELECT time, val FROM temperature WHERE station_id=', [ 'abc' ]) + .on('readable', function () { + // 'readable' is emitted as soon a row is received and parsed + let row; + while (row = this.read()) { + console.log('time %s and value %s', row.time, row.val); + } + }) + .on('end', function () { + // Stream ended, there aren't any more rows + }) + .on('error', function (err) { + // Something went wrong: err is a response error from Cassandra + }); +``` + +### User defined types + +[User defined types (UDT)][cql-udt] are represented as JavaScript objects. + +For example: +Consider the following UDT and table + +```cql +CREATE TYPE address ( + street text, + city text, + state text, + zip int, + phones set +); +CREATE TABLE users ( + name text PRIMARY KEY, + email text, + address frozen
+); +``` + +You can retrieve the user address details as a regular JavaScript object. + +```javascript +const query = 'SELECT name, address FROM users WHERE key = ?'; +const result = await client.execute(query, [ key ], { prepare: true }); +const row = result.first(); +const address = row.address; +console.log('User lives in %s, %s - %s', address.street, address.city, address.state); +``` + +Read more information about using [UDTs with the Node.js Driver][doc-udt]. + +### Paging + +All driver methods use a default `fetchSize` of 5000 rows, retrieving only first page of results up to a +maximum of 5000 rows to shield an application against accidentally retrieving large result sets in a single response. + +`stream()` method automatically fetches the following page once the current one was read. You can also use `eachRow()` +method to retrieve the following pages by using `autoPage` flag. See [paging documentation for more +information][doc-paging]. + +### Batch multiple statements + +You can execute multiple statements in a batch to update/insert several rows atomically even in different column families. + +```javascript +const queries = [ + { + query: 'UPDATE user_profiles SET email=? WHERE key=?', + params: [ emailAddress, 'hendrix' ] + }, { + query: 'INSERT INTO user_track (key, text, date) VALUES (?, ?, ?)', + params: [ 'hendrix', 'Changed email', new Date() ] + } +]; + +await client.batch(queries, { prepare: true }); +console.log('Data updated on cluster'); +``` + +## Object Mapper + +The driver provides a built-in [object mapper][doc-mapper] that lets you interact with your data like you +would interact with a set of documents. 
+ +Retrieving objects from the database: + +```javascript +const videos = await videoMapper.find({ userId }); +for (let video of videos) { + console.log(video.name); +} +``` + +Updating an object from the database: + +```javascript +await videoMapper.update({ id, userId, name, addedDate, description }); +``` + +You can read more information about [getting started with the Mapper in our +documentation][doc-mapper-start]. + +---- + +## Data types + +There are few data types defined in the ECMAScript specification, this usually represents a problem when you are trying + to deal with data types that come from other systems in JavaScript. + +The driver supports all the CQL data types in Apache Cassandra (3.0 and below) even for types with no built-in +JavaScript representation, like decimal, varint and bigint. Check the documentation on working with + [numerical values][doc-numerical], [uuids][doc-uuid] and [collections][doc-collections]. + +## Logging + +Instances of `Client()` are `EventEmitter` and emit `log` events: + +```javascript +client.on('log', (level, loggerName, message, furtherInfo) => { + console.log(`${level} - ${loggerName}: ${message}`); +}); +``` + +The `level` being passed to the listener can be `verbose`, `info`, `warning` or `error`. Visit the [logging +documentation][doc-logging] for more information. + +## Compatibility + +- Apache Cassandra versions 2.1 and above. +- DataStax Enterprise versions 4.8 and above. +- Node.js versions 8 and above. + +Note: DataStax products do not support big-endian systems. + +## Credits + +This driver is based on the original work of [Jorge Bay][jorgebay] on [node-cassandra-cql][old-driver] and adds a series of advanced features that are common across all other [DataStax drivers][drivers] for Apache Cassandra. 
+ +The development effort to provide an up to date, high performance, fully featured Node.js Driver for Apache Cassandra will continue on this project, while [node-cassandra-cql][old-driver] will be discontinued. + +## License + +© DataStax, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +[cassandra]: https://cassandra.apache.org/ +[doc-api]: https://docs.datastax.com/en/developer/nodejs-driver/latest/api/ +[doc-index]: https://docs.datastax.com/en/developer/nodejs-driver/latest/ +[doc-datatypes]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/datatypes/ +[doc-numerical]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/datatypes/numerical/ +[doc-uuid]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/datatypes/uuids/ +[doc-collections]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/datatypes/collections/ +[doc-udt]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/datatypes/udts/ +[doc-promise-callback]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/promise-callback/ +[doc-mapper]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/mapper/ +[doc-mapper-start]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/mapper/getting-started/ +[doc-logging]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/logging/ +[faq]: https://docs.datastax.com/en/developer/nodejs-driver/latest/faq/ +[load-balancing]: 
https://docs.datastax.com/en/developer/nodejs-driver/latest/features/tuning-policies/#load-balancing-policy +[retry]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/tuning-policies/#retry-policy +[pooling]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/connection-pooling/ +[batch]: https://docs.datastax.com/en/developer/nodejs-driver/latest/features/batch/ +[old-driver]: https://github.com/jorgebay/node-cassandra-cql +[jorgebay]: https://github.com/jorgebay +[drivers]: https://github.com/datastax +[mailinglist]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/nodejs-driver-user +[jira]: https://datastax-oss.atlassian.net/projects/NODEJS/issues +[streams2]: https://nodejs.org/api/stream.html#stream_class_stream_readable +[cql-udt]: https://cassandra.apache.org/doc/latest/cql/types.html#udts +[dse]: https://www.datastax.com/products/datastax-enterprise +[astra]: https://www.datastax.com/products/datastax-astra \ No newline at end of file diff --git a/node_modules/cassandra-driver/build.yaml b/node_modules/cassandra-driver/build.yaml new file mode 100644 index 0000000..135920d --- /dev/null +++ b/node_modules/cassandra-driver/build.yaml @@ -0,0 +1,70 @@ +schedules: + commit: + # per commit job for all branches to run a subset of configs. + schedule: per_commit + matrix: + exclude: + - nodejs: ['8'] + # Include 2.1, dse-6.0, dse-6.7 + cassandra: ['3.11', '4.0', 'dse-5.1', 'dse-6.8'] + - nodejs: ['10'] + # Include 3.11, dse-6.8 + cassandra: ['2.1', '4.0', 'dse-5.1', 'dse-6.0', 'dse-6.7'] + - nodejs: ['12'] + # Include 4.0, dse-5.1, dse-6.7 + # Examples are run against DSE 6.7 + cassandra: ['2.1', '3.11', 'dse-6.0', 'dse-6.8'] + nightly: + # nightly job for primary branches to run all configs. + schedule: nightly + branches: + # regex matches primary branch format (2.1, 3.x, 3.0.x, 3.1.x, dse, master, etc). 
+ include: ["/((\\d+(\\.[\\dx]+)+)|dse|master)/"] + adhoc: + # adhoc job for non-primary branches that doesn't have a schedule but may be used to run all configs. + schedule: adhoc + enable_pull_requests: true + branches: + exclude: ["/((\\d+(\\.[\\dx]+)+)|dse|master)/"] +nodejs: + - '8' + - '10' + - '12' +os: + - ubuntu/bionic64/nodejs-driver +cassandra: + - '2.1' + - '3.11' + - '4.0' + - 'dse-5.1' + - 'dse-6.0' + - 'dse-6.7' + - 'dse-6.8' +build: + - type: envinject + properties: | + JAVA_HOME=$CCM_JAVA_HOME + CCM_PATH=$HOME/ccm + JUNIT_REPORT_STACK=1 + JUNIT_REPORT_PATH=. + SIMULACRON_PATH=$HOME/simulacron.jar + TEST_TRACE=on + - npm: install + - npm: install mocha-jenkins-reporter@0 + - npm: install kerberos@1 + - npm: install -g eslint@4 + - npm: run eslint + - npm: run ci_jenkins + - script: | + if [ "$CCM_IS_DSE" == "true" ] && [ "${CCM_VERSION:0:3}" == "6.7" ]; then + pushd examples + npm install + ccm create test_samples --dse -v $CCM_VERSION -n 1:0 -b -s + node runner.js + run_rc=$? + ccm remove + popd + exit $run_rc + fi + strict: false + - xunit: "*.xml" diff --git a/node_modules/cassandra-driver/index.d.ts b/node_modules/cassandra-driver/index.d.ts new file mode 100644 index 0000000..cf44d76 --- /dev/null +++ b/node_modules/cassandra-driver/index.d.ts @@ -0,0 +1,414 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as events from 'events'; +import * as tls from 'tls'; +import { URL } from 'url'; +import { auth } from './lib/auth'; +import { policies } from './lib/policies'; +import { types } from './lib/types'; +import { metrics } from './lib/metrics'; +import { tracker } from './lib/tracker'; +import { metadata } from './lib/metadata'; +import { datastax } from './lib/datastax/'; +import Long = types.Long; +import Uuid = types.Uuid; +import graph = datastax.graph; + +// Export imported submodules +export { concurrent } from './lib/concurrent'; +export { mapping } from './lib/mapping'; +export { geometry } from './lib/geometry'; +export { auth, datastax, metadata, metrics, policies, tracker, types }; + +export const version: number; + +export function defaultOptions(): ClientOptions; + +export type ValueCallback = (err: Error, val: T) => void; +export type EmptyCallback = (err: Error) => void; +export type ArrayOrObject = any[]|{[key: string]: any}; + +export class Client extends events.EventEmitter { + hosts: HostMap; + keyspace: string; + metadata: metadata.Metadata; + metrics: metrics.ClientMetrics; + + constructor(options: DseClientOptions); + + connect(): Promise; + + connect(callback: EmptyCallback): void; + + execute(query: string, params?: ArrayOrObject, options?: QueryOptions): Promise; + + execute(query: string, params: ArrayOrObject, options: QueryOptions, callback: ValueCallback): void; + + execute(query: string, params: ArrayOrObject, callback: ValueCallback): void; + + execute(query: string, callback: ValueCallback): void; + + executeGraph( + traversal: string, + parameters: { [name: string]: any } | undefined, + options: GraphQueryOptions, + callback: ValueCallback): void; + + executeGraph( + traversal: string, + parameters: { [name: string]: any } | undefined, + callback: ValueCallback): void; + + executeGraph(traversal: string, callback: ValueCallback): void; + + executeGraph( + traversal: string, + parameters?: { [name: string]: any }, + 
options?: GraphQueryOptions): Promise; + + eachRow(query: string, + params: ArrayOrObject, + options: QueryOptions, + rowCallback: (n: number, row: types.Row) => void, + callback?: ValueCallback): void; + + eachRow(query: string, + params: ArrayOrObject, + rowCallback: (n: number, row: types.Row) => void, + callback?: ValueCallback): void; + + eachRow(query: string, + rowCallback: (n: number, row: types.Row) => void): void; + + stream(query: string, params?: ArrayOrObject, options?: QueryOptions, callback?: EmptyCallback): events.EventEmitter; + + batch( + queries: Array, + options?: QueryOptions): Promise; + + batch( + queries: Array, + options: QueryOptions, + callback: ValueCallback): void; + + batch( + queries: Array, + callback: ValueCallback): void; + + shutdown(): Promise; + + shutdown(callback: EmptyCallback): void; + + getReplicas(keyspace: string, token: Buffer): Host[]; + + getState(): metadata.ClientState; +} + +export interface HostMap extends events.EventEmitter { + length: number; + + forEach(callback: (value: Host, key: string) => void): void; + + get(key: string): Host; + + keys(): string[]; + + values(): Host[]; +} + +export interface Host extends events.EventEmitter { + address: string; + cassandraVersion: string; + datacenter: string; + rack: string; + tokens: string[]; + hostId: types.Uuid; + + canBeConsideredAsUp(): boolean; + + getCassandraVersion(): number[]; + + isUp(): boolean; +} + +export interface ExecutionOptions { + getCaptureStackTrace(): boolean; + + getConsistency(): types.consistencies; + + getCustomPayload(): { [key: string]: any }; + + getFetchSize(): number; + + getFixedHost(): Host; + + getHints(): string[] | string[][]; + + isAutoPage(): boolean; + + isBatchCounter(): boolean; + + isBatchLogged(): boolean; + + isIdempotent(): boolean; + + isPrepared(): boolean; + + isQueryTracing(): boolean; + + getKeyspace(): string; + + getLoadBalancingPolicy(): policies.loadBalancing.LoadBalancingPolicy; + + getPageState(): Buffer; + + 
getRawQueryOptions(): QueryOptions; + + getReadTimeout(): number; + + getRetryPolicy(): policies.retry.RetryPolicy; + + getRoutingKey(): Buffer | Buffer[]; + + getSerialConsistency(): types.consistencies; + + getTimestamp(): number | Long | undefined | null; + + setHints(hints: string[]): void; +} + +export interface ClientOptions { + contactPoints?: string[]; + localDataCenter?: string; + keyspace?: string; + authProvider?: auth.AuthProvider; + credentials?: { + username: string; + password: string; + } + + cloud?: { + secureConnectBundle: string | URL; + }; + + encoding?: { + map?: Function; + set?: Function; + copyBuffer?: boolean; + useUndefinedAsUnset?: boolean; + useBigIntAsLong?: boolean; + useBigIntAsVarint?: boolean; + }; + isMetadataSyncEnabled?: boolean; + maxPrepared?: number; + metrics?: metrics.ClientMetrics; + policies?: { + addressResolution?: policies.addressResolution.AddressTranslator; + loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; + reconnection?: policies.reconnection.ReconnectionPolicy; + retry?: policies.retry.RetryPolicy; + speculativeExecution?: policies.speculativeExecution.SpeculativeExecutionPolicy; + timestampGeneration?: policies.timestampGeneration.TimestampGenerator; + }; + pooling?: { + coreConnectionsPerHost?: { [key: number]: number; }; + heartBeatInterval?: number; + maxRequestsPerConnection?: number; + warmup?: boolean; + }; + prepareOnAllHosts?: boolean; + profiles?: ExecutionProfile[]; + protocolOptions?: { + maxSchemaAgreementWaitSeconds?: number; + maxVersion?: number; + noCompact?: boolean; + port?: number; + }; + promiseFactory?: (handler: (callback: (err: Error, result?: any) => void) => void) => Promise; + queryOptions?: QueryOptions; + refreshSchemaDelay?: number; + rePrepareOnUp?: boolean; + requestTracker?: tracker.RequestTracker; + socketOptions?: { + coalescingThreshold?: number; + connectTimeout?: number; + defunctReadTimeoutThreshold?: number; + keepAlive?: boolean; + keepAliveDelay?: number; + 
readTimeout?: number; + tcpNoDelay?: boolean; + }; + sslOptions?: tls.ConnectionOptions; +} + +export interface QueryOptions { + autoPage?: boolean; + captureStackTrace?: boolean; + consistency?: number; + counter?: boolean; + customPayload?: any; + executionProfile?: string | ExecutionProfile; + fetchSize?: number; + hints?: string[] | string[][]; + host?: Host; + isIdempotent?: boolean; + keyspace?: string; + logged?: boolean; + pageState?: Buffer | string; + prepare?: boolean; + readTimeout?: number; + retry?: policies.retry.RetryPolicy; + routingIndexes?: number[]; + routingKey?: Buffer | Buffer[]; + routingNames?: string[]; + serialConsistency?: number; + timestamp?: number | Long; + traceQuery?: boolean; +} + +export interface DseClientOptions extends ClientOptions { + id?: Uuid; + applicationName?: string; + applicationVersion?: string; + monitorReporting?: { enabled?: boolean }; + graphOptions?: GraphOptions; +} + +export interface GraphQueryOptions extends QueryOptions { + graphLanguage?: string; + graphName?: string; + graphReadConsistency?: types.consistencies; + graphSource?: string; + graphWriteConsistency?: types.consistencies; +} + +export type GraphOptions = { + language?: string; + name?: string; + readConsistency?: types.consistencies; + readTimeout?: number; + source?: string; + writeConsistency?: types.consistencies; +}; + +export class ExecutionProfile { + consistency?: types.consistencies; + loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; + name: string; + readTimeout?: number; + retry?: policies.retry.RetryPolicy; + serialConsistency?: types.consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: types.consistencies; + writeConsistency?: types.consistencies; + }; + + constructor(name: string, options: { + consistency?: types.consistencies; + loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; + readTimeout?: number; + retry?: policies.retry.RetryPolicy; + 
serialConsistency?: types.consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: types.consistencies; + writeConsistency?: types.consistencies; + }; + }); +} + +export namespace errors { + class ArgumentError extends DriverError { + constructor(message: string); + } + + class AuthenticationError extends DriverError { + constructor(message: string); + } + + class BusyConnectionError extends DriverError { + constructor(address: string, maxRequestsPerConnection: number, connectionLength: number); + } + + abstract class DriverError extends Error { + info: string; + + constructor(message: string, constructor?: any); + } + + class DriverInternalError extends DriverError { + constructor(message: string); + } + + class NoHostAvailableError extends DriverError { + innerErrors: any; + + constructor(innerErrors: any, message?: string); + } + + class NotSupportedError extends DriverError { + constructor(message: string); + } + + class OperationTimedOutError extends DriverError { + host?: string; + + constructor(message: string, host?: string); + } + + class ResponseError extends DriverError { + code: number; + + constructor(code: number, message: string); + } +} + +export namespace token { + interface Token { + compare(other: Token): number; + + equals(other: Token): boolean; + + getType(): { code: types.dataTypes, info: any }; + + getValue(): any; + } + + interface TokenRange { + start: Token; + end: Token; + + compare(other: TokenRange): number; + + contains(token: Token): boolean; + + equals(other: TokenRange): boolean; + + isEmpty(): boolean; + + isWrappedAround(): boolean; + + splitEvenly(numberOfSplits: number): TokenRange[]; + + unwrap(): TokenRange[]; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/index.js b/node_modules/cassandra-driver/index.js new file mode 100644 index 0000000..2acf5cc --- /dev/null +++ b/node_modules/cassandra-driver/index.js @@ -0,0 +1,48 @@ +/* + * Copyright 
DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const clientOptions = require('./lib/client-options'); +exports.Client = require('./lib/client'); +exports.ExecutionProfile = require('./lib/execution-profile').ExecutionProfile; +exports.ExecutionOptions = require('./lib/execution-options').ExecutionOptions; +exports.types = require('./lib/types'); +exports.errors = require('./lib/errors'); +exports.policies = require('./lib/policies'); +exports.auth = require('./lib/auth'); +exports.mapping = require('./lib/mapping'); +exports.tracker = require('./lib/tracker'); +exports.metrics = require('./lib/metrics'); +exports.concurrent = require('./lib/concurrent'); + +const token = require('./lib/token'); +exports.token = { + Token: token.Token, + TokenRange: token.TokenRange +}; +const Metadata = require('./lib/metadata'); +exports.metadata = { + Metadata: Metadata +}; +exports.Encoder = require('./lib/encoder'); +exports.geometry = require('./lib/geometry'); +exports.datastax = require('./lib/datastax'); +/** + * Returns a new instance of the default [options]{@link ClientOptions} used by the driver. 
+ */ +exports.defaultOptions = function () { + return clientOptions.defaultOptions(); +}; +exports.version = require('./package.json').version; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/base-dse-authenticator.js b/node_modules/cassandra-driver/lib/auth/base-dse-authenticator.js new file mode 100644 index 0000000..bddc649 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/base-dse-authenticator.js @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const { Authenticator } = require('./provider'); + +const dseAuthenticatorName = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; + +/** + * Base class for Authenticator implementations that want to make use of + * the authentication scheme negotiation in the DseAuthenticator + * @param {String} authenticatorName + * @extends Authenticator + * @constructor + * @ignore + */ +function BaseDseAuthenticator(authenticatorName) { + this.authenticatorName = authenticatorName; +} + +util.inherits(BaseDseAuthenticator, Authenticator); + +/** + * Return a Buffer containing the required SASL mechanism. + * @abstract + * @returns {Buffer} + */ +BaseDseAuthenticator.prototype.getMechanism = function () { + throw new Error('Not implemented'); +}; + +/** + * Return a byte array containing the expected successful server challenge. 
+ * @abstract + * @returns {Buffer} + */ +BaseDseAuthenticator.prototype.getInitialServerChallenge = function () { + throw new Error('Not implemented'); +}; + +/** + * @param {Function} callback + * @override + */ +BaseDseAuthenticator.prototype.initialResponse = function (callback) { + if (!this._isDseAuthenticator()) { + //fallback + return this.evaluateChallenge(this.getInitialServerChallenge(), callback); + } + //send the mechanism as a first auth message + callback(null, this.getMechanism()); +}; + +/** + * Determines if the name of the authenticator matches DSE 5+ + * @protected + * @ignore + */ +BaseDseAuthenticator.prototype._isDseAuthenticator = function () { + return this.authenticatorName === dseAuthenticatorName; +}; + +module.exports = BaseDseAuthenticator; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/dse-gssapi-auth-provider.js b/node_modules/cassandra-driver/lib/auth/dse-gssapi-auth-provider.js new file mode 100644 index 0000000..ac25a51 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/dse-gssapi-auth-provider.js @@ -0,0 +1,231 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const util = require('util'); +const { AuthProvider } = require('./provider'); +const BaseDseAuthenticator = require('./base-dse-authenticator'); +const GssapiClient = require('./gssapi-client'); +const dns = require('dns'); +const utils = require('../utils'); + +const mechanism = utils.allocBufferFromString('GSSAPI'); +const initialServerChallenge = 'GSSAPI-START'; +const emptyBuffer = utils.allocBuffer(0); + +/** + * Creates a new instance of DseGssapiAuthProvider. + * @classdesc + * AuthProvider that provides GSSAPI authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {Object} [gssOptions] GSSAPI authenticator options + * @param {String} [gssOptions.authorizationId] The optional authorization ID. Providing an authorization ID allows the + * currently authenticated user to act as a different user (a.k.a. proxy authentication). + * @param {String} [gssOptions.service] The service to use. Defaults to 'dse'. + * @param {Function} [gssOptions.hostNameResolver] A method to be used to resolve the name of the Cassandra node based + * on the IP Address. Defaults to [lookupServiceResolver]{@link module:auth~DseGssapiAuthProvider.lookupServiceResolver} + * which resolves the FQDN of the provided IP to generate principals in the format of + * dse/example.com@MYREALM.COM. + * Alternatively, you can use [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver} to do a + * reverse DNS lookup or [useIpResolver]{@link module:auth~DseGssapiAuthProvider.useIpResolver} to simply use the IP + * address provided. + * @param {String} [gssOptions.user] DEPRECATED, it will be removed in future versions. For proxy authentication, use + * authorizationId instead. 
+ * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DseGssapiAuthProvider() + * }); + * @alias module:auth~DseGssapiAuthProvider + * @constructor + */ +function DseGssapiAuthProvider(gssOptions) { + //load the kerberos at construction time + try { + // eslint-disable-next-line + this._kerberos = require('kerberos'); + } + catch (err) { + if (err.code === 'MODULE_NOT_FOUND') { + const newErr = new Error('You must install module "kerberos" to use GSSAPI auth provider: ' + + 'https://www.npmjs.com/package/kerberos'); + newErr.code = err.code; + throw newErr; + } + throw err; + } + gssOptions = gssOptions || utils.emptyObject; + this.authorizationId = gssOptions.authorizationId || gssOptions.user; + this.service = gssOptions.service; + this.hostNameResolver = gssOptions.hostNameResolver || DseGssapiAuthProvider.lookupServiceResolver; +} + +util.inherits(DseGssapiAuthProvider, AuthProvider); + +/** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. + * @override + * @returns {Authenticator} + */ +DseGssapiAuthProvider.prototype.newAuthenticator = function (endpoint, name) { + let address = endpoint; + if (endpoint.indexOf(':') > 0) { + address = endpoint.split(':')[0]; + } + return new GssapiAuthenticator( + this._kerberos, address, name, this.authorizationId, this.service, this.hostNameResolver); +}; + +/** + * Performs a lookupService query that resolves an IPv4 or IPv6 address to a hostname. This ultimately makes a + * getnameinfo() system call which depends on the OS to do hostname resolution. + *

+ * Note: Depends on dns.lookupService which was added in 0.12. For older versions falls back on + * [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver}. + * + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ +DseGssapiAuthProvider.lookupServiceResolver = function (ip, callback) { + if (!dns.lookupService) { + return DseGssapiAuthProvider.reverseDnsResolver(ip, callback); + } + dns.lookupService(ip, 0, function (err, hostname) { + if (err) { + return callback(err); + } + if (!hostname) { + //fallback to ip + return callback(null, ip); + } + callback(null, hostname); + }); +}; + +/** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to a hostname. + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ +DseGssapiAuthProvider.reverseDnsResolver = function (ip, callback) { + dns.reverse(ip, function (err, names) { + if (err) { + return callback(err); + } + if (!names || !names.length) { + //fallback to ip + return callback(null, ip); + } + callback(null, names[0]); + }); +}; + +/** + * Effectively a no op operation, returns the IP address provided. + * @param {String} ip IP address to use. + * @param {Function} callback The callback function with err and hostname arguments. + */ +DseGssapiAuthProvider.useIpResolver = function (ip, callback) { + callback(null, ip); +}; + +/** + * @param {Object} kerberosModule + * @param {String} address Host address. 
+ * @param {String} authenticatorName + * @param {String} authorizationId + * @param {String} service + * @param {Function} hostNameResolver + * @extends Authenticator + * @private + */ +function GssapiAuthenticator(kerberosModule, address, authenticatorName, authorizationId, service, hostNameResolver) { + BaseDseAuthenticator.call(this, authenticatorName); + this.authorizationId = authorizationId; + this.address = address; + this.client = GssapiClient.createNew(kerberosModule, authorizationId, service); + this.hostNameResolver = hostNameResolver; +} + +//noinspection JSCheckFunctionSignatures +util.inherits(GssapiAuthenticator, BaseDseAuthenticator); + +GssapiAuthenticator.prototype.getMechanism = function () { + return mechanism; +}; + +GssapiAuthenticator.prototype.getInitialServerChallenge = function () { + return utils.allocBufferFromString(initialServerChallenge); +}; + +//noinspection JSUnusedGlobalSymbols +/** + * Obtain an initial response token for initializing the SASL handshake. + * @param {Function} callback + */ +GssapiAuthenticator.prototype.initialResponse = function (callback) { + const self = this; + //initialize the GSS client + let host = this.address; + utils.series([ + function getHostName(next) { + self.hostNameResolver(self.address, function (err, name) { + if (!err && name) { + host = name; + } + next(); + }); + }, + function initClient(next) { + self.client.init(host, function (err) { + if (err) { + return next(err); + } + if (!self._isDseAuthenticator()) { + //fallback + return self.evaluateChallenge(self.getInitialServerChallenge(), next); + } + //send the mechanism as a first auth message + next(null, self.getMechanism()); + }); + } + ], callback); +}; + +/** + * Evaluates a challenge received from the Server. Generally, this method should callback with + * no error and no additional params when authentication is complete from the client perspective. 
+ * @param {Buffer} challenge + * @param {Function} callback + * @override + */ +GssapiAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { + if (!challenge || challenge.toString() === initialServerChallenge) { + challenge = emptyBuffer; + } + this.client.evaluateChallenge(challenge, callback); +}; + +/** + * @override + */ +GssapiAuthenticator.prototype.onAuthenticationSuccess = function (token) { + this.client.shutdown(function noop() { }); +}; + + +module.exports = DseGssapiAuthProvider; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/dse-plain-text-auth-provider.js b/node_modules/cassandra-driver/lib/auth/dse-plain-text-auth-provider.js new file mode 100644 index 0000000..64a0f64 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/dse-plain-text-auth-provider.js @@ -0,0 +1,110 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const { AuthProvider } = require('./provider'); +const BaseDseAuthenticator = require('./base-dse-authenticator'); +const utils = require('../utils'); + +const mechanism = utils.allocBufferFromString('PLAIN'); +const separatorBuffer = utils.allocBufferFromArray([0]); +const initialServerChallenge = 'PLAIN-START'; + +/** + * Creates a new instance of DsePlainTextAuthProvider. 
+ * @classdesc + * AuthProvider that provides plain text authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {String} username The username; cannot be null. + * @param {String} password The password; cannot be null. + * @param {String} [authorizationId] The optional authorization ID. Providing an authorization ID allows the currently + * authenticated user to act as a different user (a.k.a. proxy authentication). + * @extends AuthProvider + * @alias module:auth~DsePlainTextAuthProvider + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); + * }); + * @constructor + */ +function DsePlainTextAuthProvider(username, password, authorizationId) { + if (typeof username !== 'string' || typeof password !== 'string') { + // Validate for null and undefined + throw new TypeError('Username and password must be a string'); + } + this.username = username; + this.password = password; + this.authorizationId = authorizationId; +} + +util.inherits(DsePlainTextAuthProvider, AuthProvider); + +/** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. 
+ * @override + * @returns {Authenticator} + */ +DsePlainTextAuthProvider.prototype.newAuthenticator = function (endpoint, name) { + return new PlainTextAuthenticator(name, this.username, this.password, this.authorizationId); +}; + +/** + * @param {String} authenticatorName + * @param {String} authenticatorId + * @param {String} password + * @param {String} authorizationId + * @extends BaseDseAuthenticator + * @constructor + * @private + */ +function PlainTextAuthenticator(authenticatorName, authenticatorId, password, authorizationId) { + BaseDseAuthenticator.call(this, authenticatorName); + this.authenticatorId = utils.allocBufferFromString(authenticatorId); + this.password = utils.allocBufferFromString(password); + this.authorizationId = utils.allocBufferFromString(authorizationId || ''); +} + +util.inherits(PlainTextAuthenticator, BaseDseAuthenticator); + +/** @override */ +PlainTextAuthenticator.prototype.getMechanism = function () { + return mechanism; +}; + +/** @override */ +PlainTextAuthenticator.prototype.getInitialServerChallenge = function () { + return utils.allocBufferFromString(initialServerChallenge); +}; + +/** @override */ +PlainTextAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { + if (!challenge || challenge.toString() !== initialServerChallenge) { + return callback(new Error('Incorrect SASL challenge from server')); + } + // The SASL plain text format is authorizationId 0 username 0 password + callback(null, Buffer.concat([ + this.authorizationId, + separatorBuffer, + this.authenticatorId, + separatorBuffer, + this.password + ])); +}; + +module.exports = DsePlainTextAuthProvider; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/gssapi-client.js b/node_modules/cassandra-driver/lib/auth/gssapi-client.js new file mode 100644 index 0000000..92af818 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/gssapi-client.js @@ -0,0 +1,155 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const util = require('util'); +const utils = require('../utils'); + +/** + * GSSAPI Client interface. + * @ignore + */ +class GssapiClient { + /** + * @param {String} [authorizationId] + * @param {String} [service] + */ + constructor(authorizationId, service) { + this.authorizationId = authorizationId; + this.service = service !== undefined ? service : 'dse'; + } + + /** + * @abstract + * @param {String} host Host name or ip + * @param {Function} callback + */ + init(host, callback) { + throw new Error('Not implemented'); + } + + /** + * @param {Buffer} challenge + * @param {Function} callback + * @abstract + */ + evaluateChallenge(challenge, callback) { + throw new Error('Not implemented'); + } + + /** + * @abstract + * @param {Function} [callback] + */ + shutdown(callback) { + throw new Error('Not implemented'); + } + + /** + * Factory to get the actual implementation of GSSAPI (unix or win) + * @param {Object} kerberosModule Kerberos client library dependency + * @param {String} [authorizationId] An identity to act as (for proxy authentication). + * @param {String} [service] The service to use. (defaults to 'dse') + * @returns GssapiClient + */ + static createNew(kerberosModule, authorizationId, service) { + return new StandardGssClient(kerberosModule, authorizationId, service); + } +} + +/** + * GSSAPI Client implementation using kerberos module. 
+ * @ignore + */ +class StandardGssClient extends GssapiClient { + constructor(kerberosModule, authorizationId, service) { + if (typeof kerberosModule.initializeClient !== 'function') { + throw new Error('The driver expects version 1.x of the kerberos library'); + } + + super(authorizationId, service); + this.kerberos = kerberosModule; + this.transitionIndex = 0; + } + + init(host, callback) { + this.host = host; + let uri = this.service; + if (this.host) { + //For the principal "dse/cassandra1.datastax.com@DATASTAX.COM" + //the expected uri is: "dse@cassandra1.datastax.com" + uri = util.format("%s@%s", this.service, this.host); + } + const options = { + gssFlags: this.kerberos.GSS_C_MUTUAL_FLAG //authenticate itself flag + }; + this.kerberos.initializeClient(uri, options, (err, kerberosClient) => { + if (err) { + return callback(err); + } + this.kerberosClient = kerberosClient; + callback(); + }); + } + + /** @override */ + evaluateChallenge(challenge, callback) { + this['transition' + this.transitionIndex](challenge, (err, response) => { + if (err) { + return callback(err); + } + this.transitionIndex++; + callback(null, response ? 
utils.allocBufferFromString(response, 'base64') : utils.allocBuffer(0)); + }); + } + + transition0(challenge, callback) { + this.kerberosClient.step('', callback); + } + + transition1(challenge, callback) { + const charPointerChallenge = challenge.toString('base64'); + this.kerberosClient.step(charPointerChallenge, callback); + } + + transition2(challenge, callback) { + this.kerberosClient.unwrap(challenge.toString('base64'), (err, response) => { + if (err) { + return callback(err, false); + } + const cb = function (err, wrapped) { + if (err) { + return callback(err); + } + callback(null, wrapped); + }; + if (this.authorizationId !== undefined) { + this.kerberosClient.wrap(response, { user: this.authorizationId }, cb); + } + else { + this.kerberosClient.wrap(response, null, cb); + } + }); + } + + shutdown(callback) { + this.kerberosClient = null; + callback(); + } +} + +module.exports = GssapiClient; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/index.d.ts b/node_modules/cassandra-driver/lib/auth/index.d.ts new file mode 100644 index 0000000..c1c3741 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/index.d.ts @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export namespace auth { + interface Authenticator { + initialResponse(callback: Function): void; + + evaluateChallenge(challenge: Buffer, callback: Function): void; + + onAuthenticationSuccess(token?: Buffer): void; + } + + interface AuthProvider { + newAuthenticator(endpoint: string, name: string): Authenticator; + } + + class PlainTextAuthProvider implements AuthProvider { + constructor(username: string, password: string); + + newAuthenticator(endpoint: string, name: string): Authenticator; + } + + class DsePlainTextAuthProvider implements AuthProvider { + constructor(username: string, password: string, authorizationId?: string); + + newAuthenticator(endpoint: string, name: string): Authenticator; + } + + class DseGssapiAuthProvider implements AuthProvider { + constructor(gssOptions?: { authorizationId?: string, service?: string, hostNameResolver?: Function }); + + newAuthenticator(endpoint: string, name: string): Authenticator; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/auth/index.js b/node_modules/cassandra-driver/lib/auth/index.js new file mode 100644 index 0000000..b79b14e --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/index.js @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * DSE Authentication module. + *

+ * Contains the classes used for connecting to a DSE cluster secured with DseAuthenticator. + *

+ * @module auth + */ + +const { Authenticator, AuthProvider } = require('./provider'); +const { PlainTextAuthProvider } = require('./plain-text-auth-provider'); +const DseGssapiAuthProvider = require('./dse-gssapi-auth-provider'); +const DsePlainTextAuthProvider = require('./dse-plain-text-auth-provider'); +const NoAuthProvider = require('./no-auth-provider'); + +module.exports = { + Authenticator, + AuthProvider, + DseGssapiAuthProvider, + DsePlainTextAuthProvider, + NoAuthProvider, + PlainTextAuthProvider +}; diff --git a/node_modules/cassandra-driver/lib/auth/no-auth-provider.js b/node_modules/cassandra-driver/lib/auth/no-auth-provider.js new file mode 100644 index 0000000..7dbcd9e --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/no-auth-provider.js @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const { AuthProvider, Authenticator } = require('./provider'); +const { PlainTextAuthenticator } = require('./plain-text-auth-provider'); +const errors = require('../errors'); + +const dseAuthenticator = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; + +/** + * Internal authentication provider that is used when no provider has been set by the user. 
+ * @ignore + */ +class NoAuthProvider extends AuthProvider { + newAuthenticator(endpoint, name) { + if (name === dseAuthenticator) { + // Try to use transitional mode + return new TransitionalModePlainTextAuthenticator(); + } + + // Use an authenticator that doesn't allow auth flow + return new NoAuthAuthenticator(endpoint); + } +} + +/** + * An authenticator throws an error when authentication flow is started. + * @ignore + */ +class NoAuthAuthenticator extends Authenticator { + constructor(endpoint) { + super(); + this.endpoint = endpoint; + } + + initialResponse(callback) { + callback(new errors.AuthenticationError( + `Host ${this.endpoint} requires authentication, but no authenticator found in the options`)); + } +} + +/** + * Authenticator that accounts for DSE authentication configured with transitional mode: normal. + * + * In this situation, the client is allowed to connect without authentication, but DSE + * would still send an AUTHENTICATE response. This Authenticator handles this situation + * by sending back a dummy credential. + */ +class TransitionalModePlainTextAuthenticator extends PlainTextAuthenticator { + constructor() { + super('', ''); + } +} + +module.exports = NoAuthProvider; diff --git a/node_modules/cassandra-driver/lib/auth/plain-text-auth-provider.js b/node_modules/cassandra-driver/lib/auth/plain-text-auth-provider.js new file mode 100644 index 0000000..f5241f7 --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/plain-text-auth-provider.js @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); + +const provider = require('./provider.js'); +const utils = require('../utils'); +const AuthProvider = provider.AuthProvider; +const Authenticator = provider.Authenticator; +/** + * Creates a new instance of the Authenticator provider + * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when + * connecting to a host. + * @extends module:auth~AuthProvider + * @example + * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); + * //Set the auth provider in the clientOptions when creating the Client instance + * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); + * @param {String} username User name in plain text + * @param {String} password Password in plain text + * @alias module:auth~PlainTextAuthProvider + * @constructor + */ +function PlainTextAuthProvider(username, password) { + this.username = username; + this.password = password; +} + +util.inherits(PlainTextAuthProvider, AuthProvider); + +/** + * Returns a new [Authenticator]{@link module:auth~Authenticator} instance to be used for plain text authentication. 
+ * @override + * @returns {Authenticator} + */ +PlainTextAuthProvider.prototype.newAuthenticator = function () { + return new PlainTextAuthenticator(this.username, this.password); +}; + +/** + * @ignore + */ +function PlainTextAuthenticator(username, password) { + this.username = username; + this.password = password; +} + +util.inherits(PlainTextAuthenticator, Authenticator); + +PlainTextAuthenticator.prototype.initialResponse = function (callback) { + const initialToken = Buffer.concat([ + utils.allocBufferFromArray([0]), + utils.allocBufferFromString(this.username, 'utf8'), + utils.allocBufferFromArray([0]), + utils.allocBufferFromString(this.password, 'utf8') + ]); + callback(null, initialToken); +}; + +PlainTextAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { + //noop + callback(); +}; + +module.exports = { + PlainTextAuthenticator, + PlainTextAuthProvider, +}; diff --git a/node_modules/cassandra-driver/lib/auth/provider.js b/node_modules/cassandra-driver/lib/auth/provider.js new file mode 100644 index 0000000..d4bf9ed --- /dev/null +++ b/node_modules/cassandra-driver/lib/auth/provider.js @@ -0,0 +1,79 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +/** + * @classdesc Provides [Authenticator]{@link module:auth~Authenticator} instances to be used when connecting to a host. 
+ * @constructor + * @abstract + * @alias module:auth~AuthProvider + */ +function AuthProvider() { + +} + +/** + * Returns an [Authenticator]{@link module:auth~Authenticator} instance to be used when connecting to a host. + * @param {String} endpoint The ip address and port number in the format ip:port + * @param {String} name Authenticator name + * @abstract + * @returns {Authenticator} + */ +AuthProvider.prototype.newAuthenticator = function (endpoint, name) { + throw new Error('This is an abstract class, you must implement newAuthenticator method or ' + + 'use another auth provider that inherits from this class'); +}; + +/** + * @class + * @classdesc Handles SASL authentication with Cassandra servers. + * Each time a new connection is created and the server requires authentication, + * a new instance of this class will be created by the corresponding. + * @constructor + * @alias module:auth~Authenticator + */ +function Authenticator() { + +} + +/** + * Obtain an initial response token for initializing the SASL handshake. + * @param {Function} callback + */ +Authenticator.prototype.initialResponse = function (callback) { + callback(new Error('Not implemented')); +}; + +/** + * Evaluates a challenge received from the Server. Generally, this method should callback with + * no error and no additional params when authentication is complete from the client perspective. + * @param {Buffer} challenge + * @param {Function} callback + */ +Authenticator.prototype.evaluateChallenge = function (challenge, callback) { + callback(new Error('Not implemented')); +}; + +/** + * Called when authentication is successful with the last information + * optionally sent by the server. 
+ * @param {Buffer} [token] + */ +Authenticator.prototype.onAuthenticationSuccess = function (token) { + +}; + +exports.AuthProvider = AuthProvider; +exports.Authenticator = Authenticator; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/client-options.js b/node_modules/cassandra-driver/lib/client-options.js new file mode 100644 index 0000000..029e5c8 --- /dev/null +++ b/node_modules/cassandra-driver/lib/client-options.js @@ -0,0 +1,361 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const util = require('util'); +const policies = require('./policies'); +const types = require('./types'); +const utils = require('./utils'); +const tracker = require('./tracker'); +const metrics = require('./metrics'); +const auth = require('./auth'); + +/** Core connections per host for protocol versions 1 and 2 */ +const coreConnectionsPerHostV2 = { + [types.distance.local]: 2, + [types.distance.remote]: 1, + [types.distance.ignored]: 0 +}; + +/** Core connections per host for protocol version 3 and above */ +const coreConnectionsPerHostV3 = { + [types.distance.local]: 1, + [types.distance.remote]: 1, + [types.distance.ignored]: 0 +}; + +/** Default maxRequestsPerConnection value for protocol v1 and v2 */ +const maxRequestsPerConnectionV2 = 128; + +/** Default maxRequestsPerConnection value for protocol v3+ */ +const maxRequestsPerConnectionV3 = 2048; + +const continuousPageUnitBytes = 'bytes'; +const continuousPageDefaultSize = 5000; +const continuousPageDefaultHighWaterMark = 10000; + +/** + * @returns {ClientOptions} + */ +function defaultOptions () { + return ({ + policies: { + addressResolution: policies.defaultAddressTranslator(), + loadBalancing: policies.defaultLoadBalancingPolicy(), + reconnection: policies.defaultReconnectionPolicy(), + retry: policies.defaultRetryPolicy(), + speculativeExecution: policies.defaultSpeculativeExecutionPolicy(), + timestampGeneration: policies.defaultTimestampGenerator() + }, + queryOptions: { + fetchSize: 5000, + prepare: false, + captureStackTrace: false + }, + protocolOptions: { + port: 9042, + maxSchemaAgreementWaitSeconds: 10, + maxVersion: 0, + noCompact: false + }, + pooling: { + heartBeatInterval: 30000, + warmup: true + }, + socketOptions: { + connectTimeout: 5000, + defunctReadTimeoutThreshold: 64, + keepAlive: true, + keepAliveDelay: 0, + readTimeout: 12000, + tcpNoDelay: true, + coalescingThreshold: 65536 + }, + authProvider: null, + requestTracker: null, + metrics: new 
metrics.DefaultMetrics(), + maxPrepared: 500, + refreshSchemaDelay: 1000, + isMetadataSyncEnabled: true, + prepareOnAllHosts: true, + rePrepareOnUp: true, + encoding: { + copyBuffer: true, + useUndefinedAsUnset: true + }, + monitorReporting: { + enabled: true + } + }); +} + +/** + * Extends and validates the user options + * @param {Object} [baseOptions] The source object instance that will be overridden + * @param {Object} userOptions + * @returns {Object} + */ +function extend(baseOptions, userOptions) { + if (arguments.length === 1) { + userOptions = arguments[0]; + baseOptions = {}; + } + const options = utils.deepExtend(baseOptions, defaultOptions(), userOptions); + + if (!options.cloud) { + if (!Array.isArray(options.contactPoints) || options.contactPoints.length === 0) { + throw new TypeError('Contacts points are not defined.'); + } + + for (let i = 0; i < options.contactPoints.length; i++) { + const hostName = options.contactPoints[i]; + if (!hostName) { + throw new TypeError(util.format('Contact point %s (%s) is not a valid host name, ' + + 'the following values are valid contact points: ipAddress, hostName or ipAddress:port', i, hostName)); + } + } + + options.sni = undefined; + } else { + validateCloudOptions(options); + } + + if (!options.logEmitter) { + options.logEmitter = function () {}; + } + if (!options.queryOptions) { + throw new TypeError('queryOptions not defined in options'); + } + + if (options.requestTracker !== null && !(options.requestTracker instanceof tracker.RequestTracker)) { + throw new TypeError('requestTracker must be an instance of RequestTracker'); + } + + if (!(options.metrics instanceof metrics.ClientMetrics)) { + throw new TypeError('metrics must be an instance of ClientMetrics'); + } + + validatePoliciesOptions(options.policies); + + validateProtocolOptions(options.protocolOptions); + + validateSocketOptions(options.socketOptions); + + validateAuthenticationOptions(options); + + options.encoding = options.encoding || {}; + + 
validateEncodingOptions(options.encoding); + + if (options.profiles && !Array.isArray(options.profiles)) { + throw new TypeError('profiles must be an Array of ExecutionProfile instances'); + } + + validateApplicationInfo(options); + + validateMonitorReporting(options); + + return options; +} + +/** + * Validates the options to connect to a cloud instance. + * @private + */ +function validateCloudOptions(options) { + const bundle = options.cloud.secureConnectBundle; + + // eslint-disable-next-line no-undef + if (!(typeof bundle === 'string' || (typeof URL !== 'undefined' && bundle instanceof URL))) { + throw new TypeError('secureConnectBundle in cloud options must be of type string'); + } + + if (options.contactPoints) { + throw new TypeError('Contact points can not be defined when cloud settings are provided'); + } + + if (options.sslOptions) { + throw new TypeError('SSL options can not be defined when cloud settings are provided'); + } +} + +/** + * Validates the policies from the client options. 
+ * @param {ClientOptions.policies} policiesOptions + * @private + */ +function validatePoliciesOptions(policiesOptions) { + if (!policiesOptions) { + throw new TypeError('policies not defined in options'); + } + if (!(policiesOptions.loadBalancing instanceof policies.loadBalancing.LoadBalancingPolicy)) { + throw new TypeError('Load balancing policy must be an instance of LoadBalancingPolicy'); + } + if (!(policiesOptions.reconnection instanceof policies.reconnection.ReconnectionPolicy)) { + throw new TypeError('Reconnection policy must be an instance of ReconnectionPolicy'); + } + if (!(policiesOptions.retry instanceof policies.retry.RetryPolicy)) { + throw new TypeError('Retry policy must be an instance of RetryPolicy'); + } + if (!(policiesOptions.addressResolution instanceof policies.addressResolution.AddressTranslator)) { + throw new TypeError('Address resolution policy must be an instance of AddressTranslator'); + } + if (policiesOptions.timestampGeneration !== null && + !(policiesOptions.timestampGeneration instanceof policies.timestampGeneration.TimestampGenerator)) { + throw new TypeError('Timestamp generation policy must be an instance of TimestampGenerator'); + } +} + +/** + * Validates the protocol options. + * @param {ClientOptions.protocolOptions} protocolOptions + * @private + */ +function validateProtocolOptions(protocolOptions) { + if (!protocolOptions) { + throw new TypeError('protocolOptions not defined in options'); + } + const version = protocolOptions.maxVersion; + if (version && (typeof version !== 'number' || !types.protocolVersion.isSupported(version))) { + throw new TypeError(util.format('protocolOptions.maxVersion provided (%s) is invalid', version)); + } +} + +/** + * Validates the socket options. 
+ * @param {ClientOptions.socketOptions} socketOptions + * @private + */ +function validateSocketOptions(socketOptions) { + if (!socketOptions) { + throw new TypeError('socketOptions not defined in options'); + } + if (typeof socketOptions.readTimeout !== 'number') { + throw new TypeError('socketOptions.readTimeout must be a Number'); + } + if (typeof socketOptions.coalescingThreshold !== 'number' || socketOptions.coalescingThreshold <= 0) { + throw new TypeError('socketOptions.coalescingThreshold must be a positive Number'); + } +} + +/** + * Validates authentication provider and credentials. + * @param {ClientOptions} options + * @private + */ +function validateAuthenticationOptions(options) { + if (!options.authProvider) { + const credentials = options.credentials; + if (credentials) { + if (typeof credentials.username !== 'string' || typeof credentials.password !== 'string') { + throw new TypeError('credentials username and password must be a string'); + } + + options.authProvider = new auth.PlainTextAuthProvider(credentials.username, credentials.password); + } else { + options.authProvider = new auth.NoAuthProvider(); + } + } else if (!(options.authProvider instanceof auth.AuthProvider)) { + throw new TypeError('options.authProvider must be an instance of AuthProvider'); + } +} + +/** + * Validates the encoding options. 
+ * @param {ClientOptions.encoding} encodingOptions + * @private + */ +function validateEncodingOptions(encodingOptions) { + if (encodingOptions.map) { + const mapConstructor = encodingOptions.map; + if (typeof mapConstructor !== 'function' || + typeof mapConstructor.prototype.forEach !== 'function' || + typeof mapConstructor.prototype.set !== 'function') { + throw new TypeError('Map constructor not valid'); + } + } + + if (encodingOptions.set) { + const setConstructor = encodingOptions.set; + if (typeof setConstructor !== 'function' || + typeof setConstructor.prototype.forEach !== 'function' || + typeof setConstructor.prototype.add !== 'function') { + throw new TypeError('Set constructor not valid'); + } + } + + if ((encodingOptions.useBigIntAsLong || encodingOptions.useBigIntAsVarint) && typeof BigInt === 'undefined') { + throw new TypeError('BigInt is not supported by the JavaScript engine'); + } +} + +function validateApplicationInfo(options) { + function validateString(key) { + const str = options[key]; + + if (str !== null && str !== undefined && typeof str !== 'string') { + throw new TypeError(`${key} should be a String`); + } + } + + validateString('applicationName'); + validateString('applicationVersion'); + + if (options.id !== null && options.id !== undefined && !(options.id instanceof types.Uuid)) { + throw new TypeError('Client id must be a Uuid'); + } +} + +function validateMonitorReporting(options) { + const o = options.monitorReporting; + if (o === null || typeof o !== 'object') { + throw new TypeError(`Monitor reporting must be an object, obtained: ${o}`); + } +} + +/** + * Sets the default options that depend on the protocol version and other metadata. 
+ * @param {Client} client + */ +function setMetadataDependent(client) { + const version = client.controlConnection.protocolVersion; + let coreConnectionsPerHost = coreConnectionsPerHostV3; + let maxRequestsPerConnection = maxRequestsPerConnectionV3; + + if (!types.protocolVersion.uses2BytesStreamIds(version)) { + coreConnectionsPerHost = coreConnectionsPerHostV2; + maxRequestsPerConnection = maxRequestsPerConnectionV2; + } + + if (client.options.queryOptions.consistency === undefined) { + client.options.queryOptions.consistency = + client.metadata.isDbaas() ? types.consistencies.localQuorum : types.consistencies.localOne; + } + + client.options.pooling = utils.deepExtend( + {}, { coreConnectionsPerHost, maxRequestsPerConnection }, client.options.pooling); +} + +exports.extend = extend; +exports.defaultOptions = defaultOptions; +exports.coreConnectionsPerHostV2 = coreConnectionsPerHostV2; +exports.coreConnectionsPerHostV3 = coreConnectionsPerHostV3; +exports.maxRequestsPerConnectionV2 = maxRequestsPerConnectionV2; +exports.maxRequestsPerConnectionV3 = maxRequestsPerConnectionV3; +exports.setMetadataDependent = setMetadataDependent; +exports.continuousPageUnitBytes = continuousPageUnitBytes; +exports.continuousPageDefaultSize = continuousPageDefaultSize; +exports.continuousPageDefaultHighWaterMark = continuousPageDefaultHighWaterMark; diff --git a/node_modules/cassandra-driver/lib/client.js b/node_modules/cassandra-driver/lib/client.js new file mode 100644 index 0000000..fae8ff8 --- /dev/null +++ b/node_modules/cassandra-driver/lib/client.js @@ -0,0 +1,1180 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const events = require('events'); +const util = require('util'); + +const utils = require('./utils.js'); +const errors = require('./errors.js'); +const types = require('./types'); +const { ProfileManager } = require('./execution-profile'); +const requests = require('./requests'); +const clientOptions = require('./client-options'); +const ClientState = require('./metadata/client-state'); +const description = require('../package.json').description; +const { version } = require('../package.json'); +const { DefaultExecutionOptions } = require('./execution-options'); +const ControlConnection = require('./control-connection'); +const RequestHandler = require('./request-handler'); +const PrepareHandler = require('./prepare-handler'); +const InsightsClient = require('./insights-client'); +const cloud = require('./datastax/cloud'); +const GraphExecutor = require('./datastax/graph/graph-executor'); +const promiseUtils = require('./promise-utils'); + +/** + * Max amount of pools being warmup in parallel, when warmup is enabled + * @private + */ +const warmupLimit = 32; + +/** + * Client options. + *

While the driver provides lots of extensibility points and configurability, few client options are required.

+ *

Default values for all settings are designed to be suitable for the majority of use cases, you should avoid + * fine tuning it when not needed.

+ *

See [Client constructor]{@link Client} documentation for recommended options.

+ * @typedef {Object} ClientOptions + * @property {Array.} contactPoints + * Array of addresses or host names of the nodes to add as contact points. + *

+ * Contact points are addresses of Cassandra nodes that the driver uses to discover the cluster topology. + *

+ *

+ * Only one contact point is required (the driver will retrieve the address of the other nodes automatically), + * but it is usually a good idea to provide more than one contact point, because if that single contact point is + * unavailable, the driver will not be able to initialize correctly. + *

+ * @property {String} [localDataCenter] The local data center to use. + *

+ * If using DCAwareRoundRobinPolicy (default), this option is required and only hosts from this data center are + * connected to and used in query plans. + *

+ * @property {String} [keyspace] The logged keyspace for all the connections created within the {@link Client} instance. + * @property {Object} [credentials] An object containing the username and password for plain-text authentication. + * It configures the authentication provider to be used against Apache Cassandra's PasswordAuthenticator or DSE's + * DseAuthenticator, when default auth scheme is plain-text. + *

+ * Note that you should configure either credentials or authProvider to connect to an + * auth-enabled cluster, but not both. + *

+ * @property {String} [credentials.username] The username to use for plain-text authentication. + * @property {String} [credentials.password] The password to use for plain-text authentication. + * @property {Uuid} [id] A unique identifier assigned to a {@link Client} object, that will be communicated to the + * server (DSE 6.0+) to identify the client instance created with this options. When not defined, the driver will + * generate a random identifier. + * @property {String} [applicationName] An optional setting identifying the name of the application using + * the {@link Client} instance. + *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {String} [applicationVersion] An optional setting identifying the version of the application using + * the {@link Client} instance. + *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {Object} [monitorReporting] Options for reporting mechanism from the client to the DSE server, for + * versions that support it. + * @property {Boolean} [monitorReporting.enabled=true] Determines whether the reporting mechanism is enabled. + * Defaults to true. + * @property {Object} [cloud] The options to connect to a cloud instance. + * @property {String|URL} cloud.secureConnectBundle Determines the file path for the credentials file bundle. + * @property {Number} [refreshSchemaDelay] The default window size in milliseconds used to debounce node list and schema + * refresh metadata requests. Default: 1000. + * @property {Boolean} [isMetadataSyncEnabled] Determines whether client-side schema metadata retrieval and update is + * enabled. + *

Setting this value to false will cause keyspace information not to be automatically loaded, affecting + * replica calculation per token in the different keyspaces. When disabling metadata synchronization, use + * [Metadata.refreshKeyspaces()]{@link module:metadata~Metadata#refreshKeyspaces} to keep keyspace information up to + * date or token-awareness will not work correctly.

+ * Default: true. + * @property {Boolean} [prepareOnAllHosts] Determines if the driver should prepare queries on all hosts in the cluster. + * Default: true. + * @property {Boolean} [rePrepareOnUp] Determines if the driver should re-prepare all cached prepared queries on a + * host when it marks it back up. + * Default: true. + * @property {Number} [maxPrepared] Determines the maximum amount of different prepared queries before evicting items + * from the internal cache. Reaching a high threshold hints that the queries are not being reused, like when + * hard-coding parameter values inside the queries. + * Default: 500. + * @property {Object} [policies] + * @property {LoadBalancingPolicy} [policies.loadBalancing] The load balancing policy instance to be used to determine + * the coordinator per query. + * @property {RetryPolicy} [policies.retry] The retry policy. + * @property {ReconnectionPolicy} [policies.reconnection] The reconnection policy to be used. + * @property {AddressTranslator} [policies.addressResolution] The address resolution policy. + * @property {SpeculativeExecutionPolicy} [policies.speculativeExecution] The SpeculativeExecutionPolicy + * instance to be used to determine if the client should send speculative queries when the selected host takes more + * time than expected. + *

+ * Default: [NoSpeculativeExecutionPolicy]{@link + * module:policies/speculativeExecution~NoSpeculativeExecutionPolicy} + *

+ * @property {TimestampGenerator} [policies.timestampGeneration] The client-side + * [query timestamp generator]{@link module:policies/timestampGeneration~TimestampGenerator}. + *

+ * Default: [MonotonicTimestampGenerator]{@link module:policies/timestampGeneration~MonotonicTimestampGenerator} + * + *

+ *

Use null to disable client-side timestamp generation.

+ * @property {QueryOptions} [queryOptions] Default options for all queries. + * @property {Object} [pooling] Pooling options. + * @property {Number} [pooling.heartBeatInterval] The amount of idle time in milliseconds that has to pass before the + * driver issues a request on an active connection to avoid idle time disconnections. Default: 30000. + * @property {Object} [pooling.coreConnectionsPerHost] Associative array containing amount of connections per host + * distance. + * @property {Number} [pooling.maxRequestsPerConnection] The maximum number of requests per connection. The default + * value is: + *
    + *
  • For modern protocol versions (v3 and above): 2048
  • + *
  • For older protocol versions (v1 and v2): 128
  • + *
+ * @property {Boolean} [pooling.warmup] Determines if all connections to hosts in the local datacenter must be opened on + * connect. Default: true. + * @property {Object} [protocolOptions] + * @property {Number} [protocolOptions.port] The port to use to connect to the Cassandra host. If not set through this + * method, the default port (9042) will be used instead. + * @property {Number} [protocolOptions.maxSchemaAgreementWaitSeconds] The maximum time in seconds to wait for schema + * agreement between nodes before returning from a DDL query. Default: 10. + * @property {Number} [protocolOptions.maxVersion] When set, it limits the maximum protocol version used to connect to + * the nodes. + * Useful for using the driver against a cluster that contains nodes with different major/minor versions of Cassandra. + * @property {Boolean} [protocolOptions.noCompact] When set to true, enables the NO_COMPACT startup option. + *

+ * When this option is supplied SELECT, UPDATE, DELETE, and BATCH + * statements on COMPACT STORAGE tables function in "compatibility" mode which allows seeing these tables + * as if they were "regular" CQL tables. + *

+ *

+ * This option only affects interactions with tables using COMPACT STORAGE and is only + * supported by C* 3.0.16+, 3.11.2+, 4.0+ and DSE 6.0+. + *

+ * @property {Object} [socketOptions] + * @property {Number} [socketOptions.connectTimeout] Connection timeout in milliseconds. Default: 5000. + * @property {Number} [socketOptions.defunctReadTimeoutThreshold] Determines the amount of requests that simultaneously + * have to timeout before closing the connection. Default: 64. + * @property {Boolean} [socketOptions.keepAlive] Whether to enable TCP keep-alive on the socket. Default: true. + * @property {Number} [socketOptions.keepAliveDelay] TCP keep-alive delay in milliseconds. Default: 0. + * @property {Number} [socketOptions.readTimeout] Per-host read timeout in milliseconds. + *

+ * Please note that this is not the maximum time a call to {@link Client#execute} may have to wait; + * this is the maximum time that call will wait for one particular Cassandra host, but other hosts will be tried if + * one of them times out. In other words, a {@link Client#execute} call may theoretically wait up to + * readTimeout * number_of_cassandra_hosts (though the total number of hosts tried for a given query also + * depends on the LoadBalancingPolicy in use). + *

When setting this value, keep in mind the following:

+ *
    + *
  • the timeout settings used on the Cassandra side (*_request_timeout_in_ms in cassandra.yaml) should be taken + * into account when picking a value for this read timeout. You should pick a value a couple of seconds greater than + * the Cassandra timeout settings. + *
  • + *
  • + * the read timeout is only approximate and only controls the timeout to one Cassandra host, not the full query. + *
  • + *
+ * Setting a value of 0 disables read timeouts. Default: 12000. + * @property {Boolean} [socketOptions.tcpNoDelay] When set to true, it disables the Nagle algorithm. Default: true. + * @property {Number} [socketOptions.coalescingThreshold] Buffer length in bytes used by the write queue before flushing + * the frames. Default: 65536. + * @property {AuthProvider} [authProvider] Provider to be used to authenticate to an auth-enabled cluster. + * @property {RequestTracker} [requestTracker] The instance of RequestTracker used to monitor or log requests executed + * with this instance. + * @property {Object} [sslOptions] Client-to-node ssl options. When set the driver will use the secure layer. + * You can specify cert, ca, ... options named after the Node.js tls.connect() options. + *

+ * It uses the same default values as Node.js tls.connect() except for rejectUnauthorized + * which is set to false by default (for historical reasons). This setting is likely to change + * in upcoming versions to enable validation by default. + *

+ * @property {Object} [encoding] Encoding options. + * @property {Function} [encoding.map] Map constructor to use for Cassandra map type encoding and decoding. + * If not set, it will default to Javascript Object with map keys as property names. + * @property {Function} [encoding.set] Set constructor to use for Cassandra set type encoding and decoding. + * If not set, it will default to Javascript Array. + * @property {Boolean} [encoding.copyBuffer] Determines if the network buffer should be copied for buffer based data + * types (blob, uuid, timeuuid and inet). + *

+ * Setting it to true will cause that the network buffer is copied for each row value of those types, + * causing additional allocations but freeing the network buffer to be reused. + * Setting it to true is a good choice for cases where the Row and ResultSet returned by the queries are long-lived + * objects. + *

+ *

+ * Setting it to false will cause less overhead and the reference of the network buffer to be maintained until the row + * / result set are de-referenced. + * Default: true. + *

+ * @property {Boolean} [encoding.useUndefinedAsUnset] Valid for Cassandra 2.2 and above. Determines that, if a parameter + * is set to + * undefined it should be encoded as unset. + *

+ * By default, ECMAScript undefined is encoded as null in the driver. Cassandra 2.2 + * introduced the concept of unset. + * At driver level, you can set a parameter to unset using the field types.unset. Setting this flag to + * true allows you to use ECMAScript undefined as Cassandra unset. + *

+ *

+ * Default: true. + *

+ * @property {Boolean} [encoding.useBigIntAsLong] Use [BigInt ECMAScript type](https://tc39.github.io/proposal-bigint/) + * to represent CQL bigint and counter data types. + * @property {Boolean} [encoding.useBigIntAsVarint] Use [BigInt ECMAScript + * type](https://tc39.github.io/proposal-bigint/) to represent CQL varint data type. + * @property {Array.} [profiles] The array of [execution profiles]{@link ExecutionProfile}. + * @property {Function} [promiseFactory] Function to be used to create a Promise from a + * callback-style function. + *

+ * Promise libraries often provide different methods to create a promise. For example, you can use Bluebird's + * Promise.fromCallback() method. + *

+ *

+ * By default, the driver will use the + * [Promise constructor]{@link https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise}. + *

+ */ + +/** + * Query options + * @typedef {Object} QueryOptions + * @property {Boolean} [autoPage] Determines if the driver must retrieve the following result pages automatically. + *

+ * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. For more information, + * check the + * [paging results documentation]{@link https://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. + *

+ * @property {Boolean} [captureStackTrace] Determines if the stack trace before the query execution should be + * maintained. + *

+ * Useful for debugging purposes, it should be set to false under production environment as it adds an + * unnecessary overhead to each execution. + *

+ * Default: false. + * @property {Number} [consistency] [Consistency level]{@link module:types~consistencies}. + *

+ * Defaults to localOne for Apache Cassandra and DSE deployments. + * For DataStax Astra, it defaults to localQuorum. + *

+ * @property {Object} [customPayload] Key-value payload to be passed to the server. On the Cassandra side, + * implementations of QueryHandler can use this data. + * @property {String} [executeAs] The user or role name to act as when executing this statement. + *

When set, it executes as a different user/role than the one currently authenticated (a.k.a. proxy execution).

+ *

This feature is only available in DSE 5.1+.

+ * @property {String|ExecutionProfile} [executionProfile] Name or instance of the [profile]{@link ExecutionProfile} to + * be used for this execution. If not set, it will use the "default" execution profile. + * @property {Number} [fetchSize] Amount of rows to retrieve per page. + * @property {Array|Array} [hints] Type hints for parameters given in the query, ordered as for the parameters. + *

For batch queries, an array of such arrays, ordered as with the queries in the batch.

+ * @property {Host} [host] The host that should handle the query. + *

+ * Use of this option is heavily discouraged and should only be used in the following cases: + *

+ *
    + *
  1. + * Querying node-local tables, such as tables in the system and system_views + * keyspaces. + *
  2. + *
  3. + * Applying a series of schema changes, where it may be advantageous to execute schema changes in sequence on the + * same node. + *
  4. + *
+ *

+ * Configuring a specific host causes the configured + * [LoadBalancingPolicy]{@link module:policies/loadBalancing~LoadBalancingPolicy} to be completely bypassed. + * However, if the load balancing policy dictates that the host is at a + * [distance of ignored]{@link module:types~distance} or there is no active connectivity to the host, the request will + * fail with a [NoHostAvailableError]{@link module:errors~NoHostAvailableError}. + *

+ * @property {Boolean} [isIdempotent] Defines whether the query can be applied multiple times without changing the result + * beyond the initial application. + *

+ * The query execution idempotence can be used at [RetryPolicy]{@link module:policies/retry~RetryPolicy} level to + * determine if a statement can be retried in case of request error or write timeout. + *

+ *

Default: false.

+ * @property {String} [keyspace] Specifies the keyspace for the query. It is used for the following: + *
    + *
  1. To indicate what keyspace the statement is applicable to (protocol V5+ only). This is useful when the + * query does not provide an explicit keyspace and you want to override the current {@link Client#keyspace}.
  2. + *
  3. For query routing when the query operates on a different keyspace than the current {@link Client#keyspace}.
  4. + *
+ * @property {Boolean} [logged] Determines if the batch should be written to the batchlog. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: true. + * @property {Boolean} [counter] Determines if its a counter batch. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: false. + * @property {Buffer|String} [pageState] Buffer or string token representing the paging state. + *

Useful for manual paging, if provided, the query will be executed starting from a given paging state.

+ * @property {Boolean} [prepare] Determines if the query must be executed as a prepared statement. + * @property {Number} [readTimeout] When defined, it overrides the default read timeout + * (socketOptions.readTimeout) in milliseconds for this execution per coordinator. + *

+ * Suitable for statements for which the coordinator may allow a longer server-side timeout, for example aggregation + * queries. + *

+ *

+ * A value of 0 disables client side read timeout for the execution. Default: undefined. + *

+ * @property {RetryPolicy} [retry] Retry policy for the query. + *

+ * This property can be used to specify a different [retry policy]{@link module:policies/retry} to the one specified + * in the {@link ClientOptions}.policies. + *

+ * @property {Array} [routingIndexes] Index of the parameters that are part of the partition key to determine + * the routing. + * @property {Buffer|Array} [routingKey] Partition key(s) to determine which coordinator should be used for the query. + * @property {Array} [routingNames] Array of the parameters names that are part of the partition key to determine the + * routing. Only valid for non-prepared requests, it's recommended that you use the prepare flag instead. + * @property {Number} [serialConsistency] Serial consistency is the consistency level for the serial phase of + * conditional updates. + * This option will be ignored for anything else that a conditional update/insert. + * @property {Number|Long} [timestamp] The default timestamp for the query in microseconds from the unix epoch + * (00:00:00, January 1st, 1970). + *

If provided, this will replace the server side assigned timestamp as default timestamp.

+ *

Use [generateTimestamp()]{@link module:types~generateTimestamp} utility method to generate a valid timestamp + * based on a Date and microseconds parts.

+ * @property {Boolean} [traceQuery] Enable query tracing for the execution. Use query tracing to diagnose performance + * problems related to query executions. Default: false. + *

To retrieve trace, you can call [Metadata.getTrace()]{@link module:metadata~Metadata#getTrace} method.

+ * @property {Object} [graphOptions] Default options for graph query executions. + *

+ * These options are meant to provide defaults for all graph query executions. Consider using + * [execution profiles]{@link ExecutionProfile} if you plan to reuse different set of options across different + * query executions. + *

+ * @property {String} [graphOptions.language] The graph language to use in graph queries. Default: + * 'gremlin-groovy'. + * @property {String} [graphOptions.name] The graph name to be used in all graph queries. + *

+ * This property is required but there is no default value for it. This value can be overridden at query level. + *

+ * @property {Number} [graphOptions.readConsistency] Overrides the + * [consistency level]{@link module:types~consistencies} + * defined in the query options for graph read queries. + * @property {Number} [graphOptions.readTimeout] Overrides the default per-host read timeout (in milliseconds) for all + * graph queries. Default: 0. + *

+ * Use null to reset the value and use the default on socketOptions.readTimeout . + *

+ * @property {String} [graphOptions.source] The graph traversal source name to use in graph queries. Default: + * 'g'. + * @property {Number} [graphOptions.writeConsistency] Overrides the [consistency + * level]{@link module:types~consistencies} defined in the query options for graph write queries. + */ + +/** + * Creates a new instance of {@link Client}. + * @classdesc + * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to + * execute CQL statements. + *

+ * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node + * to use per each query execution, when it should retry failed or timed-out executions and how reconnection to down + * nodes should be made. + *

+ * @extends EventEmitter + * @param {ClientOptions} options The options for this instance. + * @example Creating a new client instance + * const client = new Client({ + * contactPoints: ['10.0.1.101', '10.0.1.102'], + * localDataCenter: 'datacenter1' + * }); + * @example Executing a query + * const result = await client.connect(); + * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); + * @example Executing a query + * const result = await client.execute('SELECT key FROM system.local'); + * const row = result.first(); + * console.log(row['key']); + * @constructor + */ +function Client(options) { + events.EventEmitter.call(this); + this.options = clientOptions.extend({ logEmitter: this.emit.bind(this), id: types.Uuid.random() }, options); + Object.defineProperty(this, 'profileManager', { value: new ProfileManager(this.options) }); + Object.defineProperty(this, 'controlConnection', { + value: new ControlConnection(this.options, this.profileManager), writable: true } + ); + Object.defineProperty(this, 'insightsClient', { value: new InsightsClient(this)}); + + //Unlimited amount of listeners for internal event queues by default + this.setMaxListeners(0); + this.connected = false; + this.isShuttingDown = false; + /** + * Gets the name of the active keyspace. + * @type {String} + */ + this.keyspace = options.keyspace; + /** + * Gets the schema and cluster metadata information. + * @type {Metadata} + */ + this.metadata = this.controlConnection.metadata; + /** + * Gets an associative array of cluster hosts. + * @type {HostMap} + */ + this.hosts = this.controlConnection.hosts; + + /** + * The [ClientMetrics]{@link module:metrics~ClientMetrics} instance used to expose measurements of its internal + * behavior and of the server as seen from the driver side. + *

By default, a [DefaultMetrics]{@link module:metrics~DefaultMetrics} instance is used.

+ * @type {ClientMetrics} + */ + this.metrics = this.options.metrics; + + this._graphExecutor = new GraphExecutor(this, options, this._execute); +} + +util.inherits(Client, events.EventEmitter); + +/** + * Emitted when a new host is added to the cluster. + *
    + *
  • {@link Host} The host being added.
  • + *
+ * @event Client#hostAdd + */ +/** + * Emitted when a host is removed from the cluster + *
    + *
  • {@link Host} The host being removed.
  • + *
+ * @event Client#hostRemove + */ +/** + * Emitted when a host in the cluster changed status from down to up. + *
    + *
  • {@link Host host} The host that changed the status.
  • + *
+ * @event Client#hostUp + */ +/** + * Emitted when a host in the cluster changed status from up to down. + *
    + *
  • {@link Host host} The host that changed the status.
  • + *
+ * @event Client#hostDown + */ + +/** + * Attempts to connect to one of the [contactPoints]{@link ClientOptions} and discovers the rest the nodes of the + * cluster. + *

When the {@link Client} is already connected, it resolves immediately.

+ *

It returns a Promise when a callback is not provided.

+ * @param {function} [callback] The optional callback that is invoked when the pool is connected or it failed to + * connect. + * @example Usage example + * await client.connect(); + */ +Client.prototype.connect = function (callback) { + if (this.connected && callback) { + // Avoid creating Promise to immediately resolve them + return callback(); + } + + return promiseUtils.optionalCallback(this._connect(), callback); +}; + +/** + * Async-only version of {@link Client#connect()}. + * @private + */ +Client.prototype._connect = async function () { + if (this.connected) { + return; + } + + if (this.isShuttingDown) { + //it is being shutdown, don't allow further calls to connect() + throw new errors.NoHostAvailableError(null, 'Connecting after shutdown is not supported'); + } + + if (this.connecting) { + return promiseUtils.fromEvent(this, 'connected'); + } + + this.connecting = true; + this.log('info', util.format("Connecting to cluster using '%s' version %s", description, version)); + + try { + await cloud.init(this.options); + await this.controlConnection.init(); + this.hosts = this.controlConnection.hosts; + await this.profileManager.init(this, this.hosts); + + if (this.keyspace) { + await RequestHandler.setKeyspace(this); + } + + clientOptions.setMetadataDependent(this); + + await this._warmup(); + + } catch (err) { + // We should close the pools (if any) and reset the state to allow successive calls to connect() + await this.controlConnection.reset(); + this.connected = false; + this.connecting = false; + this.emit('connected', err); + throw err; + } + + this._setHostListeners(); + + // Set the distance of the control connection host relatively to this instance + this.profileManager.getDistance(this.controlConnection.host); + this.insightsClient.init(); + this.connected = true; + this.connecting = false; + this.emit('connected'); +}; + +/** + * Executes a query on an available connection. + *

The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag.

+ *

+ * Some execution failures can be handled transparently by the driver, according to the + * [RetryPolicy]{@linkcode module:policies/retry~RetryPolicy} or the + * [SpeculativeExecutionPolicy]{@linkcode module:policies/speculativeExecution} used. + *

+ *

It returns a Promise when a callback is not provided.

+ * @param {String} query The query to execute. + * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value. + * @param {QueryOptions} [options] The query options for the execution. + * @param {ResultCallback} [callback] Executes callback(err, result) when execution completed. When not defined, the + * method will return a promise. + * @example Promise-based API, using async/await + * const query = 'SELECT name, email FROM users WHERE id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * const row = result.first(); + * console.log('%s: %s', row['name'], row['email']); + * @example Callback-based API + * const query = 'SELECT name, email FROM users WHERE id = ?'; + * client.execute(query, [ id ], { prepare: true }, function (err, result) { + * assert.ifError(err); + * const row = result.first(); + * console.log('%s: %s', row['name'], row['email']); + * }); + * @see {@link ExecutionProfile} to reuse a set of options across different query executions. + */ +Client.prototype.execute = function (query, params, options, callback) { + // This method acts as a wrapper for the async method _execute() + + if (!callback) { + // Set default argument values for optional parameters + if (typeof options === 'function') { + callback = options; + options = null; + } else if (typeof params === 'function') { + callback = params; + params = null; + } + } + + try { + const execOptions = DefaultExecutionOptions.create(options, this); + return promiseUtils.optionalCallback(this._execute(query, params, execOptions), callback); + } + catch (err) { + // There was an error when parsing the user options + if (callback) { + return callback(err); + } + + return Promise.reject(err); + } +}; + +/** + * Executes a graph query. + *

It returns a Promise when a callback is not provided.

+ * @param {String} query The gremlin query. + * @param {Object|null} [parameters] An associative array containing the key and values of the parameters. + * @param {GraphQueryOptions|null} [options] The graph query options. + * @param {Function} [callback] Function to execute when the response is retrieved, taking two arguments: + * err and result. When not defined, the method will return a promise. + * @example Promise-based API, using async/await + * const result = await client.executeGraph('g.V()'); + * // Get the first item (vertex, edge, scalar value, ...) + * const vertex = result.first(); + * console.log(vertex.label); + * @example Callback-based API + * client.executeGraph('g.V()', (err, result) => { + * const vertex = result.first(); + * console.log(vertex.label); + * }); + * @example Iterating through the results + * const result = await client.executeGraph('g.E()'); + * for (let edge of result) { + * console.log(edge.label); // created + * }); + * @example Using result.forEach() + * const result = await client.executeGraph('g.V().hasLabel("person")'); + * result.forEach(function(vertex) { + * console.log(vertex.type); // vertex + * console.log(vertex.label); // person + * }); + * @see {@link ExecutionProfile} to reuse a set of options across different query executions. + */ +Client.prototype.executeGraph = function (query, parameters, options, callback) { + callback = callback || (options ? options : parameters); + + if (typeof callback === 'function') { + parameters = typeof parameters !== 'function' ? parameters : null; + return promiseUtils.toCallback(this._graphExecutor.send(query, parameters, options), callback); + } + + return this._graphExecutor.send(query, parameters, options); +}; + +/** + * Executes the query and calls rowCallback for each row as soon as they are received. Calls the final + * callback after all rows have been sent, or when there is an error. + *

+ * The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag. + *

+ * @param {String} query The query to execute + * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value. + * @param {QueryOptions} [options] The query options. + * @param {function} rowCallback Executes rowCallback(n, row) per each row received, where n is the row + * index and row is the current Row. + * @param {function} [callback] Executes callback(err, result) after all rows have been received. + *

+ * When dealing with paged results, [ResultSet#nextPage()]{@link module:types~ResultSet#nextPage} method can be used + * to retrieve the following page. In that case, rowCallback() will be again called for each row and + * the final callback will be invoked when all rows in the following page has been retrieved. + *

+ * @example Using per-row callback and arrow functions + * client.eachRow(query, params, { prepare: true }, (n, row) => console.log(n, row), err => console.error(err)); + * @example Overloads + * client.eachRow(query, rowCallback); + * client.eachRow(query, params, rowCallback); + * client.eachRow(query, params, options, rowCallback); + * client.eachRow(query, params, rowCallback, callback); + * client.eachRow(query, params, options, rowCallback, callback); + */ +Client.prototype.eachRow = function (query, params, options, rowCallback, callback) { + if (!callback && rowCallback && typeof options === 'function') { + callback = utils.validateFn(rowCallback, 'rowCallback'); + rowCallback = options; + } else { + callback = callback || utils.noop; + rowCallback = utils.validateFn(rowCallback || options || params, 'rowCallback'); + } + + params = typeof params !== 'function' ? params : null; + + let execOptions; + try { + execOptions = DefaultExecutionOptions.create(options, this, rowCallback); + } + catch (e) { + return callback(e); + } + + let rowLength = 0; + + const nextPage = () => promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); + + function pageCallback (err, result) { + if (err) { + return callback(err); + } + // Next requests in case paging (auto or explicit) is used + rowLength += result.rowLength; + + if (result.rawPageState !== undefined) { + // Use new page state as next request page state + execOptions.setPageState(result.rawPageState); + if (execOptions.isAutoPage()) { + // Issue next request for the next page + return nextPage(); + } + // Allows for explicit (manual) paging, in case the caller needs it + result.nextPage = nextPage; + } + + // Finished auto-paging + result.rowLength = rowLength; + callback(null, result); + } + + promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); +}; + +/** + * Executes the query and pushes the rows to the result stream as soon as they received. + *

+ * The stream is a [ReadableStream]{@linkcode https://nodejs.org/api/stream.html#stream_class_stream_readable} object + * that emits rows. + * It can be piped downstream and provides automatic pause/resume logic (it buffers when not read). + *

+ *

+ * The query can be prepared (recommended) or not depending on {@link QueryOptions}.prepare flag. Retries on multiple + * hosts if needed. + *

+ * @param {String} query The query to prepare and execute. + * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value + * @param {QueryOptions} [options] The query options. + * @param {function} [callback] executes callback(err) after all rows have been received or if there is an error + * @returns {ResultStream} + */ +Client.prototype.stream = function (query, params, options, callback) { + callback = callback || utils.noop; + // NOTE: the nodejs stream maintains yet another internal buffer + // we rely on the default stream implementation to keep memory + // usage reasonable. + const resultStream = new types.ResultStream({ objectMode: 1 }); + function onFinish(err, result) { + if (err) { + resultStream.emit('error', err); + } + if (result && result.nextPage ) { + // allows for throttling as per the + // default nodejs stream implementation + resultStream._valve(function pageValve() { + try { + result.nextPage(); + } + catch( ex ) { + resultStream.emit('error', ex ); + } + }); + return; + } + // Explicitly dropping the valve (closure) + resultStream._valve(null); + resultStream.add(null); + callback(err); + } + let sync = true; + this.eachRow(query, params, options, function rowCallback(n, row) { + resultStream.add(row); + }, function eachRowFinished(err, result) { + if (sync) { + // Prevent sync callback + return setImmediate(function eachRowFinishedImmediate() { + onFinish(err, result); + }); + } + onFinish(err, result); + }); + sync = false; + return resultStream; +}; + +/** + * Executes batch of queries on an available connection to a host. + *

It returns a Promise when a callback is not provided.

+ * @param {Array.|Array.<{query, params}>} queries The queries to execute as an Array of strings or as an array + * of object containing the query and params + * @param {QueryOptions} [options] The query options. + * @param {ResultCallback} [callback] Executes callback(err, result) when the batch was executed + */ +Client.prototype.batch = function (queries, options, callback) { + if (!callback && typeof options === 'function') { + callback = options; + options = null; + } + + return promiseUtils.optionalCallback(this._batch(queries, options), callback); +}; + +/** + * Async-only version of {@link Client#batch()} . + * @param {Array.|Array.<{query, params}>}queries + * @param {QueryOptions} options + * @returns {Promise} + * @private + */ +Client.prototype._batch = async function (queries, options) { + if (!Array.isArray(queries)) { + throw new errors.ArgumentError('Queries should be an Array'); + } + + if (queries.length === 0) { + throw new errors.ArgumentError('Queries array should not be empty'); + } + + await this._connect(); + + const execOptions = DefaultExecutionOptions.create(options, this); + let queryItems; + + if (execOptions.isPrepared()) { + // use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. + const version = this.controlConnection.protocolVersion; + const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && options.keyspace || this.keyspace; + queryItems = await PrepareHandler.getPreparedMultiple( + this, execOptions.getLoadBalancingPolicy(), queries, queryKeyspace); + } else { + queryItems = new Array(queries.length); + + for (let i = 0; i < queries.length; i++) { + const item = queries[i]; + if (!item) { + throw new errors.ArgumentError(`Invalid query at index ${i}`); + } + + const query = typeof item === 'string' ? 
item : item.query; + if (!query) { + throw new errors.ArgumentError(`Invalid query at index ${i}`); + } + + queryItems[i] = { query, params: item.params }; + } + } + + const request = await this._createBatchRequest(queryItems, execOptions); + return await RequestHandler.send(request, execOptions, this); +}; + +/** + * Gets the host that are replicas of a given token. + * @param {String} keyspace + * @param {Buffer} token + * @returns {Array} + */ +Client.prototype.getReplicas = function (keyspace, token) { + return this.metadata.getReplicas(keyspace, token); +}; + +/** + * Gets a snapshot containing information on the connections pools held by this Client at the current time. + *

+ * The information provided in the returned object only represents the state at the moment this method was called and + * it's not maintained in sync with the driver metadata. + *

+ * @returns {ClientState} A [ClientState]{@linkcode module:metadata~ClientState} instance. + */ +Client.prototype.getState = function () { + return ClientState.from(this); +}; + +Client.prototype.log = utils.log; + +/** + * Closes all connections to all hosts. + *

It returns a Promise when a callback is not provided.

+ * @param {Function} [callback] Optional callback to be invoked when finished closing all connections. + */ +Client.prototype.shutdown = function (callback) { + return promiseUtils.optionalCallback(this._shutdown(), callback); +}; + +/** @private */ +Client.prototype._shutdown = async function () { + this.log('info', 'Shutting down'); + + if (!this.hosts || !this.connected) { + // not initialized + this.connected = false; + return; + } + + if (this.connecting) { + this.log('warning', 'Shutting down while connecting'); + // wait until finish connecting for easier troubleshooting + await promiseUtils.fromEvent(this, 'connected'); + } + + this.connected = false; + this.isShuttingDown = true; + const hosts = this.hosts.values(); + + this.insightsClient.shutdown(); + + // Shutdown the ControlConnection before shutting down the pools + this.controlConnection.shutdown(); + this.options.policies.speculativeExecution.shutdown(); + + if (this.options.requestTracker) { + this.options.requestTracker.shutdown(); + } + + // go through all the host and shut down their pools + await Promise.all(hosts.map(h => h.shutdown(false))); +}; + +/** + * Waits until that the schema version in all nodes is the same or the waiting time passed. 
+ * @param {Connection} connection + * @returns {Promise} + * @ignore + */ +Client.prototype._waitForSchemaAgreement = async function (connection) { + if (this.hosts.length === 1) { + return true; + } + + const start = process.hrtime(); + const maxWaitSeconds = this.options.protocolOptions.maxSchemaAgreementWaitSeconds; + + this.log('info', 'Waiting for schema agreement'); + + let versionsMatch; + + while (!versionsMatch && process.hrtime(start)[0] < maxWaitSeconds) { + versionsMatch = await this.metadata.compareSchemaVersions(connection); + + if (versionsMatch) { + this.log('info', 'Schema versions match'); + break; + } + + // Let some time pass before the next check + await promiseUtils.delay(500); + } + + return versionsMatch; +}; + +/** + * Waits for schema agreements and schedules schema metadata refresh. + * @param {Connection} connection + * @param event + * @returns {Promise} + * @ignore + * @internal + */ +Client.prototype.handleSchemaAgreementAndRefresh = async function (connection, event) { + let agreement = false; + + try { + agreement = await this._waitForSchemaAgreement(connection); + } catch (err) { + //we issue a warning but we continue with the normal flow + this.log('warning', 'There was an error while waiting for the schema agreement between nodes', err); + } + + if (!this.options.isMetadataSyncEnabled) { + return agreement; + } + + // Refresh metadata immediately + try { + await this.controlConnection.handleSchemaChange(event, true); + } catch (err) { + this.log('warning', 'There was an error while handling schema change', err); + } + + return agreement; +}; + +/** + * Connects and handles the execution of prepared and simple statements. 
+ * @param {string} query + * @param {Array} params + * @param {ExecutionOptions} execOptions + * @returns {Promise} + * @private + */ +Client.prototype._execute = async function (query, params, execOptions) { + const version = this.controlConnection.protocolVersion; + + if (!execOptions.isPrepared() && params && !Array.isArray(params) && + !types.protocolVersion.supportsNamedParameters(version)) { + // Only Cassandra 2.1 and above supports named parameters + throw new errors.ArgumentError('Named parameters for simple statements are not supported, use prepare flag'); + } + + let request; + + if (!this.connected) { + // Micro optimization to avoid an async execution for a simple check + await this._connect(); + } + + if (!execOptions.isPrepared()) { + request = await this._createQueryRequest(query, execOptions, params); + } else { + const lbp = execOptions.getLoadBalancingPolicy(); + + // Use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. + const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && + execOptions.getKeyspace() || this.keyspace; + + const { queryId, meta } = await PrepareHandler.getPrepared(this, lbp, query, queryKeyspace); + request = await this._createExecuteRequest(query, queryId, execOptions, params, meta); + } + + return await RequestHandler.send(request, execOptions, this); +}; + +/** + * Sets the listeners for the nodes. 
+ * @private + */ +Client.prototype._setHostListeners = function () { + function getHostUpListener(emitter, h) { + return () => emitter.emit('hostUp', h); + } + + function getHostDownListener(emitter, h) { + return () => emitter.emit('hostDown', h); + } + + const self = this; + + // Add status listeners when new nodes are added and emit hostAdd + this.hosts.on('add', function hostAddedListener(h) { + h.on('up', getHostUpListener(self, h)); + h.on('down', getHostDownListener(self, h)); + self.emit('hostAdd', h); + }); + + // Remove all listeners and emit hostRemove + this.hosts.on('remove', function hostRemovedListener(h) { + h.removeAllListeners(); + self.emit('hostRemove', h); + }); + + // Add status listeners for existing hosts + this.hosts.forEach(function (h) { + h.on('up', getHostUpListener(self, h)); + h.on('down', getHostDownListener(self, h)); + }); +}; + +/** + * Sets the distance to each host and when warmup is true, creates all connections to local hosts. + * @returns {Promise} + * @private + */ +Client.prototype._warmup = function () { + const hosts = this.hosts.values(); + + return promiseUtils.times(hosts.length, warmupLimit, async (index) => { + const h = hosts[index]; + const distance = this.profileManager.getDistance(h); + + if (distance === types.distance.ignored) { + return; + } + + if (this.options.pooling.warmup && distance === types.distance.local) { + try { + await h.warmupPool(this.keyspace); + } catch (err) { + // An error while trying to create a connection to one of the hosts. + // Warn the user and move on. 
+ this.log('warning', `Connection pool to host ${h.address} could not be created: ${err}`, err); + } + } else { + h.initializePool(); + } + }); +}; + +/** + * @returns {Encoder} + * @private + */ +Client.prototype._getEncoder = function () { + const encoder = this.controlConnection.getEncoder(); + if (!encoder) { + throw new errors.DriverInternalError('Encoder is not defined'); + } + return encoder; +}; + +/** + * Returns a BatchRequest instance and fills the routing key information in the provided options. + * @private + */ +Client.prototype._createBatchRequest = async function (queryItems, info) { + const firstQuery = queryItems[0]; + if (!firstQuery.meta) { + return new requests.BatchRequest(queryItems, info); + } + + await this._setRoutingInfo(info, firstQuery.params, firstQuery.meta); + return new requests.BatchRequest(queryItems, info); +}; + +/** + * Returns an ExecuteRequest instance and fills the routing key information in the provided options. + * @private + */ +Client.prototype._createExecuteRequest = async function(query, queryId, info, params, meta) { + params = utils.adaptNamedParamsPrepared(params, meta.columns); + await this._setRoutingInfo(info, params, meta); + return new requests.ExecuteRequest(query, queryId, params, info, meta); +}; + +/** + * Returns a QueryRequest instance and fills the routing key information in the provided options. + * @private + */ +Client.prototype._createQueryRequest = async function (query, execOptions, params) { + await this.metadata.adaptUserHints(this.keyspace, execOptions.getHints()); + const paramsInfo = utils.adaptNamedParamsWithHints(params, execOptions); + this._getEncoder().setRoutingKeyFromUser(paramsInfo.params, execOptions, paramsInfo.keyIndexes); + + return new requests.QueryRequest(query, paramsInfo.params, execOptions, paramsInfo.namedParameters); +}; + +/** + * Sets the routing key based on the parameter values or the provided routing key components. 
+ * @param {ExecutionOptions} execOptions + * @param {Array} params + * @param meta + * @private + */ +Client.prototype._setRoutingInfo = async function (execOptions, params, meta) { + const encoder = this._getEncoder(); + + if (!execOptions.getKeyspace() && meta.keyspace) { + execOptions.setKeyspace(meta.keyspace); + } + if (execOptions.getRoutingKey()) { + // Routing information provided by the user + return encoder.setRoutingKeyFromUser(params, execOptions); + } + if (Array.isArray(meta.partitionKeys)) { + // The partition keys are provided as part of the metadata for modern protocol versions + execOptions.setRoutingIndexes(meta.partitionKeys); + return encoder.setRoutingKeyFromMeta(meta, params, execOptions); + } + + // Older versions of the protocol (v3 and below) don't provide routing information + try { + const tableInfo = await this.metadata.getTable(meta.keyspace, meta.table); + + if (!tableInfo) { + // The schema data is not there, maybe it is being recreated, avoid setting the routing information + return; + } + + execOptions.setRoutingIndexes(tableInfo.partitionKeys.map(c => meta.columnsByName[c.name])); + // Skip parsing metadata next time + meta.partitionKeys = execOptions.getRoutingIndexes(); + encoder.setRoutingKeyFromMeta(meta, params, execOptions); + } catch (err) { + this.log('warning', util.format('Table %s.%s metadata could not be retrieved', meta.keyspace, meta.table)); + } +}; + +/** + * Callback used by execution methods. + * @callback ResultCallback + * @param {Error} err Error occurred in the execution of the query. + * @param {ResultSet} [result] Result of the execution of the query. + */ + +module.exports = Client; diff --git a/node_modules/cassandra-driver/lib/concurrent/index.d.ts b/node_modules/cassandra-driver/lib/concurrent/index.d.ts new file mode 100644 index 0000000..91ab56f --- /dev/null +++ b/node_modules/cassandra-driver/lib/concurrent/index.d.ts @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Client } from '../../'; +import { Readable } from 'stream'; + +export namespace concurrent { + interface ResultSetGroup { + errors: Error[]; + resultItems: any[]; + totalExecuted: number; + } + + type Options = { + collectResults?: boolean; + concurrencyLevel?: number; + executionProfile?: string; + maxErrors?: number; + raiseOnFirstError?: boolean; + } + + function executeConcurrent( + client: Client, + query: string, + parameters: any[][]|Readable, + options?: Options): Promise; + + function executeConcurrent( + client: Client, + queries: Array<{query: string, params: any[]}>, + options?: Options): Promise; +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/concurrent/index.js b/node_modules/cassandra-driver/lib/concurrent/index.js new file mode 100644 index 0000000..c618693 --- /dev/null +++ b/node_modules/cassandra-driver/lib/concurrent/index.js @@ -0,0 +1,335 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const { Stream } = require('stream'); +const utils = require('../utils'); + +/** + * Utilities for concurrent query execution with the DataStax Node.js Driver. + * @module concurrent + */ + +/** + * Executes multiple queries concurrently at the defined concurrency level. + * @static + * @param {Client} client The {@link Client} instance. + * @param {String|Array<{query, params}>} query The query to execute per each parameter item. + * @param {Array|Stream|Object} parameters An {@link Array} or a readable {@link Stream} composed of {@link Array} + * items representing each individual set of parameters. Per each item in the {@link Array} or {@link Stream}, an + * execution is going to be made. + * @param {Object} [options] The execution options. + * @param {String} [options.executionProfile] The execution profile to be used. + * @param {Number} [options.concurrencyLevel=100] The concurrency level to determine the maximum amount of in-flight + * operations at any given time + * @param {Boolean} [options.raiseOnFirstError=true] Determines whether execution should stop after the first failed + * execution and the corresponding exception will be raised. + * @param {Boolean} [options.collectResults=false] Determines whether each individual + * [ResultSet]{@link module:types~ResultSet} instance should be collected in the grouped result. + * @param {Number} [options.maxErrors=100] The maximum amount of errors to be collected before ignoring the rest of + * the error results. + * @returns {Promise} A Promise of {@link ResultSetGroup} that is resolved when all the + * executions completed and it's rejected when raiseOnFirstError is true and there is one + * or more failures. 
+ * @example Using a fixed query and an Array of Arrays as parameters + * const query = 'INSERT INTO table1 (id, value) VALUES (?, ?)'; + * const parameters = [[1, 'a'], [2, 'b'], [3, 'c'], ]; // ... + * const result = await executeConcurrent(client, query, parameters); + * @example Using a fixed query and a readable stream + * const stream = csvStream.pipe(transformLineToArrayStream); + * const result = await executeConcurrent(client, query, stream); + * @example Using a different queries + * const queryAndParameters = [ + * { query: 'INSERT INTO videos (id, name, user_id) VALUES (?, ?, ?)', + * params: [ id, name, userId ] }, + * { query: 'INSERT INTO user_videos (user_id, id, name) VALUES (?, ?, ?)', + * params: [ userId, id, name ] }, + * { query: 'INSERT INTO latest_videos (id, name, user_id) VALUES (?, ?, ?)', + * params: [ id, name, userId ] }, + * ]; + * + * const result = await executeConcurrent(client, queryAndParameters); + */ +function executeConcurrent(client, query, parameters, options) { + if (!client) { + throw new TypeError('Client instance is not defined'); + } + + if (typeof query === 'string') { + if (Array.isArray(parameters)) { + return new ArrayBasedExecutor(client, query, parameters, options).execute(); + } + + if (parameters instanceof Stream) { + return new StreamBasedExecutor(client, query, parameters, options).execute(); + } + + throw new TypeError('parameters should be an Array or a Stream instance'); + } + + if (Array.isArray(query)) { + options = parameters; + return new ArrayBasedExecutor(client, null, query, options).execute(); + } + + throw new TypeError('A string query or query and parameters array should be provided'); +} + +/** + * Wraps the functionality to execute given an Array. + * @ignore + */ +class ArrayBasedExecutor { + + /** + * @param {Client} client + * @param {String} query + * @param {Array|Array<{query, params}>} parameters + * @param {Object} [options] The execution options. 
+ * @private + */ + constructor(client, query, parameters, options) { + this._client = client; + this._query = query; + this._parameters = parameters; + options = options || utils.emptyObject; + this._raiseOnFirstError = options.raiseOnFirstError !== false; + this._concurrencyLevel = Math.min(options.concurrencyLevel || 100, this._parameters.length); + this._queryOptions = { prepare: true, executionProfile: options.executionProfile }; + this._result = new ResultSetGroup(options); + this._stop = false; + } + + execute() { + const promises = new Array(this._concurrencyLevel); + + for (let i = 0; i < this._concurrencyLevel; i++) { + promises[i] = this._executeOneAtATime(i, 0); + } + + return Promise.all(promises).then(() => this._result); + } + + _executeOneAtATime(initialIndex, iteration) { + const index = initialIndex + this._concurrencyLevel * iteration; + + if (index >= this._parameters.length || this._stop) { + return Promise.resolve(); + } + + const item = this._parameters[index]; + let query; + let params; + + if (this._query === null) { + query = item.query; + params = item.params; + } else { + query = this._query; + params = item; + } + + return this._client.execute(query, params, this._queryOptions) + .then(rs => this._result.setResultItem(index, rs)) + .catch(err => this._setError(index, err)) + .then(() => this._executeOneAtATime(initialIndex, iteration + 1)); + } + + _setError(index, err) { + this._result.setError(index, err); + + if (this._raiseOnFirstError) { + this._stop = true; + throw err; + } + } +} + +/** + * Wraps the functionality to execute given a Stream. + * @ignore + */ +class StreamBasedExecutor { + + /** + * @param {Client} client + * @param {String} query + * @param {Stream} stream + * @param {Object} [options] The execution options. 
+ * @private + */ + constructor(client, query, stream, options) { + this._client = client; + this._query = query; + this._stream = stream; + options = options || utils.emptyObject; + this._raiseOnFirstError = options.raiseOnFirstError !== false; + this._concurrencyLevel = options.concurrencyLevel || 100; + this._queryOptions = { prepare: true, executionProfile: options.executionProfile }; + this._inFlight = 0; + this._index = 0; + this._result = new ResultSetGroup(options); + this._resolveCallback = null; + this._rejectCallback = null; + this._readEnded = false; + } + + execute() { + return new Promise((resolve, reject) => { + this._resolveCallback = resolve; + this._rejectCallback = reject; + + this._stream + .on('data', params => this._executeOne(params)) + .on('error', err => this._setReadEnded(err)) + .on('end', () => this._setReadEnded()); + }); + } + + _executeOne(params) { + if (!Array.isArray(params)) { + return this._setReadEnded(new TypeError('Stream should be in objectMode and should emit Array instances')); + } + + if (this._readEnded) { + // Read ended abruptly because of incorrect format or error event being emitted. + // We shouldn't consider additional items. + return; + } + + const index = this._index++; + this._inFlight++; + + this._client.execute(this._query, params, this._queryOptions) + .then(rs => { + this._result.setResultItem(index, rs); + this._inFlight--; + }) + .catch(err => { + this._inFlight--; + this._setError(index, err); + }) + .then(() => { + if (this._stream.isPaused()) { + this._stream.resume(); + } + + if (this._readEnded && this._inFlight === 0) { + // When read ended and there are no more in-flight requests + // We yield the result to the user. 
+ // It could have ended prematurely when there is a read error + // or there was an execution error and raiseOnFirstError is true + // In that case, calling the resolve callback has no effect + this._resolveCallback(this._result); + } + }); + + if (this._inFlight >= this._concurrencyLevel) { + this._stream.pause(); + } + } + + /** + * Marks the stream read process as ended. + * @param {Error} [err] The stream read error. + * @private + */ + _setReadEnded(err) { + if (!this._readEnded) { + this._readEnded = true; + + if (err) { + // There was an error while reading from the input stream. + // This should be surfaced as a failure + this._rejectCallback(err); + } else if (this._inFlight === 0) { + // Ended signaled and there are no more pending messages. + this._resolveCallback(this._result); + } + } + } + + _setError(index, err) { + this._result.setError(index, err); + + if (this._raiseOnFirstError) { + this._readEnded = true; + this._rejectCallback(err); + } + } +} + +/** + * Represents results from different related executions. + */ +class ResultSetGroup { + + /** + * Creates a new instance of {@link ResultSetGroup}. + * @ignore + */ + constructor(options) { + this._collectResults = options.collectResults; + this._maxErrors = options.maxErrors || 100; + this.totalExecuted = 0; + this.errors = []; + + if (this._collectResults) { + /** + * Gets an {@link Array} containing the [ResultSet]{@link module:types~ResultSet} instances from each execution. + *

+ * Note that when collectResults is set to false, accessing this property will + * throw an error. + *

+ * @type {Array} + */ + this.resultItems = []; + } else { + Object.defineProperty(this, 'resultItems', { enumerable: false, get: () => { + throw new Error('Property resultItems can not be accessed when collectResults is set to false'); + }}); + } + } + + /** @ignore */ + setResultItem(index, rs) { + this.totalExecuted++; + + if (this._collectResults) { + this.resultItems[index] = rs; + } + } + + /** + * Internal method to set the error of an execution. + * @ignore + */ + setError(index, err) { + this.totalExecuted++; + + if (this.errors.length < this._maxErrors) { + this.errors.push(err); + } + + if (this._collectResults) { + this.resultItems[index] = err; + } + } +} + +exports.executeConcurrent = executeConcurrent; +exports.ResultSetGroup = ResultSetGroup; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/connection.js b/node_modules/cassandra-driver/lib/connection.js new file mode 100644 index 0000000..8438287 --- /dev/null +++ b/node_modules/cassandra-driver/lib/connection.js @@ -0,0 +1,790 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const events = require('events'); +const util = require('util'); +const tls = require('tls'); +const net = require('net'); + +const Encoder = require('./encoder.js'); +const { WriteQueue } = require('./writers'); +const requests = require('./requests'); +const streams = require('./streams'); +const utils = require('./utils'); +const types = require('./types'); +const errors = require('./errors'); +const StreamIdStack = require('./stream-id-stack'); +const OperationState = require('./operation-state'); +const promiseUtils = require('./promise-utils'); +const { ExecutionOptions } = require('./execution-options'); + +/** + * Represents a connection to a Cassandra node + */ +class Connection extends events.EventEmitter { + + /** + * Creates a new instance of Connection. + * @param {String} endpoint An string containing ip address and port of the host + * @param {Number|null} protocolVersion + * @param {ClientOptions} options + */ + constructor(endpoint, protocolVersion, options) { + super(); + + this.setMaxListeners(0); + + if (!options) { + throw new Error('options is not defined'); + } + + /** + * Gets the ip and port of the server endpoint. + * @type {String} + */ + this.endpoint = endpoint; + + /** + * Gets the friendly name of the host, used to identify the connection in log messages. + * With direct connect, this is the address and port. + * With SNI, this will be the address and port of the proxy, plus the server name. 
+ * @type {String} + */ + this.endpointFriendlyName = this.endpoint; + + if (options.sni) { + this._serverName = endpoint; + this.endpoint = `${options.sni.addressResolver.getIp()}:${options.sni.port}`; + this.endpointFriendlyName = `${this.endpoint} (${this._serverName})`; + } + + if (!this.endpoint || this.endpoint.indexOf(':') < 0) { + throw new Error('EndPoint must contain the ip address and port separated by : symbol'); + } + + const portSeparatorIndex = this.endpoint.lastIndexOf(':'); + this.address = this.endpoint.substr(0, portSeparatorIndex); + this.port = this.endpoint.substr(portSeparatorIndex + 1); + + Object.defineProperty(this, "options", { value: options, enumerable: false, writable: false}); + + if (protocolVersion === null) { + // Set initial protocol version + protocolVersion = types.protocolVersion.maxSupported; + if (options.protocolOptions.maxVersion) { + // User provided the protocol version + protocolVersion = options.protocolOptions.maxVersion; + } + // Allow to check version using this connection instance + this._checkingVersion = true; + } + + this.log = utils.log; + this.protocolVersion = protocolVersion; + this._operations = new Map(); + this._pendingWrites = []; + this._preparing = new Map(); + + /** + * The timeout state for the idle request (heartbeat) + */ + this._idleTimeout = null; + this.timedOutOperations = 0; + this._streamIds = new StreamIdStack(this.protocolVersion); + this._metrics = options.metrics; + + this.encoder = new Encoder(protocolVersion, options); + this.keyspace = null; + this.emitDrain = false; + /** + * Determines if the socket is open and startup succeeded, whether the connection can be used to send requests / + * receive events + */ + this.connected = false; + /** + * Determines if the socket can be considered as open + */ + this.isSocketOpen = false; + + this.send = util.promisify(this.sendStream); + this.closeAsync = util.promisify(this.close); + this.openAsync = util.promisify(this.open); + 
this.prepareOnceAsync = util.promisify(this.prepareOnce); + } + + /** + * Binds the necessary event listeners for the socket + */ + bindSocketListeners() { + //Remove listeners that were used for connecting + this.netClient.removeAllListeners('connect'); + this.netClient.removeAllListeners('timeout'); + // The socket is expected to be open at this point + this.isSocketOpen = true; + this.netClient.on('close', () => { + this.log('info', `Connection to ${this.endpointFriendlyName} closed`); + this.isSocketOpen = false; + const wasConnected = this.connected; + this.close(); + if (wasConnected) { + // Emit only when it was closed unexpectedly + this.emit('socketClose'); + } + }); + + this.protocol = new streams.Protocol({ objectMode: true }); + this.parser = new streams.Parser({ objectMode: true }, this.encoder); + const resultEmitter = new streams.ResultEmitter({objectMode: true}); + resultEmitter.on('result', this.handleResult.bind(this)); + resultEmitter.on('row', this.handleRow.bind(this)); + resultEmitter.on('frameEnded', this.freeStreamId.bind(this)); + resultEmitter.on('nodeEvent', this.handleNodeEvent.bind(this)); + + this.netClient + .pipe(this.protocol) + .pipe(this.parser) + .pipe(resultEmitter); + + this.writeQueue = new WriteQueue(this.netClient, this.encoder, this.options); + } + + /** + * Connects a socket and sends the startup protocol messages. + * Note that when open() callbacks in error, the caller should immediately call {@link Connection#close}. 
+ */ + open(callback) { + const self = this; + this.log('info', `Connecting to ${this.endpointFriendlyName}`); + + if (!this.options.sslOptions) { + this.netClient = new net.Socket({ highWaterMark: this.options.socketOptions.coalescingThreshold }); + this.netClient.connect(this.port, this.address, function connectCallback() { + self.log('verbose', `Socket connected to ${self.endpointFriendlyName}`); + self.bindSocketListeners(); + self.startup(callback); + }); + } + else { + // Use TLS + const sslOptions = utils.extend({ rejectUnauthorized: false }, this.options.sslOptions); + + if (this.options.sni) { + sslOptions.servername = this._serverName; + } + + this.netClient = tls.connect(this.port, this.address, sslOptions, function tlsConnectCallback() { + self.log('verbose', `Secure socket connected to ${self.endpointFriendlyName} with protocol ${self.netClient.getProtocol()}`); + self.bindSocketListeners(); + self.startup(callback); + }); + + // TLSSocket will validate for values from 512 to 16K (depending on the SSL protocol version) + this.netClient.setMaxSendFragment(this.options.socketOptions.coalescingThreshold); + } + + this.netClient.once('error', function socketError(err) { + self.errorConnecting(err, false, callback); + }); + + this.netClient.once('timeout', function connectTimedOut() { + const err = new types.DriverError('Connection timeout'); + self.errorConnecting(err, true, callback); + }); + + this.netClient.setTimeout(this.options.socketOptions.connectTimeout); + + // Improve failure detection with TCP keep-alives + if (this.options.socketOptions.keepAlive) { + this.netClient.setKeepAlive(true, this.options.socketOptions.keepAliveDelay); + } + + this.netClient.setNoDelay(!!this.options.socketOptions.tcpNoDelay); + } + + /** + * Determines the protocol version to use and sends the STARTUP request + * @param {Function} callback + */ + startup(callback) { + if (this._checkingVersion) { + this.log('info', 'Trying to use protocol version 0x' + 
this.protocolVersion.toString(16)); + } + + const self = this; + const request = new requests.StartupRequest({ + noCompact: this.options.protocolOptions.noCompact, + clientId: this.options.id, + applicationName: this.options.applicationName, + applicationVersion: this.options.applicationVersion + }); + + this.sendStream(request, null, function responseCallback(err, response) { + if (err && self._checkingVersion) { + let invalidProtocol = (err instanceof errors.ResponseError && + err.code === types.responseErrorCodes.protocolError && + err.message.indexOf('Invalid or unsupported protocol version') >= 0); + + if (!invalidProtocol && types.protocolVersion.canStartupResponseErrorBeWrapped(self.protocolVersion)) { + //For some versions of Cassandra, the error is wrapped into a server error + //See CASSANDRA-9451 + invalidProtocol = (err instanceof errors.ResponseError && + err.code === types.responseErrorCodes.serverError && + err.message.indexOf('ProtocolException: Invalid or unsupported protocol version') > 0); + } + + if (invalidProtocol) { + // The server can respond with a message using the lower protocol version supported + // or using the same version as the one provided + let lowerVersion = self.protocol.version; + + if (lowerVersion === self.protocolVersion) { + lowerVersion = types.protocolVersion.getLowerSupported(self.protocolVersion); + } else if (!types.protocolVersion.isSupported(self.protocol.version)) { + // If we have an unsupported protocol version or a beta version we need to switch + // to something we can support. Note that dseV1 and dseV2 are excluded from this + // logic as they are supported. Also note that any v5 and greater beta protocols + // are included here since the beta flag was introduced in v5. 
+ self.log('info',`Protocol version ${self.protocol.version} not supported by this driver, downgrading`); + lowerVersion = types.protocolVersion.getLowerSupported(self.protocol.version); + } + + if (!lowerVersion) { + return startupCallback( + new Error('Connection was unable to STARTUP using protocol version ' + self.protocolVersion)); + } + + self.log('info', 'Protocol 0x' + self.protocolVersion.toString(16) + ' not supported, using 0x' + lowerVersion.toString(16)); + self.decreaseVersion(lowerVersion); + + // The host closed the connection, close the socket and start the connection flow again + setImmediate(function decreasingVersionClosing() { + self.close(function decreasingVersionOpening() { + // Attempt to open with the correct protocol version + self.open(callback); + }); + }); + + return; + } + } + + if (response && response.mustAuthenticate) { + return self.startAuthenticating(response.authenticatorName, startupCallback); + } + + startupCallback(err); + }); + + function startupCallback(err) { + if (err) { + return self.errorConnecting(err, false, callback); + } + //The socket is connected and the connection is authenticated + return self.connectionReady(callback); + } + } + + errorConnecting(err, destroy, callback) { + this.log('warning', `There was an error when trying to connect to the host ${this.endpointFriendlyName}`, err); + if (destroy) { + //there is a TCP connection that should be killed. + this.netClient.destroy(); + } + + this._metrics.onConnectionError(err); + + callback(err); + } + + /** + * Sets the connection to ready/connected status + */ + connectionReady(callback) { + this.emit('connected'); + this.connected = true; + // Remove existing error handlers as the connection is now ready. 
+ this.netClient.removeAllListeners('error'); + this.netClient.on('error', this.handleSocketError.bind(this)); + callback(); + } + + /** @param {Number} lowerVersion */ + decreaseVersion(lowerVersion) { + // The response already has the max protocol version supported by the Cassandra host. + this.protocolVersion = lowerVersion; + this.encoder.setProtocolVersion(lowerVersion); + this._streamIds.setVersion(lowerVersion); + } + + /** + * Handle socket errors, if the socket is not readable invoke all pending callbacks + */ + handleSocketError(err) { + this._metrics.onConnectionError(err); + this.clearAndInvokePending(err); + } + + /** + * Cleans all internal state and invokes all pending callbacks of sent streams + */ + clearAndInvokePending(innerError) { + if (this._idleTimeout) { + //Remove the idle request + clearTimeout(this._idleTimeout); + this._idleTimeout = null; + } + this._streamIds.clear(); + if (this.emitDrain) { + this.emit('drain'); + } + const err = new types.DriverError('Socket was closed'); + err.isSocketError = true; + if (innerError) { + err.innerError = innerError; + } + + // Get all handlers + const operations = Array.from(this._operations.values()); + // Clear pending operation map + this._operations = new Map(); + + if (operations.length > 0) { + this.log('info', 'Invoking ' + operations.length + ' pending callbacks'); + } + + // Invoke all handlers + utils.each(operations, function (operation, next) { + operation.setResult(err); + next(); + }); + + const pendingWritesCopy = this._pendingWrites; + this._pendingWrites = []; + utils.each(pendingWritesCopy, function (operation, next) { + operation.setResult(err); + next(); + }); + } + + /** + * Starts the SASL flow + * @param {String} authenticatorName + * @param {Function} callback + */ + startAuthenticating(authenticatorName, callback) { + if (!this.options.authProvider) { + return callback(new errors.AuthenticationError('Authentication provider not set')); + } + const authenticator = 
this.options.authProvider.newAuthenticator(this.endpoint, authenticatorName); + const self = this; + authenticator.initialResponse(function initialResponseCallback(err, token) { + // Start the flow with the initial token + if (err) { + return self.onAuthenticationError(callback, err); + } + self.authenticate(authenticator, token, callback); + }); + } + + /** + * Handles authentication requests and responses. + * @param {Authenticator} authenticator + * @param {Buffer} token + * @param {Function} callback + */ + authenticate(authenticator, token, callback) { + const self = this; + let request = new requests.AuthResponseRequest(token); + if (this.protocolVersion === 1) { + //No Sasl support, use CREDENTIALS + if (!authenticator.username) { + return self.onAuthenticationError( + callback, new errors.AuthenticationError('Only plain text authenticator providers allowed under protocol v1')); + } + + request = new requests.CredentialsRequest(authenticator.username, authenticator.password); + } + + this.sendStream(request, null, function authResponseCallback(err, result) { + if (err) { + if (err instanceof errors.ResponseError && err.code === types.responseErrorCodes.badCredentials) { + const authError = new errors.AuthenticationError(err.message); + authError.additionalInfo = err; + err = authError; + } + return self.onAuthenticationError(callback, err); + } + + if (result.ready) { + authenticator.onAuthenticationSuccess(); + return callback(); + } + + if (result.authChallenge) { + return authenticator.evaluateChallenge(result.token, function evaluateCallback(err, t) { + if (err) { + return self.onAuthenticationError(callback, err); + } + //here we go again + self.authenticate(authenticator, t, callback); + }); + } + + callback(new errors.DriverInternalError('Unexpected response from Cassandra: ' + util.inspect(result))); + }); + } + + onAuthenticationError(callback, err) { + this._metrics.onAuthenticationError(err); + callback(err); + } + + /** + * Executes a 'USE ' 
query, if keyspace is provided and it is different from the current keyspace + * @param {?String} keyspace + */ + async changeKeyspace(keyspace) { + if (!keyspace || this.keyspace === keyspace) { + return; + } + + if (this.toBeKeyspace === keyspace) { + // It will be invoked once the keyspace is changed + return promiseUtils.fromEvent(this, 'keyspaceChanged'); + } + + this.toBeKeyspace = keyspace; + + const query = `USE "${keyspace}"`; + + try { + await this.send(new requests.QueryRequest(query, null, null), null); + this.keyspace = keyspace; + this.emit('keyspaceChanged', null, keyspace); + } catch (err) { + this.log('error', `Connection to ${this.endpointFriendlyName} could not switch active keyspace: ${err}`, err); + this.emit('keyspaceChanged', err); + throw err; + } finally { + this.toBeKeyspace = null; + } + } + + /** + * Prepares a query on a given connection. If its already being prepared, it queues the callback. + * @param {String} query + * @param {String} keyspace + * @param {function} callback + */ + prepareOnce(query, keyspace, callback) { + const name = ( keyspace || '' ) + query; + let info = this._preparing.get(name); + + if (info) { + // Its being already prepared + return info.once('prepared', callback); + } + + info = new events.EventEmitter(); + info.setMaxListeners(0); + info.once('prepared', callback); + this._preparing.set(name, info); + + this.sendStream(new requests.PrepareRequest(query, keyspace), null, (err, response) => { + info.emit('prepared', err, response); + this._preparing.delete(name); + }); + } + + /** + * Queues the operation to be written to the wire and invokes the callback once the response was obtained or with an + * error (socket error or OperationTimedOutError or serialization-related error). 
+ * @param {Request} request + * @param {ExecutionOptions|null} execOptions + * @param {function} callback Function to be called once the response has been received + * @return {OperationState} + */ + sendStream(request, execOptions, callback) { + execOptions = execOptions || ExecutionOptions.empty(); + + // Create a new operation that will contain the request, callback and timeouts + const operation = new OperationState(request, execOptions.getRowCallback(), (err, response, length) => { + if (!err || !err.isSocketError) { + // Emit that a response was obtained when there is a valid response + // or when the error is not a socket error + this.emit('responseDequeued'); + } + callback(err, response, length); + }); + + const streamId = this._getStreamId(); + + // Start the request timeout without waiting for the request to be written + operation.setRequestTimeout(execOptions, this.options.socketOptions.readTimeout, this.endpoint, + () => this.timedOutOperations++, + () => this.timedOutOperations--); + + if (streamId === null) { + this.log('info', + 'Enqueuing ' + + this._pendingWrites.length + + ', if this message is recurrent consider configuring more connections per host or lowering the pressure'); + this._pendingWrites.push(operation); + return operation; + } + this._write(operation, streamId); + return operation; + } + + /** + * Pushes the item into the queue. + * @param {OperationState} operation + * @param {Number} streamId + * @private + */ + _write(operation, streamId) { + operation.streamId = streamId; + const self = this; + this.writeQueue.push(operation, function writeCallback (err) { + if (err) { + // The request was not written. 
+ // There was a serialization error or the operation has already timed out or was cancelled + self._streamIds.push(streamId); + return operation.setResult(err); + } + self.log('verbose', 'Sent stream #' + streamId + ' to ' + self.endpointFriendlyName); + if (operation.isByRow()) { + self.parser.setOptions(streamId, { byRow: true }); + } + self._setIdleTimeout(); + self._operations.set(streamId, operation); + }); + } + + _setIdleTimeout() { + if (!this.options.pooling.heartBeatInterval) { + return; + } + const self = this; + // Scheduling the new timeout before de-scheduling the previous performs significantly better + // than de-scheduling first, see nodejs implementation: https://github.com/nodejs/node/blob/master/lib/timers.js + const previousTimeout = this._idleTimeout; + self._idleTimeout = setTimeout(() => self._idleTimeoutHandler(), self.options.pooling.heartBeatInterval); + if (previousTimeout) { + //remove the previous timeout for the idle request + clearTimeout(previousTimeout); + } + } + + /** + * Function that gets executed once the idle timeout has passed to issue a request to keep the connection alive + */ + _idleTimeoutHandler() { + if (this.sendingIdleQuery) { + //don't issue another + //schedule for next time + this._idleTimeout = setTimeout(() => this._idleTimeoutHandler(), this.options.pooling.heartBeatInterval); + return; + } + + this.log('verbose', `Connection to ${this.endpointFriendlyName} idling, issuing a request to prevent disconnects`); + this.sendingIdleQuery = true; + this.sendStream(requests.options, null, (err) => { + this.sendingIdleQuery = false; + if (!err) { + //The sending succeeded + //There is a valid response but we don't care about the response + return; + } + this.log('warning', 'Received heartbeat request error', err); + this.emit('idleRequestError', err, this); + }); + } + + /** + * Returns an available streamId or null if there isn't any available + * @returns {Number} + */ + _getStreamId() { + return 
this._streamIds.pop(); + } + + freeStreamId(header) { + const streamId = header.streamId; + + if (streamId < 0) { + // Event ids don't have a matching request operation + return; + } + + this._operations.delete(streamId); + this._streamIds.push(streamId); + + if (this.emitDrain && this._streamIds.inUse === 0 && this._pendingWrites.length === 0) { + this.emit('drain'); + } + + this._writeNext(); + } + + _writeNext() { + if (this._pendingWrites.length === 0) { + return; + } + const streamId = this._getStreamId(); + if (streamId === null) { + // No streamId available + return; + } + const self = this; + let operation; + while ((operation = this._pendingWrites.shift()) && !operation.canBeWritten()) { + // Trying to obtain an pending operation that can be written + } + + if (!operation) { + // There isn't a pending operation that can be written + this._streamIds.push(streamId); + return; + } + + // Schedule after current I/O callbacks have been executed + setImmediate(function writeNextPending() { + self._write(operation, streamId); + }); + } + + /** + * Returns the number of requests waiting for response + * @returns {Number} + */ + getInFlight() { + return this._streamIds.inUse; + } + + /** + * Handles a result and error response + */ + handleResult(header, err, result) { + const streamId = header.streamId; + if(streamId < 0) { + return this.log('verbose', 'event received', header); + } + const operation = this._operations.get(streamId); + if (!operation) { + return this.log('error', 'The server replied with a wrong streamId #' + streamId); + } + this.log('verbose', 'Received frame #' + streamId + ' from ' + this.endpointFriendlyName); + operation.setResult(err, result, header.bodyLength); + } + + handleNodeEvent(header, event) { + switch (event.eventType) { + case types.protocolEvents.schemaChange: + this.emit('nodeSchemaChange', event); + break; + case types.protocolEvents.topologyChange: + this.emit('nodeTopologyChange', event); + break; + case 
types.protocolEvents.statusChange: + this.emit('nodeStatusChange', event); + break; + } + } + + /** + * Handles a row response + */ + handleRow(header, row, meta, rowLength, flags) { + const streamId = header.streamId; + if(streamId < 0) { + return this.log('verbose', 'Event received', header); + } + const operation = this._operations.get(streamId); + if (!operation) { + return this.log('error', 'The server replied with a wrong streamId #' + streamId); + } + operation.setResultRow(row, meta, rowLength, flags, header); + } + + /** + * Closes the socket (if not already closed) and cancels all in-flight requests. + * Multiple calls to this method have no additional side-effects. + * @param {Function} [callback] + */ + close(callback) { + callback = callback || utils.noop; + + if (!this.connected && !this.isSocketOpen) { + return callback(); + } + + this.connected = false; + // Drain is never going to be emitted, once it is set to closed + this.removeAllListeners('drain'); + this.clearAndInvokePending(); + + if (!this.isSocketOpen) { + return callback(); + } + + // Set the socket as closed now (before socket.end() is called) to avoid being invoked more than once + this.isSocketOpen = false; + this.log('verbose', `Closing connection to ${this.endpointFriendlyName}`); + const self = this; + + // If server doesn't acknowledge the half-close within connection timeout, destroy the socket. 
+ const endTimeout = setTimeout(() => { + this.log('info', `${this.endpointFriendlyName} did not respond to connection close within ` + + `${this.options.socketOptions.connectTimeout}ms, destroying connection`); + this.netClient.destroy(); + }, this.options.socketOptions.connectTimeout); + + this.netClient.once('close', function (hadError) { + clearTimeout(endTimeout); + if (hadError) { + self.log('info', 'The socket closed with a transmission error'); + } + setImmediate(callback); + }); + + // At this point, the error event can be triggered because: + // - It's connected and writes haven't completed yet + // - The server abruptly closed its end of the connection (ECONNRESET) as a result of protocol error / auth error + // We need to remove any listeners and make sure we callback are pending writes + this.netClient.removeAllListeners('error'); + this.netClient.on('error', err => this.clearAndInvokePending(err)); + + // Half-close the socket, it will result in 'close' event being fired + this.netClient.end(); + } + + /** + * Gets the local IP address to which this connection socket is bound to. + * @returns {String|undefined} + */ + getLocalAddress() { + if (!this.netClient) { + return undefined; + } + + return this.netClient.localAddress; + } +} + +module.exports = Connection; diff --git a/node_modules/cassandra-driver/lib/control-connection.js b/node_modules/cassandra-driver/lib/control-connection.js new file mode 100644 index 0000000..54b3e61 --- /dev/null +++ b/node_modules/cassandra-driver/lib/control-connection.js @@ -0,0 +1,1073 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const events = require('events'); +const util = require('util'); +const net = require('net'); +const dns = require('dns'); + +const errors = require('./errors'); +const { Host, HostMap } = require('./host'); +const Metadata = require('./metadata'); +const EventDebouncer = require('./metadata/event-debouncer'); +const Connection = require('./connection'); +const requests = require('./requests'); +const utils = require('./utils'); +const types = require('./types'); +const promiseUtils = require('./promise-utils'); +const f = util.format; + +const selectPeers = "SELECT * FROM system.peers"; +const selectLocal = "SELECT * FROM system.local WHERE key='local'"; +const newNodeDelay = 1000; +const metadataQueryAbortTimeout = 2000; +const schemaChangeTypes = { + created: 'CREATED', + updated: 'UPDATED', + dropped: 'DROPPED' +}; +const supportedProductTypeKey = 'PRODUCT_TYPE'; +const supportedDbaas = 'DATASTAX_APOLLO'; + +/** + * Represents a connection used by the driver to receive events and to check the status of the cluster. + *

It uses an existing connection from the hosts' connection pool to maintain the driver metadata up-to-date.

+ */ +class ControlConnection extends events.EventEmitter { + + /** + * Creates a new instance of ControlConnection. + * @param {Object} options + * @param {ProfileManager} profileManager + * @param {{borrowHostConnection: function, createConnection: function}} [context] An object containing methods to + * allow dependency injection. + */ + constructor(options, profileManager, context) { + super(); + + this.protocolVersion = null; + this.hosts = new HostMap(); + this.setMaxListeners(0); + this.log = utils.log; + Object.defineProperty(this, "options", { value: options, enumerable: false, writable: false}); + + /** + * Cluster metadata that is going to be shared between the Client and ControlConnection + */ + this.metadata = new Metadata(this.options, this); + this.initialized = false; + + /** + * Host used by the control connection + * @type {Host|null} + */ + this.host = null; + + /** + * Connection used to retrieve metadata and subscribed to events + * @type {Connection|null} + */ + this.connection = null; + + this._addressTranslator = this.options.policies.addressResolution; + this._reconnectionPolicy = this.options.policies.reconnection; + this._reconnectionSchedule = this._reconnectionPolicy.newSchedule(); + this._isShuttingDown = false; + + // Reference to the encoder of the last valid connection + this._encoder = null; + this._debouncer = new EventDebouncer(options.refreshSchemaDelay, this.log.bind(this)); + this._profileManager = profileManager; + this._triedHosts = null; + this._resolvedContactPoints = new Map(); + this._contactPoints = new Set(); + + // Timeout used for delayed handling of topology changes + this._topologyChangeTimeout = null; + // Timeout used for delayed handling of node status changes + this._nodeStatusChangeTimeout = null; + + if (context && context.borrowHostConnection) { + this._borrowHostConnection = context.borrowHostConnection; + } + + if (context && context.createConnection) { + this._createConnection = context.createConnection; 
+ } + } + + /** + * Stores the contact point information and what it resolved to. + * @param {String|null} address + * @param {String} port + * @param {String} name + * @param {Boolean} isIPv6 + */ + _addContactPoint(address, port, name, isIPv6) { + if (address === null) { + // Contact point could not be resolved, store that the resolution came back empty + this._resolvedContactPoints.set(name, utils.emptyArray); + return; + } + + const portNumber = parseInt(port, 10) || this.options.protocolOptions.port; + const endpoint = `${address}:${portNumber}`; + this._contactPoints.add(endpoint); + + // Use RFC 3986 for IPv4 and IPv6 + const standardEndpoint = !isIPv6 ? endpoint : `[${address}]:${portNumber}`; + + let resolvedAddressedByName = this._resolvedContactPoints.get(name); + + // NODEJS-646 + // + // We might have a frozen empty array if DNS resolution wasn't working when this name was + // initially added, and if that's the case we can't add anything. Detect that case and + // reset to a mutable array. 
+ if (resolvedAddressedByName === undefined || resolvedAddressedByName === utils.emptyArray) { + resolvedAddressedByName = []; + this._resolvedContactPoints.set(name, resolvedAddressedByName); + } + + resolvedAddressedByName.push(standardEndpoint); + } + + async _parseContactPoint(name) { + let addressOrName = name; + let port = null; + + if (name.indexOf('[') === 0 && name.indexOf(']:') > 1) { + // IPv6 host notation [ip]:port (RFC 3986 section 3.2.2) + const index = name.lastIndexOf(']:'); + addressOrName = name.substr(1, index - 1); + port = name.substr(index + 2); + } else if (name.indexOf(':') > 0) { + // IPv4 or host name with port notation + const parts = name.split(':'); + if (parts.length === 2) { + addressOrName = parts[0]; + port = parts[1]; + } + } + + if (net.isIP(addressOrName)) { + this._addContactPoint(addressOrName, port, name, net.isIPv6(addressOrName)); + return; + } + + const addresses = await this._resolveAll(addressOrName); + if (addresses.length > 0) { + addresses.forEach(addressInfo => this._addContactPoint(addressInfo.address, port, name, addressInfo.isIPv6)); + } else { + // Store that we attempted resolving the name but was not found + this._addContactPoint(null, null, name, false); + } + } + + /** + * Initializes the control connection by establishing a Connection using a suitable protocol + * version to be used and retrieving cluster metadata. 
+ */ + async init() { + if (this.initialized) { + // Prevent multiple serial initializations + return; + } + + if (!this.options.sni) { + // Parse and resolve contact points + await Promise.all(this.options.contactPoints.map(name => this._parseContactPoint(name))); + } else { + this.options.contactPoints.forEach(cp => this._contactPoints.add(cp)); + const address = this.options.sni.address; + const separatorIndex = address.lastIndexOf(':'); + + if (separatorIndex === -1) { + throw new new errors.DriverInternalError('The SNI endpoint address should contain ip/name and port'); + } + + const nameOrIp = address.substr(0, separatorIndex); + this.options.sni.port = address.substr(separatorIndex + 1); + this.options.sni.addressResolver = new utils.AddressResolver({ nameOrIp, dns }); + await this.options.sni.addressResolver.init(); + } + + if (this._contactPoints.size === 0) { + throw new errors.NoHostAvailableError({}, 'No host could be resolved'); + } + + await this._initializeConnection(); + } + + _setHealthListeners(host, connection) { + const self = this; + let wasRefreshCalled = 0; + + function removeListeners() { + host.removeListener('down', downOrIgnoredHandler); + host.removeListener('ignore', downOrIgnoredHandler); + connection.removeListener('socketClose', socketClosedHandler); + } + + function startReconnecting(hostDown) { + if (wasRefreshCalled++ !== 0) { + // Prevent multiple calls to reconnect + return; + } + + removeListeners(); + + if (self._isShuttingDown) { + // Don't attempt to reconnect when the ControlConnection is being shutdown + return; + } + + if (hostDown) { + self.log('warning', + `Host ${host.address} used by the ControlConnection DOWN, ` + + `connection to ${connection.endpointFriendlyName} will not longer be used`); + } else { + self.log('warning', `Connection to ${connection.endpointFriendlyName} used by the ControlConnection was closed`); + } + + promiseUtils.toBackground(self._refresh()); + } + + function downOrIgnoredHandler() { + 
startReconnecting(true); + } + + function socketClosedHandler() { + startReconnecting(false); + } + + host.once('down', downOrIgnoredHandler); + host.once('ignore', downOrIgnoredHandler); + connection.once('socketClose', socketClosedHandler); + } + + /** + * Iterates through the hostIterator and Gets the following open connection. + * @param {Iterator} hostIterator + * @returns {Connection!} + */ + _borrowAConnection(hostIterator) { + let connection = null; + + while (!connection) { + const item = hostIterator.next(); + const host = item.value; + + if (item.done) { + throw new errors.NoHostAvailableError(this._triedHosts); + } + + // Only check distance once the load-balancing policies have been initialized + const distance = this._profileManager.getDistance(host); + if (!host.isUp() || distance === types.distance.ignored) { + continue; + } + + try { + connection = this._borrowHostConnection(host); + } catch (err) { + this._triedHosts[host.address] = err; + } + } + + return connection; + } + + /** + * Iterates through the contact points and tries to open a connection. 
+ * @param {Iterator} contactPointsIterator + * @returns {Promise} + */ + async _borrowFirstConnection(contactPointsIterator) { + let connection = null; + + while (!connection) { + const item = contactPointsIterator.next(); + const contactPoint = item.value; + + if (item.done) { + throw new errors.NoHostAvailableError(this._triedHosts); + } + + try { + connection = await this._createConnection(contactPoint); + } catch (err) { + this._triedHosts[contactPoint] = err; + } + } + + if (!connection) { + const err = new errors.NoHostAvailableError(this._triedHosts); + this.log('error', 'ControlConnection failed to acquire a connection'); + throw err; + } + + this.protocolVersion = connection.protocolVersion; + this._encoder = connection.encoder; + this.connection = connection; + } + + /** Default implementation for borrowing connections, that can be injected at constructor level */ + _borrowHostConnection(host) { + // Borrow any open connection, regardless of the keyspace + return host.borrowConnection(); + } + + /** + * Default implementation for creating initial connections, that can be injected at constructor level + * @param {String} contactPoint + */ + async _createConnection(contactPoint) { + const c = new Connection(contactPoint, null, this.options); + + try { + await c.openAsync(); + } catch (err) { + promiseUtils.toBackground(c.closeAsync()); + throw err; + } + + return c; + } + + /** + * Gets the info from local and peer metadata, reloads the keyspaces metadata and rebuilds tokens. + *

It throws an error when there's a failure or when reconnecting and there's no connection.

+ * @param {Boolean} initializing Determines whether this function was called in order to initialize the control + * connection the first time + * @param {Boolean} isReconnecting Determines whether the refresh is being done because the ControlConnection is + * switching to use this connection to this host. + */ + async _refreshHosts(initializing, isReconnecting) { + // Get a reference to the current connection as it might change from external events + const c = this.connection; + + if (!c) { + if (isReconnecting) { + throw new errors.DriverInternalError('Connection reference has been lost when reconnecting'); + } + + // it's possible that this was called as a result of a topology change, but the connection was lost + // between scheduling time and now. This will be called again when there is a new connection. + return; + } + + this.log('info', 'Refreshing local and peers info'); + + const rsLocal = await c.send(new requests.QueryRequest(selectLocal), null); + this._setLocalInfo(initializing, isReconnecting, c, rsLocal); + + if (!this.host) { + throw new errors.DriverInternalError('Information from system.local could not be retrieved'); + } + + const rsPeers = await c.send(new requests.QueryRequest(selectPeers), null); + await this.setPeersInfo(initializing, rsPeers); + + if (!this.initialized) { + // resolve protocol version from highest common version among hosts. + const highestCommon = types.protocolVersion.getHighestCommon(c, this.hosts); + const reconnect = highestCommon !== this.protocolVersion; + + // set protocol version on each host. + this.protocolVersion = highestCommon; + this.hosts.forEach(h => h.setProtocolVersion(this.protocolVersion)); + + // if protocol version changed, reconnect the control connection with new version. 
+ if (reconnect) { + this.log('info', `Reconnecting since the protocol version changed to 0x${highestCommon.toString(16)}`); + c.decreaseVersion(this.protocolVersion); + await c.closeAsync(); + + try { + await c.openAsync(); + } catch (err) { + // Close in the background + promiseUtils.toBackground(c.closeAsync()); + + throw err; + } + } + + // To acquire metadata we need to specify the cassandra version + this.metadata.setCassandraVersion(this.host.getCassandraVersion()); + this.metadata.buildTokens(this.hosts); + + if (!this.options.isMetadataSyncEnabled) { + this.metadata.initialized = true; + return; + } + + await this.metadata.refreshKeyspacesInternal(false); + this.metadata.initialized = true; + } + } + + async _refreshControlConnection(hostIterator) { + + if (this.options.sni) { + this.connection = this._borrowAConnection(hostIterator); + } + else { + try { this.connection = this._borrowAConnection(hostIterator); } + catch(err) { + + /* NODEJS-632: refresh nodes before getting hosts for reconnect since some hostnames may have + * shifted during the flight. */ + this.log("info", "ControlConnection could not reconnect using existing connections. Refreshing contact points and retrying"); + this._contactPoints.clear(); + this._resolvedContactPoints.clear(); + await Promise.all(this.options.contactPoints.map(name => this._parseContactPoint(name))); + const refreshedContactPoints = Array.from(this._contactPoints).join(','); + this.log('info', `Refreshed contact points: ${refreshedContactPoints}`); + await this._initializeConnection(); + } + } + } + + /** + * Acquires a new connection and refreshes topology and keyspace metadata. + *

When it fails obtaining a connection and there aren't any more hosts, it schedules reconnection.

+ *

When it fails obtaining the metadata, it marks connection and/or host unusable and retries using the same + * iterator from query plan / host list

+ * @param {Iterator} [hostIterator] + */ + async _refresh(hostIterator) { + if (this._isShuttingDown) { + this.log('info', 'The ControlConnection will not be refreshed as the Client is being shutdown'); + return; + } + + // Reset host and connection + this.host = null; + this.connection = null; + + try { + if (!hostIterator) { + this.log('info', 'Trying to acquire a connection to a new host'); + this._triedHosts = {}; + hostIterator = await promiseUtils.newQueryPlan(this._profileManager.getDefaultLoadBalancing(), null, null); + } + + await this._refreshControlConnection(hostIterator); + } catch (err) { + // There was a failure obtaining a connection or during metadata retrieval + this.log('error', 'ControlConnection failed to acquire a connection', err); + + if (!this._isShuttingDown) { + const delay = this._reconnectionSchedule.next().value; + this.log('warning', `ControlConnection could not reconnect, scheduling reconnection in ${delay}ms`); + setTimeout(() => this._refresh(), delay); + this.emit('newConnection', err); + } + + return; + } + + this.log('info',`ControlConnection connected to ${this.connection.endpointFriendlyName}`); + + try { + await this._refreshHosts(false, true); + + await this._registerToConnectionEvents(); + } catch (err) { + this.log('error', 'ControlConnection failed to retrieve topology and keyspaces information', err); + this._triedHosts[this.connection.endpoint] = err; + + if (err.isSocketError && this.host) { + this.host.removeFromPool(this.connection); + } + + // Retry the whole thing with the same query plan + return await this._refresh(hostIterator); + } + + this._reconnectionSchedule = this._reconnectionPolicy.newSchedule(); + this._setHealthListeners(this.host, this.connection); + this.emit('newConnection', null, this.connection, this.host); + + this.log('info', `ControlConnection connected to ${this.connection.endpointFriendlyName} and up to date`); + } + + /** + * Acquires a connection and refreshes topology and keyspace 
metadata for the first time. + * @returns {Promise} + */ + async _initializeConnection() { + this.log('info', 'Getting first connection'); + + // Reset host and connection + this.host = null; + this.connection = null; + this._triedHosts = {}; + + // Randomize order of contact points resolved. + const contactPointsIterator = utils.shuffleArray(Array.from(this._contactPoints))[Symbol.iterator](); + + while (true) { + await this._borrowFirstConnection(contactPointsIterator); + + this.log('info', `ControlConnection using protocol version 0x${ + this.protocolVersion.toString(16)}, connected to ${this.connection.endpointFriendlyName}`); + + try { + await this._getSupportedOptions(); + await this._refreshHosts(true, true); + await this._registerToConnectionEvents(); + + // We have a valid connection, leave the loop + break; + + } catch (err) { + this.log('error', 'ControlConnection failed to retrieve topology and keyspaces information', err); + this._triedHosts[this.connection.endpoint] = err; + } + } + + // The healthy connection used to initialize should be part of the Host pool + this.host.pool.addExistingConnection(this.connection); + + this.initialized = true; + this._setHealthListeners(this.host, this.connection); + this.log('info', `ControlConnection connected to ${this.connection.endpointFriendlyName}`); + } + + async _getSupportedOptions() { + const response = await this.connection.send(requests.options, null); + + // response.supported is a string multi map, decoded as an Object. 
+ const productType = response.supported && response.supported[supportedProductTypeKey]; + if (Array.isArray(productType) && productType[0] === supportedDbaas) { + this.metadata.setProductTypeAsDbaas(); + } + } + + async _registerToConnectionEvents() { + this.connection.on('nodeTopologyChange', this._nodeTopologyChangeHandler.bind(this)); + this.connection.on('nodeStatusChange', this._nodeStatusChangeHandler.bind(this)); + this.connection.on('nodeSchemaChange', this._nodeSchemaChangeHandler.bind(this)); + const request = new requests.RegisterRequest(['TOPOLOGY_CHANGE', 'STATUS_CHANGE', 'SCHEMA_CHANGE']); + await this.connection.send(request, null); + } + + /** + * Handles a TOPOLOGY_CHANGE event + */ + _nodeTopologyChangeHandler(event) { + this.log('info', 'Received topology change', event); + + // all hosts information needs to be refreshed as tokens might have changed + clearTimeout(this._topologyChangeTimeout); + + // Use an additional timer to make sure that the refresh hosts is executed only AFTER the delay + // In this case, the event debouncer doesn't help because it could not honor the sliding delay (ie: processNow) + this._topologyChangeTimeout = setTimeout(() => + promiseUtils.toBackground(this._scheduleRefreshHosts()), newNodeDelay); + } + + /** + * Handles a STATUS_CHANGE event + */ + _nodeStatusChangeHandler(event) { + const self = this; + const addressToTranslate = event.inet.address.toString(); + const port = this.options.protocolOptions.port; + this._addressTranslator.translate(addressToTranslate, port, function translateCallback(endPoint) { + const host = self.hosts.get(endPoint); + if (!host) { + self.log('warning', 'Received status change event but host was not found: ' + addressToTranslate); + return; + } + const distance = self._profileManager.getDistance(host); + if (event.up) { + if (distance === types.distance.ignored) { + return host.setUp(true); + } + clearTimeout(self._nodeStatusChangeTimeout); + // Waits a couple of seconds before 
marking it as UP + self._nodeStatusChangeTimeout = setTimeout(() => host.checkIsUp(), newNodeDelay); + return; + } + // marked as down + if (distance === types.distance.ignored) { + return host.setDown(); + } + self.log('warning', 'Received status change to DOWN for host ' + host.address); + }); + } + + /** + * Handles a SCHEMA_CHANGE event + */ + _nodeSchemaChangeHandler(event) { + this.log('info', 'Schema change', event); + if (!this.options.isMetadataSyncEnabled) { + return; + } + + promiseUtils.toBackground(this.handleSchemaChange(event, false)); + } + + /** + * Schedules metadata refresh and callbacks when is refreshed. + * @param {{keyspace: string, isKeyspace: boolean, schemaChangeType, table, udt, functionName, aggregate}} event + * @param {Boolean} processNow + * @returns {Promise} + */ + handleSchemaChange(event, processNow) { + const self = this; + let handler, cqlObject; + + if (event.isKeyspace) { + if (event.schemaChangeType === schemaChangeTypes.dropped) { + handler = function removeKeyspace() { + // if on the same event queue there is a creation, this handler is not going to be executed + // it is safe to remove the keyspace metadata + delete self.metadata.keyspaces[event.keyspace]; + }; + + return this._scheduleObjectRefresh(handler, event.keyspace, null, processNow); + } + + return this._scheduleKeyspaceRefresh(event.keyspace, processNow); + } + + const ksInfo = this.metadata.keyspaces[event.keyspace]; + if (!ksInfo) { + // it hasn't been loaded and it is not part of the metadata, don't mind + return Promise.resolve(); + } + + if (event.table) { + cqlObject = event.table; + handler = function clearTableState() { + delete ksInfo.tables[event.table]; + delete ksInfo.views[event.table]; + }; + } + else if (event.udt) { + cqlObject = event.udt; + handler = function clearUdtState() { + delete ksInfo.udts[event.udt]; + }; + } + else if (event.functionName) { + cqlObject = event.functionName; + handler = function clearFunctionState() { + delete 
ksInfo.functions[event.functionName]; + }; + } + else if (event.aggregate) { + cqlObject = event.aggregate; + handler = function clearKeyspaceState() { + delete ksInfo.aggregates[event.aggregate]; + }; + } + + if (!handler) { + // Forward compatibility + return Promise.resolve(); + } + + // It's a cql object change clean the internal cache + return this._scheduleObjectRefresh(handler, event.keyspace, cqlObject, processNow); + } + + /** + * @param {Function} handler + * @param {String} keyspace + * @param {String} cqlObject + * @param {Boolean} processNow + * @returns {Promise} + */ + _scheduleObjectRefresh(handler, keyspace, cqlObject, processNow) { + return this._debouncer.eventReceived({ handler, keyspace, cqlObject }, processNow); + } + + /** + * @param {String} keyspace + * @param {Boolean} processNow + * @returns {Promise} + */ + _scheduleKeyspaceRefresh(keyspace, processNow) { + return this._debouncer.eventReceived({ + handler: () => this.metadata.refreshKeyspace(keyspace), + keyspace + }, processNow); + } + + /** @returns {Promise} */ + _scheduleRefreshHosts() { + return this._debouncer.eventReceived({ + handler: () => this._refreshHosts(false, false), + all: true + }, false); + } + + /** + * Sets the information for the host used by the control connection. + * @param {Boolean} initializing + * @param {Connection} c + * @param {Boolean} setCurrentHost Determines if the host retrieved must be set as the current host + * @param result + */ + _setLocalInfo(initializing, setCurrentHost, c, result) { + if (!result || !result.rows || !result.rows.length) { + this.log('warning', 'No local info could be obtained'); + return; + } + + const row = result.rows[0]; + + let localHost; + + // Note that with SNI enabled, we can trust that rpc_address will contain a valid value. + const endpoint = !this.options.sni + ? 
c.endpoint + : `${row['rpc_address']}:${this.options.protocolOptions.port}`; + + if (initializing) { + localHost = new Host(endpoint, this.protocolVersion, this.options, this.metadata); + this.hosts.set(endpoint, localHost); + this.log('info', `Adding host ${endpoint}`); + } else { + localHost = this.hosts.get(endpoint); + + if (!localHost) { + this.log('error', 'Localhost could not be found'); + return; + } + } + + localHost.datacenter = row['data_center']; + localHost.rack = row['rack']; + localHost.tokens = row['tokens']; + localHost.hostId = row['host_id']; + localHost.cassandraVersion = row['release_version']; + setDseParameters(localHost, row); + this.metadata.setPartitioner(row['partitioner']); + this.log('info', 'Local info retrieved'); + + if (setCurrentHost) { + // Set the host as the one being used by the ControlConnection. + this.host = localHost; + } + } + + /** + * @param {Boolean} initializing Determines whether this function was called in order to initialize the control + * connection the first time. 
+ * @param {ResultSet} result + */ + async setPeersInfo(initializing, result) { + if (!result || !result.rows) { + return; + } + + // A map of peers, could useful for in case there are discrepancies + const peers = {}; + const port = this.options.protocolOptions.port; + const foundDataCenters = new Set(); + + if (this.host && this.host.datacenter) { + foundDataCenters.add(this.host.datacenter); + } + + for (const row of result.rows) { + const endpoint = await this.getAddressForPeerHost(row, port); + + if (!endpoint) { + continue; + } + + peers[endpoint] = true; + let host = this.hosts.get(endpoint); + let isNewHost = !host; + + if (isNewHost) { + host = new Host(endpoint, this.protocolVersion, this.options, this.metadata); + this.log('info', `Adding host ${endpoint}`); + isNewHost = true; + } + + host.datacenter = row['data_center']; + host.rack = row['rack']; + host.tokens = row['tokens']; + host.hostId = row['host_id']; + host.cassandraVersion = row['release_version']; + setDseParameters(host, row); + + if (host.datacenter) { + foundDataCenters.add(host.datacenter); + } + + if (isNewHost) { + // Add it to the map (and trigger events) after all the properties + // were set to avoid race conditions + this.hosts.set(endpoint, host); + + if (!initializing) { + // Set the distance at Host level, that way the connection pool is created with the correct settings + this._profileManager.getDistance(host); + + // When we are not initializing, we start with the node set as DOWN + host.setDown(); + } + } + } + + // Is there a difference in number between peers + local != hosts + if (this.hosts.length > result.rows.length + 1) { + // There are hosts in the current state that don't belong (nodes removed or wrong contactPoints) + this.log('info', 'Removing nodes from the pool'); + const toRemove = []; + + this.hosts.forEach(h => { + //It is not a peer and it is not local host + if (!peers[h.address] && h !== this.host) { + this.log('info', 'Removing host ' + h.address); + 
toRemove.push(h.address); + h.shutdown(true); + } + }); + + this.hosts.removeMultiple(toRemove); + } + + if (initializing && this.options.localDataCenter) { + const localDc = this.options.localDataCenter; + + if (!foundDataCenters.has(localDc)) { + throw new errors.ArgumentError(`localDataCenter was configured as '${ + localDc}', but only found hosts in data centers: [${Array.from(foundDataCenters).join(', ')}]`); + } + } + + this.log('info', 'Peers info retrieved'); + } + + /** + * Gets the address from a peers row and translates the address. + * @param {Object|Row} row + * @param {Number} defaultPort + * @returns {Promise} + */ + getAddressForPeerHost(row, defaultPort) { + return new Promise(resolve => { + let address = row['rpc_address']; + const peer = row['peer']; + const bindAllAddress = '0.0.0.0'; + + if (!address) { + this.log('error', f('No rpc_address found for host %s in %s\'s peers system table. %s will be ignored.', + peer, this.host.address, peer)); + return resolve(null); + } + + if (address.toString() === bindAllAddress) { + this.log('warning', f('Found host with 0.0.0.0 as rpc_address, using listen_address (%s) to contact it instead.' + + ' If this is incorrect you should avoid the use of 0.0.0.0 server side.', peer)); + address = peer; + } + + this._addressTranslator.translate(address.toString(), defaultPort, resolve); + }); + } + + /** + * Uses the DNS protocol to resolve a IPv4 and IPv6 addresses (A and AAAA records) for the hostname. + * It returns an Array of addresses that can be empty and logs the error. 
+ * @private + * @param name + */ + async _resolveAll(name) { + const addresses = []; + const resolve4 = util.promisify(dns.resolve4); + const resolve6 = util.promisify(dns.resolve6); + const lookup = util.promisify(dns.lookup); + + // Ignore errors for resolve calls + const ipv4Promise = resolve4(name).catch(() => {}).then(r => r || utils.emptyArray); + const ipv6Promise = resolve6(name).catch(() => {}).then(r => r || utils.emptyArray); + + let arr; + arr = await ipv4Promise; + arr.forEach(address => addresses.push({address, isIPv6: false})); + + arr = await ipv6Promise; + arr.forEach(address => addresses.push({address, isIPv6: true})); + + if (addresses.length === 0) { + // In case dns.resolve*() methods don't yield a valid address for the host name + // Use system call getaddrinfo() that might resolve according to host system definitions + try { + arr = await lookup(name, { all: true }); + arr.forEach(({address, family}) => addresses.push({address, isIPv6: family === 6})); + } catch (err) { + this.log('error', `Host with name ${name} could not be resolved`, err); + } + } + + return addresses; + } + + /** + * Waits for a connection to be available. If timeout expires before getting a connection it callbacks in error. 
+ * @returns {Promise} + */ + _waitForReconnection() { + return new Promise((resolve, reject) => { + const callback = promiseUtils.getCallback(resolve, reject); + + // eslint-disable-next-line prefer-const + let timeout; + + function newConnectionListener(err) { + clearTimeout(timeout); + callback(err); + } + + this.once('newConnection', newConnectionListener); + + timeout = setTimeout(() => { + this.removeListener('newConnection', newConnectionListener); + callback(new errors.OperationTimedOutError('A connection could not be acquired before timeout.')); + }, metadataQueryAbortTimeout); + }); + } + + /** + * Executes a query using the active connection + * @param {String|Request} cqlQuery + * @param {Boolean} [waitReconnect] Determines if it should wait for reconnection in case the control connection is not + * connected at the moment. Default: true. + */ + async query(cqlQuery, waitReconnect = true) { + const queryOnConnection = async () => { + if (!this.connection || this._isShuttingDown) { + throw new errors.NoHostAvailableError({}, 'ControlConnection is not connected at the time'); + } + + const request = typeof cqlQuery === 'string' ? new requests.QueryRequest(cqlQuery, null, null) : cqlQuery; + return await this.connection.send(request, null); + }; + + if (!this.connection && waitReconnect) { + // Wait until its reconnected (or timer elapses) + await this._waitForReconnection(); + } + + return await queryOnConnection(); + } + + /** @returns {Encoder} The encoder used by the current connection */ + getEncoder() { + if (!this._encoder) { + throw new errors.DriverInternalError('Encoder is not defined'); + } + return this._encoder; + } + + /** + * Cancels all timers and shuts down synchronously. 
+ */ + shutdown() { + this._isShuttingDown = true; + this._debouncer.shutdown(); + // Emit a "newConnection" event with Error, as it may clear timeouts that were waiting new connections + this.emit('newConnection', new errors.DriverError('ControlConnection is being shutdown')); + // Cancel timers + clearTimeout(this._topologyChangeTimeout); + clearTimeout(this._nodeStatusChangeTimeout); + } + + /** + * Resets the Connection to its initial state. + */ + async reset() { + // Reset the internal state of the ControlConnection for future initialization attempts + const currentHosts = this.hosts.clear(); + + // Set the shutting down flag temporarily to avoid reconnects. + this._isShuttingDown = true; + + // Shutdown all individual pools, ignoring any shutdown error + await Promise.all(currentHosts.map(h => h.shutdown())); + + this.initialized = false; + this._isShuttingDown = false; + } + + /** + * Gets a Map containing the original contact points and the addresses that each one resolved to. + */ + getResolvedContactPoints() { + return this._resolvedContactPoints; + } + + /** + * Gets the local IP address to which the control connection socket is bound to. + * @returns {String|undefined} + */ + getLocalAddress() { + if (!this.connection) { + return undefined; + } + + return this.connection.getLocalAddress(); + } + + /** + * Gets the address and port of host the control connection is connected to. + * @returns {String|undefined} + */ + getEndpoint() { + if (!this.connection) { + return undefined; + } + + return this.connection.endpoint; + } +} + +/** + * Parses the DSE workload and assigns it to a host. 
+ * @param {Host} host + * @param {Row} row + * @private + */ +function setDseParameters(host, row) { + if (row['workloads'] !== undefined) { + host.workloads = row['workloads']; + } + else if (row['workload']) { + host.workloads = [ row['workload'] ]; + } + else { + host.workloads = utils.emptyArray; + } + + if (row['dse_version'] !== undefined) { + host.dseVersion = row['dse_version']; + } +} + +module.exports = ControlConnection; diff --git a/node_modules/cassandra-driver/lib/datastax/cloud/index.js b/node_modules/cassandra-driver/lib/datastax/cloud/index.js new file mode 100644 index 0000000..e44a4c7 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/cloud/index.js @@ -0,0 +1,338 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const https = require('https'); +const fs = require('fs'); +const util = require('util'); +const AdmZip = require('adm-zip'); +const { URL } = require('url'); + +const errors = require('../../errors'); +const utils = require('../../utils'); +const { DsePlainTextAuthProvider, NoAuthProvider } = require('../../auth'); + +// Use the callback-based method fs.readFile() instead of fs.promises as we have to support Node.js 8+ +const readFile = util.promisify(fs.readFile); + +/** + * When the user sets the cloud options, it uses the secure bundle or endpoint to access the metadata service and + * setting the connection options + * @param {ClientOptions} options + * @returns {Promise} + */ +async function init(options) { + if (!options.cloud) { + return; + } + + const cloudOptions = new CloudOptions(options); + await parseZipFile(cloudOptions); + await getMetadataServiceInfoAsync(cloudOptions); + + if (!cloudOptions.clientOptions.sslOptions.checkServerIdentity) { + // With SNI enabled, hostname (uuid) and CN will not match + // Use a custom validation function to validate against the proxy address. + // Note: this function is only called if the certificate passed all other checks, like CA validation. + cloudOptions.clientOptions.sslOptions.checkServerIdentity = (_, cert) => + checkServerIdentity(cert, cloudOptions.clientOptions.sni.address); + } +} + +class CloudOptions { + constructor(clientOptions) { + this.clientOptions = clientOptions; + + if (clientOptions.cloud.secureConnectBundle) { + this.secureConnectBundle = clientOptions.cloud.secureConnectBundle; + this.serviceUrl = null; + } else { + this.serviceUrl = clientOptions.cloud.endpoint; + } + // Include a log emitter to enable logging within the cloud connection logic + this.logEmitter = clientOptions.logEmitter; + + this.contactPoints = null; + this.localDataCenter = null; + } + + /** + * The sslOptions in the client options from a given map. 
+ * @param {Map} zipEntries + */ + setSslOptions(zipEntries) { + this.clientOptions.sslOptions = Object.assign({ + ca: [zipEntries.get('ca.crt') ], + cert: zipEntries.get('cert'), + key: zipEntries.get('key'), + rejectUnauthorized: true + }, this.clientOptions.sslOptions); + } + + /** + * + * @param username + * @param password + */ + setAuthProvider(username, password) { + if (!username || !password) { + return; + } + + if (this.clientOptions.authProvider && !(this.clientOptions.authProvider instanceof NoAuthProvider)) { + // There is an auth provider set by the user + return; + } + + this.clientOptions.authProvider = new DsePlainTextAuthProvider(username, password); + } +} + +/** + * @param {CloudOptions} cloudOptions + * @returns {Promise} + */ +async function parseZipFile(cloudOptions) { + if (cloudOptions.serviceUrl) { + // Service url already was provided + return; + } + + if (!cloudOptions.secureConnectBundle) { + throw new TypeError('secureConnectBundle must be provided'); + } + + const data = await readFile(cloudOptions.secureConnectBundle); + const zip = new AdmZip(data); + const zipEntries = new Map(zip.getEntries().map(e => [e.entryName, e.getData()])); + + if (!zipEntries.get('config.json')) { + throw new TypeError('Config file must be contained in secure bundle'); + } + + const config = JSON.parse(zipEntries.get('config.json').toString('utf8')); + if (!config['host'] || !config['port']) { + throw new TypeError('Config file must include host and port information'); + } + + cloudOptions.serviceUrl = `${config['host']}:${config['port']}/metadata`; + cloudOptions.setSslOptions(zipEntries); + cloudOptions.setAuthProvider(config.username, config.password); +} + +/** + * Gets the information retrieved from the metadata service. 
+ * Invokes the callback with {proxyAddress, localDataCenter, contactPoints} as result + * @param {CloudOptions} cloudOptions + * @param {Function} callback + */ +function getMetadataServiceInfo(cloudOptions, callback) { + const regex = /^(.+?):(\d+)(.*)$/; + const matches = regex.exec(cloudOptions.serviceUrl); + callback = utils.callbackOnce(callback); + + if (!matches || matches.length !== 4) { + throw new TypeError('url should be composed of host, port number and path, without scheme'); + } + + const requestOptions = Object.assign({ + hostname: matches[1], + port: matches[2], + path: matches[3] || undefined, + timeout: cloudOptions.clientOptions.socketOptions.connectTimeout + }, cloudOptions.clientOptions.sslOptions); + + const req = https.get(requestOptions, res => { + let data = ''; + + utils.log('verbose', `Connected to metadata service with SSL/TLS protocol ${res.socket.getProtocol()}`, {}, cloudOptions); + + res + .on('data', chunk => data += chunk.toString()) + .on('end', () => { + if (res.statusCode !== 200) { + return callback(getServiceRequestError(new Error(`Obtained http status ${res.statusCode}`), requestOptions)); + } + + let message; + + try { + message = JSON.parse(data); + + if (!message || !message['contact_info']) { + throw new TypeError('contact_info should be defined in response'); + } + + } catch (err) { + return callback(getServiceRequestError(err, requestOptions, true)); + } + + const contactInfo = message['contact_info']; + + // Set the connect options + cloudOptions.clientOptions.contactPoints = contactInfo['contact_points']; + cloudOptions.clientOptions.localDataCenter = contactInfo['local_dc']; + cloudOptions.clientOptions.sni = { address: contactInfo['sni_proxy_address'] }; + + callback(); + }); + }); + + req.on('error', err => callback(getServiceRequestError(err, requestOptions))); + + // We need to both set the timeout in the requestOptions and invoke ClientRequest#setTimeout() + // to handle all possible scenarios, for some 
reason... (tested with one OR the other and didn't fully work) + // Setting the also the timeout handler, aborting will emit 'error' and close + req.setTimeout(cloudOptions.clientOptions.socketOptions.connectTimeout, () => req.abort()); +} + +const getMetadataServiceInfoAsync = util.promisify(getMetadataServiceInfo); + +/** + * Returns an Error that wraps the inner error obtained while fetching metadata information. + * @private + */ +function getServiceRequestError(err, requestOptions, isParsingError) { + const message = isParsingError + ? 'There was an error while parsing the metadata service information' + : 'There was an error fetching the metadata information'; + + const url = `${requestOptions.hostname}:${requestOptions.port}${(requestOptions.path) ? requestOptions.path : '/'}`; + return new errors.NoHostAvailableError({ [url] : err }, message); +} + +/** + * @param {{subject: {CN: string}, subjectaltname: string?}} cert A certificate object as defined by + * TLS module https://nodejs.org/docs/latest-v12.x/api/tls.html#tls_certificate_object + * @param {string} sniAddress + * @returns {Error|undefined} Similar to tls.checkServerIdentity() returns an Error object, populating it with reason, + * host, and cert on failure. Otherwise, it returns undefined. 
+ * @internal + * @ignore + */ +function checkServerIdentity(cert, sniAddress) { + // Based on logic defined by the Node.js Core module + // https://github.com/nodejs/node/blob/ff48009fefcecedfee2c6ff1719e5be3f6969049/lib/tls.js#L212-L290 + + // SNI address is composed by hostname and port + const hostName = sniAddress.split(':')[0]; + const altNames = cert.subjectaltname; + const cn = cert.subject.CN; + + if (hostName === cn) { + // quick check based on common name + return undefined; + } + + const parsedAltNames = []; + if (altNames) { + for (const name of altNames.split(', ')) { + if (name.startsWith('DNS:')) { + parsedAltNames.push(name.slice(4)); + } else if (name.startsWith('URI:')) { + parsedAltNames.push(new URL(name.slice(4)).hostname); + } + } + } + + const hostParts = hostName.split('.'); + const wildcard = (pattern) => checkParts(hostParts, pattern); + + let valid; + if (parsedAltNames.length > 0) { + valid = parsedAltNames.some(wildcard); + } else { + // Use the common name + valid = wildcard(cn); + } + + if (!valid) { + const error = new Error(`Host: ${hostName} is not cert's CN/altnames: ${cn} / ${altNames}`); + error.reason = error.message; + error.host = hostName; + error.cert = cert; + return error; + } +} + +/** + * Simplified version of Node.js tls core lib check() function + * https://github.com/nodejs/node/blob/ff48009fefcecedfee2c6ff1719e5be3f6969049/lib/tls.js#L148-L209 + * @private + * @returns {boolean} + */ +function checkParts(hostParts, pattern) { + // Empty strings, null, undefined, etc. never match. + if (!pattern) { + return false; + } + + const patternParts = pattern.split('.'); + + if (hostParts.length !== patternParts.length) { + return false; + } + + // Check host parts from right to left first. 
+ for (let i = hostParts.length - 1; i > 0; i -= 1) { + if (hostParts[i] !== patternParts[i]) { + return false; + } + } + + const hostSubdomain = hostParts[0]; + const patternSubdomain = patternParts[0]; + const patternSubdomainParts = patternSubdomain.split('*'); + + // Short-circuit when the subdomain does not contain a wildcard. + // RFC 6125 does not allow wildcard substitution for components + // containing IDNA A-labels (Punycode) so match those verbatim. + if (patternSubdomainParts.length === 1 || patternSubdomain.includes('xn--')) { + return hostSubdomain === patternSubdomain; + } + + // More than one wildcard is always wrong. + if (patternSubdomainParts.length > 2) { + return false; + } + + // *.tld wildcards are not allowed. + if (patternParts.length <= 2) { + return false; + } + + const [prefix, suffix] = patternSubdomainParts; + + if (prefix.length + suffix.length > hostSubdomain.length) { + return false; + } + + if (!hostSubdomain.startsWith(prefix)) { + return false; + } + + if (!hostSubdomain.endsWith(suffix)) { + return false; + } + + return true; +} + +module.exports = { + checkServerIdentity, + init +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/complex-type-helper.js b/node_modules/cassandra-driver/lib/datastax/graph/complex-type-helper.js new file mode 100644 index 0000000..130bf0a --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/complex-type-helper.js @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers'); +const types = require('../../types'); +const Encoder = require('../../encoder'); +const { dataTypes } = types; + +function getTypeDefinitionByValue(value) { + if (value instanceof types.Tuple) { + return { + 'cqlType': 'tuple', + 'definition': value.elements.map(getTypeDefinitionByValue) + }; + } + + if (value instanceof Map) { + // Try to guess the types of the key and value based on the first element + const result = { 'cqlType': 'map' }; + if (value.size > 0) { + const first = value.entries().next().value; + result['definition'] = first.map(getTypeDefinitionByValue); + } + + return result; + } + + if (value instanceof UdtGraphWrapper) { + return getUdtTypeDefinitionByValue(value); + } + + let type; + if (value instanceof GraphTypeWrapper) { + type = value.typeInfo; + } else { + type = Encoder.guessDataType(value); + } + + if (!type) { + return null; + } + + return getDefinitionByType(type); +} + +function getDefinitionByType(type) { + if (type.code === dataTypes.udt) { + return getUdtTypeDefinition(type.info); + } + + if (type.code === dataTypes.tuple || type.code === dataTypes.map) { + return { + 'cqlType': types.getDataTypeNameByCode(type), + 'definition': type.info.map(getDefinitionByType) + }; + } + + if (type.code === dataTypes.list || type.code === dataTypes.set) { + return { + 'cqlType': type.code === dataTypes.list ? 
'list' : 'set', + 'definition': [ getDefinitionByType(type.info) ] + }; + } + + return { 'cqlType': types.getDataTypeNameByCode(type) }; +} + +function getUdtTypeDefinition(udtInfo) { + return { + 'cqlType': 'udt', + 'keyspace': udtInfo.keyspace, + 'name': udtInfo.name, + 'definition': udtInfo.fields.map(field => + // fieldName should be the first property serialized + Object.assign({ 'fieldName': field.name }, getDefinitionByType(field.type)) + ), + }; +} + +function getUdtTypeDefinitionByValue(wrappedValue) { + return getUdtTypeDefinition(wrappedValue.udtInfo); +} + +module.exports = { getTypeDefinitionByValue, getUdtTypeDefinitionByValue }; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/custom-type-serializers.js b/node_modules/cassandra-driver/lib/datastax/graph/custom-type-serializers.js new file mode 100644 index 0000000..e25ef3b --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/custom-type-serializers.js @@ -0,0 +1,362 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const types = require('../../types'); +const utils = require('../../utils'); +const { getTypeDefinitionByValue, getUdtTypeDefinitionByValue } = require('./complex-type-helper'); +const { Point, Polygon, LineString } = require('../../geometry'); +const { Edge } = require('./structure'); +const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers'); +const { Tuple, dataTypes } = types; + +const typeKey = '@type'; +const valueKey = '@value'; + +class EdgeDeserializer { + constructor() { + this.key = 'g:Edge'; + } + + deserialize(obj) { + const value = obj[valueKey]; + return new Edge(this.reader.read(value['id']), this.reader.read(value['outV']), value['outVLabel'], value['label'], this.reader.read(value['inV']), value['inVLabel'], this.reader.read(value['properties'])); + } +} + +/** + * Uses toString() instance method and fromString() static method to serialize and deserialize the value. + * @abstract + * @private + */ +class StringBasedTypeSerializer { + + /** + * Creates a new instance of the deserializer. 
+ * @param {String} key + * @param {Function} targetType + */ + constructor(key, targetType) { + if (!key) { + throw new Error('Deserializer must provide a type key'); + } + if (!targetType) { + throw new Error('Deserializer must provide a target type'); + } + this.key = key; + this.targetType = targetType; + } + + deserialize(obj) { + let value = obj[valueKey]; + if (typeof value !== 'string') { + value = value.toString(); + } + return this.targetType.fromString(value); + } + + serialize(value) { + return { + [typeKey]: this.key, + [valueKey]: value.toString() + }; + } + + canBeUsedFor(value) { + return value instanceof this.targetType; + } +} + +class UuidSerializer extends StringBasedTypeSerializer { + constructor() { + super('g:UUID', types.Uuid); + } +} + +class LongSerializer extends StringBasedTypeSerializer { + constructor() { + super('g:Int64', types.Long); + } +} + +class BigDecimalSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:BigDecimal', types.BigDecimal); + } +} + +class BigIntegerSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:BigInteger', types.Integer); + } +} + +class InetAddressSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:InetAddress', types.InetAddress); + } +} + +class LocalDateSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:LocalDate', types.LocalDate); + } +} + +class LocalTimeSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:LocalTime', types.LocalTime); + } +} + +class InstantSerializer extends StringBasedTypeSerializer { + constructor() { + super('gx:Instant', Date); + } + + serialize(item) { + return { + [typeKey]: this.key, + [valueKey]: item.toISOString() + }; + } + + deserialize(obj) { + return new Date(obj[valueKey]); + } +} + +class BlobSerializer extends StringBasedTypeSerializer { + constructor() { + super('dse:Blob', Buffer); + } + + deserialize(obj) { + return 
utils.allocBufferFromString(obj[valueKey], 'base64'); + } + + serialize(item) { + return { + [typeKey]: this.key, + [valueKey]: item.toString('base64') + }; + } +} + +class PointSerializer extends StringBasedTypeSerializer { + constructor() { + super('dse:Point', Point); + } +} + +class LineStringSerializer extends StringBasedTypeSerializer { + constructor() { + super('dse:LineString', LineString); + } +} + +class PolygonSerializer extends StringBasedTypeSerializer { + constructor() { + super('dse:Polygon', Polygon); + } +} + +class TupleSerializer { + constructor() { + this.key = 'dse:Tuple'; + } + + deserialize(obj) { + // Skip definitions and go to the value + const value = obj[valueKey]['value']; + + if (!Array.isArray(value)) { + throw new Error('Expected Array, obtained: ' + value); + } + + const result = []; + + for (const element of value) { + result.push(this.reader.read(element)); + } + + return Tuple.fromArray(result); + } + + /** @param {Tuple} tuple */ + serialize(tuple) { + const result = { + 'cqlType': 'tuple', + 'definition': tuple.elements.map(getTypeDefinitionByValue), + 'value': tuple.elements.map(e => this.writer.adaptObject(e)) + }; + + return { + [typeKey]: this.key, + [valueKey]: result + }; + } + + canBeUsedFor(value) { + return value instanceof Tuple; + } +} + +class DurationSerializer { + constructor() { + this.key = 'dse:Duration'; + } + + deserialize(obj) { + // Skip definitions and go to the value + const value = obj[valueKey]; + + return new types.Duration( + this.reader.read(value['months']), this.reader.read(value['days']), this.reader.read(value['nanos'])); + } + + /** @param {Duration} value */ + serialize(value) { + return { + [typeKey]: this.key, + [valueKey]: { + 'months': value['months'], + 'days': value['days'], + 'nanos': value['nanoseconds'], + } + }; + } + + canBeUsedFor(value) { + return value instanceof types.Duration; + } +} + +class UdtSerializer { + constructor() { + this.key = 'dse:UDT'; + } + + deserialize(obj) { + 
// Skip definitions and go to the value + const valueRoot = obj[valueKey]; + const result = {}; + const value = valueRoot['value']; + + valueRoot['definition'].forEach((definition, index) => { + result[definition.fieldName] = this.reader.read(value[index]); + }); + + return result; + } + + serialize(udtWrapper) { + const serializedValue = getUdtTypeDefinitionByValue(udtWrapper); + // New properties can be added to the existing object without need to clone + // as getTypeDefinition() returns a new object each time + serializedValue['value'] = Object.entries(udtWrapper.value).map(([_, v]) => this.writer.adaptObject(v)); + + return { + [typeKey]: this.key, + [valueKey]: serializedValue + }; + } + + canBeUsedFor(value) { + return value instanceof UdtGraphWrapper; + } +} + +class InternalSerializer { + constructor(name, transformFn) { + this._name = name; + this._transformFn = transformFn || (x => x); + } + + serialize(item) { + return { + [typeKey]: this._name, + [valueKey]: this._transformFn(item) + }; + } +} + +// Associative array of graph type name by CQL type code, used by the type wrapper +const graphSONSerializerByCqlType = { + [dataTypes.int]: new InternalSerializer('g:Int32'), + [dataTypes.bigint]: new InternalSerializer('g:Int64'), + [dataTypes.double]: new InternalSerializer('g:Double'), + [dataTypes.float]: new InternalSerializer('g:Float'), + [dataTypes.timestamp]: new InternalSerializer('g:Timestamp', x => x.getTime()) +}; + +class GraphTypeWrapperSerializer { + constructor() { + // Use a fixed name that doesn't conflict with TinkerPop and DS Graph + this.key = 'client:wrapper'; + } + + serialize(wrappedValue) { + const s = graphSONSerializerByCqlType[wrappedValue.typeInfo.code]; + + if (!s) { + throw new Error(`No serializer found for wrapped value ${wrappedValue}`); + } + + return s.serialize(wrappedValue.value); + } + + canBeUsedFor(value) { + return value instanceof GraphTypeWrapper; + } +} + +const serializersArray = [ + EdgeDeserializer, + 
UuidSerializer, + LongSerializer, + BigDecimalSerializer, + BigIntegerSerializer, + InetAddressSerializer, + LocalDateSerializer, + LocalTimeSerializer, + InstantSerializer, + BlobSerializer, + PointSerializer, + LineStringSerializer, + PolygonSerializer, + TupleSerializer, + UdtSerializer, + GraphTypeWrapperSerializer, + DurationSerializer +]; + +function getCustomSerializers() { + const customSerializers = {}; + + serializersArray.forEach(sConstructor => { + const instance = new sConstructor(); + if (!instance.key) { + throw new TypeError(`Key for ${sConstructor} instance not set`); + } + + customSerializers[instance.key] = instance; + }); + + return customSerializers; +} + +module.exports = getCustomSerializers; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/graph-executor.js b/node_modules/cassandra-driver/lib/datastax/graph/graph-executor.js new file mode 100644 index 0000000..7015826 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/graph-executor.js @@ -0,0 +1,280 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const utils = require('../../utils'); +const policies = require('../../policies'); +const GraphResultSet = require('./result-set'); +const { GraphSON2Reader, GraphSON2Writer, GraphSON3Reader, GraphSON3Writer } = require('./graph-serializer'); +const getCustomTypeSerializers = require('./custom-type-serializers'); +const { GraphExecutionOptions, graphProtocol } = require('./options'); + +const graphLanguageGroovyString = 'gremlin-groovy'; +const graphEngineCore = 'Core'; + +const graphSON2Reader = new GraphSON2Reader({ serializers: getCustomTypeSerializers() }); +const graphSON2Writer = new GraphSON2Writer({ serializers: getCustomTypeSerializers() }); +const graphSON3Reader = new GraphSON3Reader({ serializers: getCustomTypeSerializers() }); +const graphSON3Writer = new GraphSON3Writer({ serializers: getCustomTypeSerializers() }); + +const rowParsers = new Map([ + [ graphProtocol.graphson2, getRowParser(graphSON2Reader) ], + [ graphProtocol.graphson3, getRowParser(graphSON3Reader) ] +]); + +const defaultWriters = new Map([ + [ graphProtocol.graphson1, x => JSON.stringify(x) ], + [ graphProtocol.graphson2, getDefaultWriter(graphSON2Writer) ], + [ graphProtocol.graphson3, getDefaultWriter(graphSON3Writer) ] +]); + +/** + * Internal class that contains the logic for executing a graph traversal. + * @ignore + */ +class GraphExecutor { + + /** + * Creates a new instance of GraphExecutor. 
+ * @param {Client} client + * @param {ClientOptions} rawOptions + * @param {Function} handler + */ + constructor(client, rawOptions, handler) { + this._client = client; + this._handler = handler; + + // Retrieve the retry policy for the default profile to determine if it was specified + this._defaultProfileRetryPolicy = client.profileManager.getDefaultConfiguredRetryPolicy(); + + // Use graphBaseOptions as a way to gather all defaults that affect graph executions + this._graphBaseOptions = utils.extend({ + executeAs: client.options.queryOptions.executeAs, + language: graphLanguageGroovyString, + source: 'g', + readTimeout: 0, + // As the default retry policy might retry non-idempotent queries + // we should use default retry policy for all graph queries that does not retry + retry: new policies.retry.FallthroughRetryPolicy() + }, rawOptions.graphOptions, client.profileManager.getDefault().graphOptions); + + if (this._graphBaseOptions.readTimeout === null) { + this._graphBaseOptions.readTimeout = client.options.socketOptions.readTimeout; + } + } + + /** + * Executes the graph traversal. 
+ * @param {String|Object} query + * @param {Object} parameters + * @param {GraphQueryOptions} options + */ + async send(query, parameters, options) { + if (Array.isArray(parameters)) { + throw new TypeError('Parameters must be a Object instance as an associative array'); + } + + if (!query) { + throw new TypeError('Query must be defined'); + } + + const execOptions = new GraphExecutionOptions( + options, this._client, this._graphBaseOptions, this._defaultProfileRetryPolicy); + + if (execOptions.getGraphSource() === 'a') { + const host = await this._getAnalyticsMaster(); + execOptions.setPreferredHost(host); + } + + // A query object that allows to plugin any executable thing + const isQueryObject = typeof query === 'object' && query.graphLanguage && query.value && query.queryWriterFactory; + + if (isQueryObject) { + // Use the provided graph language to override the current + execOptions.setGraphLanguage(query.graphLanguage); + } + + this._setGraphProtocol(execOptions); + execOptions.setGraphPayload(); + parameters = GraphExecutor._buildGraphParameters(parameters, execOptions.getGraphSubProtocol()); + + if (typeof query !== 'string') { + // Its a traversal that needs to be converted + // Transforming the provided query into a traversal requires the protocol to be set first. + // Query writer factory can be defined in the options or in the query object + let queryWriter = execOptions.getQueryWriter(); + + if (isQueryObject) { + queryWriter = query.queryWriterFactory(execOptions.getGraphSubProtocol()); + } else if (!queryWriter) { + queryWriter = GraphExecutor._writerFactory(execOptions.getGraphSubProtocol()); + } + + query = queryWriter(!isQueryObject ? query : query.value); + } + + return await this._executeGraphQuery(query, parameters, execOptions); + } + + /** + * Sends the graph traversal. 
+ * @param {string} query + * @param {object} parameters + * @param {GraphExecutionOptions} execOptions + * @returns {Promise} + * @private + */ + async _executeGraphQuery(query, parameters, execOptions) { + const result = await this._handler.call(this._client, query, parameters, execOptions); + + // Instances of rowParser transform Row instances into Traverser instances. + // Traverser instance is an object with the following form { object: any, bulk: number } + const rowParser = execOptions.getRowParser() || GraphExecutor._rowParserFactory(execOptions.getGraphSubProtocol()); + + return new GraphResultSet(result, rowParser); + } + + /** + * Uses the RPC call to obtain the analytics master host. + * @returns {Promise} + * @private + */ + async _getAnalyticsMaster() { + try { + const result = await this._client.execute('CALL DseClientTool.getAnalyticsGraphServer()', utils.emptyArray); + + if (result.rows.length === 0) { + this._client.log('verbose', + 'Empty response querying graph analytics server, query will not be routed optimally'); + return null; + } + + const resultField = result.rows[0]['result']; + if (!resultField || !resultField['location']) { + this._client.log('verbose', + 'Unexpected response querying graph analytics server, query will not be routed optimally', + result.rows[0]); + return null; + } + + const hostName = resultField['location'].substr(0, resultField['location'].lastIndexOf(':')); + const addressTranslator = this._client.options.policies.addressResolution; + + return await new Promise(resolve => { + addressTranslator.translate(hostName, this._client.options.protocolOptions.port, (endpoint) => + resolve(this._client.hosts.get(endpoint))); + }); + } catch (err) { + this._client.log('verbose', 'Error querying graph analytics server, query will not be routed optimally', err); + return null; + } + } + + /** + * Resolves what protocol should be used for decoding graph results for the given execution. + * + *

Resolution is done in the following manner if graphResults is not set:

+ * + *
    + *
  • If graph name is set, and associated keyspace's graph engine is set to "Core", use {@link + * graphProtocol#graphson3}. + *
  • Else, if the graph language is not 'gremlin-groovy', use {@link graphProtocol#graphson2} + *
  • Otherwise, use {@link graphProtocol#graphson1} + *
+ * @param {GraphExecutionOptions} execOptions + */ + _setGraphProtocol(execOptions) { + let protocol = execOptions.getGraphSubProtocol(); + + if (protocol) { + return; + } + + if (execOptions.getGraphName()) { + const keyspace = this._client.metadata.keyspaces[execOptions.getGraphName()]; + if (keyspace && keyspace.graphEngine === graphEngineCore) { + protocol = graphProtocol.graphson3; + } + } + + if (!protocol) { + // Decide the minimal version supported by the graph language + if (execOptions.getGraphLanguage() === graphLanguageGroovyString) { + protocol = graphProtocol.graphson1; + } else { + protocol = graphProtocol.graphson2; + } + } + + execOptions.setGraphSubProtocol(protocol); + } + + /** + * Only GraphSON1 parameters are supported. + * @param {Array|function|null} parameters + * @param {string} protocol + * @returns {string[]|null} + * @private + */ + static _buildGraphParameters(parameters, protocol) { + if (!parameters || typeof parameters !== 'object') { + return null; + } + + const queryWriter = GraphExecutor._writerFactory(protocol); + + return [ + (protocol !== graphProtocol.graphson1 && protocol !== graphProtocol.graphson2) + ? 
queryWriter(new Map(Object.entries(parameters))) + : queryWriter(parameters) + ]; + } + + static _rowParserFactory(protocol) { + const handler = rowParsers.get(protocol); + + if (!handler) { + // Default to no row parser + return null; + } + + return handler; + } + + static _writerFactory(protocol) { + const handler = defaultWriters.get(protocol); + + if (!handler) { + throw new Error(`No writer defined for protocol ${protocol}`); + } + + return handler; + } +} + +function getRowParser(reader) { + return row => { + const item = reader.read(JSON.parse(row['gremlin'])); + return { object: item['result'], bulk: item['bulk'] || 1 }; + }; +} + +function getDefaultWriter(writer) { + return value => writer.write(value); +} + +module.exports = GraphExecutor; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/graph-serializer.js b/node_modules/cassandra-driver/lib/datastax/graph/graph-serializer.js new file mode 100644 index 0000000..4331161 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/graph-serializer.js @@ -0,0 +1,260 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +/** + * @module datastax/graph/tinkerpop/graphSerializers + * @ignore + */ + + +/** + * @author Jorge Bay Gondra + */ +'use strict'; + +const typeSerializers = require('./type-serializers'); + +/** + * GraphSON2 writer. + */ +class GraphSON2Writer { + + /** + * @param {Object} [options] + * @param {Object} [options.serializers] An object used as an associative array with GraphSON 2 type name as keys and + * serializer instances as values, ie: { 'g:Int64': longSerializer }. + * @constructor + */ + constructor(options) { + this._options = options || {}; + // Create instance of the default serializers + this._serializers = this.getDefaultSerializers().map(serializerConstructor => { + const s = new serializerConstructor(); + s.writer = this; + return s; + }); + + const customSerializers = this._options.serializers || {}; + + Object.keys(customSerializers).forEach(key => { + const s = customSerializers[key]; + if (!s.serialize) { + return; + } + s.writer = this; + // Insert custom serializers first + this._serializers.unshift(s); + }); + } + + /** + * Gets the default serializers to be used. + * @returns {Array} + */ + getDefaultSerializers() { + return graphSON2Serializers; + } + + adaptObject(value) { + let s; + + for (let i = 0; i < this._serializers.length; i++) { + const currentSerializer = this._serializers[i]; + if (currentSerializer.canBeUsedFor && currentSerializer.canBeUsedFor(value)) { + s = currentSerializer; + break; + } + } + + if (s) { + return s.serialize(value); + } + + if (Array.isArray(value)) { + // We need to handle arrays when there is no serializer + // for older versions of GraphSON + return value.map(item => this.adaptObject(item)); + } + + // Default (strings / objects / ...) + return value; + } + + /** + * Returns the GraphSON representation of the provided object instance. + * @param {Object} obj + * @returns {String} + */ + write(obj) { + return JSON.stringify(this.adaptObject(obj)); + } +} + +/** + * GraphSON3 writer. 
+ */ +class GraphSON3Writer extends GraphSON2Writer { + getDefaultSerializers() { + return graphSON3Serializers; + } +} + +/** + * GraphSON2 reader. + */ +class GraphSON2Reader { + /** + * GraphSON Reader + * @param {Object} [options] + * @param {Object} [options.serializers] An object used as an associative array with GraphSON 2 type name as keys and + * deserializer instances as values, ie: { 'g:Int64': longSerializer }. + * @constructor + */ + constructor(options) { + this._options = options || {}; + this._deserializers = {}; + + const defaultDeserializers = this.getDefaultDeserializers(); + Object.keys(defaultDeserializers).forEach(typeName => { + const serializerConstructor = defaultDeserializers[typeName]; + const s = new serializerConstructor(); + s.reader = this; + this._deserializers[typeName] = s; + }); + + if (this._options.serializers) { + const customSerializers = this._options.serializers || {}; + Object.keys(customSerializers).forEach(key => { + const s = customSerializers[key]; + if (!s.deserialize) { + return; + } + s.reader = this; + this._deserializers[key] = s; + }); + } + } + + /** + * Gets the default deserializers as an associative array. 
+ * @returns {Object} + */ + getDefaultDeserializers() { + return graphSON2Deserializers; + } + + read(obj) { + if (obj === undefined) { + return undefined; + } + if (obj === null) { + return null; + } + if (Array.isArray(obj)) { + return obj.map(item => this.read(item)); + } + const type = obj[typeSerializers.typeKey]; + if (type) { + const d = this._deserializers[type]; + if (d) { + // Use type serializer + return d.deserialize(obj); + } + return obj[typeSerializers.valueKey]; + } + if (obj && typeof obj === 'object' && obj.constructor === Object) { + return this._deserializeObject(obj); + } + // Default (for boolean, number and other scalars) + return obj; + } + + _deserializeObject(obj) { + const keys = Object.keys(obj); + const result = {}; + for (let i = 0; i < keys.length; i++) { + result[keys[i]] = this.read(obj[keys[i]]); + } + return result; + } +} + +/** + * GraphSON3 reader. + */ +class GraphSON3Reader extends GraphSON2Reader { + getDefaultDeserializers() { + return graphSON3Deserializers; + } +} + +const graphSON2Deserializers = { + 'g:Traverser': typeSerializers.TraverserSerializer, + 'g:TraversalStrategy': typeSerializers.TraversalStrategySerializer, + 'g:Int32': typeSerializers.NumberSerializer, + 'g:Int64': typeSerializers.NumberSerializer, + 'g:Float': typeSerializers.NumberSerializer, + 'g:Double': typeSerializers.NumberSerializer, + 'g:Date': typeSerializers.DateSerializer, + 'g:Direction': typeSerializers.DirectionSerializer, + 'g:Vertex': typeSerializers.VertexSerializer, + 'g:Edge': typeSerializers.EdgeSerializer, + 'g:VertexProperty': typeSerializers.VertexPropertySerializer, + 'g:Property': typeSerializers.PropertySerializer, + 'g:Path': typeSerializers.Path3Serializer, + 'g:TextP': typeSerializers.TextPSerializer, + 'g:T': typeSerializers.TSerializer, + 'g:BulkSet': typeSerializers.BulkSetSerializer +}; + +const graphSON3Deserializers = Object.assign({}, graphSON2Deserializers, { + 'g:List': typeSerializers.ListSerializer, + 'g:Set': 
typeSerializers.SetSerializer, + 'g:Map': typeSerializers.MapSerializer +}); + +const graphSON2Serializers = [ + typeSerializers.NumberSerializer, + typeSerializers.DateSerializer, + typeSerializers.BytecodeSerializer, + typeSerializers.TraverserSerializer, + typeSerializers.TraversalStrategySerializer, + typeSerializers.PSerializer, + typeSerializers.TextPSerializer, + typeSerializers.LambdaSerializer, + typeSerializers.EnumSerializer, + typeSerializers.VertexSerializer, + typeSerializers.EdgeSerializer, + typeSerializers.LongSerializer +]; + +const graphSON3Serializers = graphSON2Serializers.concat([ + typeSerializers.ListSerializer, + typeSerializers.SetSerializer, + typeSerializers.MapSerializer +]); + +module.exports = { + GraphSON3Writer, + GraphSON3Reader, + GraphSON2Writer, + GraphSON2Reader, + GraphSONWriter: GraphSON3Writer, + GraphSONReader: GraphSON3Reader +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/index.d.ts b/node_modules/cassandra-driver/lib/datastax/graph/index.d.ts new file mode 100644 index 0000000..b6e860c --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/index.d.ts @@ -0,0 +1,92 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { types } from '../../types'; + +export namespace graph { + interface Edge extends Element { + outV?: Vertex; + outVLabel?: string; + inV?: Vertex; + inVLabel?: string; + properties?: object; + } + + interface Element { + id: any; + label: string; + } + + class GraphResultSet implements Iterator { + constructor(rs: types.ResultSet); + + first(): any; + + toArray(): any[]; + + values(): Iterator; + + next(value?: any): IteratorResult; + } + + interface Path { + labels: any[]; + objects: any[]; + } + + interface Property { + value: any + key: any + } + + interface Vertex extends Element { + properties?: { [key: string]: any[] } + } + + interface VertexProperty extends Element { + value: any + key: string + properties?: any + } + + function asDouble(value: number): object; + + function asFloat(value: number): object; + + function asInt(value: number): object; + + function asTimestamp(value: Date): object; + + function asUdt(value: object): object; + + interface EnumValue { + toString(): string + } + + namespace t { + const id: EnumValue; + const key: EnumValue; + const label: EnumValue; + const value: EnumValue; + } + + namespace direction { + // `in` is a reserved word + const in_: EnumValue; + const out: EnumValue; + const both: EnumValue; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/index.js b/node_modules/cassandra-driver/lib/datastax/graph/index.js new file mode 100644 index 0000000..a0333a0 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/index.js @@ -0,0 +1,82 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * Graph module. + * @module datastax/graph + */ + +const GraphResultSet = require('./result-set'); +const getCustomTypeSerializers = require('./custom-type-serializers'); +const { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper} = require('./wrappers'); +const { Edge, Element, Path, Property, Vertex, VertexProperty } = require('./structure'); + +class EnumValue { + constructor(typeName, elementName) { + this.typeName = typeName; + this.elementName = elementName; + } + + toString() { + return this.elementName; + } +} + +/** + * Represents a collection of tokens for more concise Traversal definitions. + */ +const t = { + id: new EnumValue('T', 'id'), + key: new EnumValue('T', 'key'), + label: new EnumValue('T', 'label'), + value: new EnumValue('T', 'value'), +}; + +/** + * Represents the edge direction. + */ +const direction = { + 'both': new EnumValue('Direction', 'BOTH'), + 'in': new EnumValue('Direction', 'IN'), + 'out': new EnumValue('Direction', 'OUT') +}; + +// `in` is a reserved keyword depending on the context +// TinkerPop JavaScript GLV only exposes `in` but it can lead to issues for TypeScript users and others. +// Expose an extra property to represent `Direction.IN`. 
+direction.in_ = direction.in; + +module.exports = { + Edge, + Element, + Path, + Property, + Vertex, + VertexProperty, + + asInt, + asDouble, + asFloat, + asTimestamp, + asUdt, + direction, + getCustomTypeSerializers, + GraphResultSet, + GraphTypeWrapper, + t, + UdtGraphWrapper +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/options.js b/node_modules/cassandra-driver/lib/datastax/graph/options.js new file mode 100644 index 0000000..2e0e7e7 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/options.js @@ -0,0 +1,334 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const types = require('../../types'); +const utils = require('../../utils'); +const { DefaultExecutionOptions, proxyExecuteKey } = require('../../execution-options'); +const Long = types.Long; + +let consistencyNames; + +const graphProtocol = Object.freeze({ + graphson1: 'graphson-1.0', + graphson2: 'graphson-2.0', + graphson3: 'graphson-3.0' +}); + +const payloadKeys = Object.freeze({ + language :'graph-language', + source: 'graph-source', + name: 'graph-name', + results: 'graph-results', + writeConsistency: 'graph-write-consistency', + readConsistency: 'graph-read-consistency', + timeout: 'request-timeout' +}); + +/** + * Graph options that extends {@link QueryOptions}. + *

+ * Consider using [execution profiles]{@link ExecutionProfile} if you plan to reuse options across different + * query executions. + *

+ * @typedef {QueryOptions} module:datastax/graph~GraphQueryOptions + * @property {String} [graphLanguage] The graph language to use in graph queries. + * @property {String} [graphResults] The protocol to use for serializing and deserializing graph results. + *

+ * Note that this value should rarely be set by users and will otherwise be unset. When unset the server resolves + * the protocol based on the graphLanguage specified. + *

+ * @property {String} [graphName] The graph name to be used in the query. You can use null to clear the + * value from the DseClientOptions and execute a query without a default graph. + * @property {Number} [graphReadConsistency] Specifies the + * [consistency level]{@link module:types~consistencies} + * to be used for the graph read queries in this execution. + *

+ * When defined, it overrides the consistency level only for the READ part of the graph query. + *

+ * @property {String} [graphSource] The graph traversal source name to use in graph queries. + * @property {Number} [graphWriteConsistency] Specifies the [consistency level]{@link module:types~consistencies} to + * be used for the graph write queries in this execution. + *

+ * When defined, it overrides the consistency level only for the WRITE part of the graph query. + *

+ * @property {RetryPolicy} [retry] Sets the retry policy to be used for the graph query execution. + *

+ * When not specified in the {@link GraphQueryOptions} or in the {@link ExecutionProfile}, it will use by default + * a retry policy that does not retry graph executions. + *

+ */ + +/** + * Gets the default options with the custom payload for a given profile. + * @param {ProfileManager} profileManager + * @param baseOptions + * @param {RetryPolicy|null} defaultRetryPolicy + * @param {ExecutionProfile} profile + * @returns {DseClientOptions} + * @private + */ +function getDefaultGraphOptions(profileManager, baseOptions, defaultRetryPolicy, profile) { + return profileManager.getOrCreateGraphOptions(profile, function createDefaultOptions() { + const profileOptions = profile.graphOptions || utils.emptyObject; + const defaultProfile = profileManager.getDefault(); + const options = { + customPayload: { + [payloadKeys.language]: utils.allocBufferFromString(profileOptions.language || baseOptions.language), + [payloadKeys.source]: utils.allocBufferFromString(profileOptions.source || baseOptions.source) + }, + graphLanguage: profileOptions.language || baseOptions.language, + graphResults: profileOptions.results || baseOptions.results, + graphSource: profileOptions.source || baseOptions.source, + graphName: utils.ifUndefined(profileOptions.name, baseOptions.name) + }; + + if (profile !== defaultProfile) { + options.retry = profile.retry || baseOptions.retry; + } else { + // Based on an implementation detail of the execution profiles, the retry policy for the default profile is + // always loaded (required), but that doesn't mean that it was specified by the user. + // If it wasn't specified by the user, use the default retry policy for graph statements. 
+ options.retry = defaultRetryPolicy || baseOptions.retry; + } + + if (baseOptions.executeAs) { + options.customPayload[proxyExecuteKey] = utils.allocBufferFromString(baseOptions.executeAs); + } + + if (options.graphName) { + options.customPayload[payloadKeys.name] = utils.allocBufferFromString(options.graphName); + } + + const graphResults = utils.ifUndefined(profileOptions.results, baseOptions.graphResults); + if (graphResults !== undefined) { + options.customPayload[payloadKeys.results] = utils.allocBufferFromString(graphResults); + } + + const readConsistency = utils.ifUndefined(profileOptions.readConsistency, baseOptions.readConsistency); + if (readConsistency !== undefined) { + options.customPayload[payloadKeys.readConsistency] = + utils.allocBufferFromString(getConsistencyName(readConsistency)); + } + + const writeConsistency = utils.ifUndefined(profileOptions.writeConsistency, baseOptions.writeConsistency); + if (writeConsistency !== undefined) { + options.customPayload[payloadKeys.writeConsistency] = + utils.allocBufferFromString(getConsistencyName(writeConsistency)); + } + + options.readTimeout = utils.ifUndefined3(profile.readTimeout, defaultProfile.readTimeout, baseOptions.readTimeout); + if (options.readTimeout > 0) { + // Write the graph read timeout payload + options.customPayload[payloadKeys.timeout] = longBuffer(options.readTimeout); + } + + return options; + }); +} + +/** + * Sets the payload key. If the value is not provided, it uses the value from the default profile options. 
+ * @param {Object} payload + * @param {QueryOptions} profileOptions + * @param {String} key + * @param {String|Number|null} value + * @param {Function} [converter] + * @private + */ +function setPayloadKey(payload, profileOptions, key, value, converter) { + converter = converter || utils.allocBufferFromString; + if (value === null) { + // Use null to avoid set payload for a key + return; + } + + if (value !== undefined) { + payload[key] = converter(value); + return; + } + + if (profileOptions.customPayload[key]) { + payload[key] = profileOptions.customPayload[key]; + } +} + +function longBuffer(value) { + value = Long.fromNumber(value); + return Long.toBuffer(value); +} + +/** + * Gets the name in upper case of the consistency level. + * @param {Number} consistency + * @private + */ +function getConsistencyName(consistency) { + // eslint-disable-next-line + if (consistency == undefined) { + //null or undefined => undefined + return undefined; + } + loadConsistencyNames(); + const name = consistencyNames[consistency]; + if (!name) { + throw new Error(util.format( + 'Consistency %s not found, use values defined as properties in types.consistencies object', consistency + )); + } + return name; +} + +function loadConsistencyNames() { + if (consistencyNames) { + return; + } + consistencyNames = {}; + const propertyNames = Object.keys(types.consistencies); + for (let i = 0; i < propertyNames.length; i++) { + const name = propertyNames[i]; + consistencyNames[types.consistencies[name]] = name.toUpperCase(); + } + //Using java constants naming conventions + consistencyNames[types.consistencies.localQuorum] = 'LOCAL_QUORUM'; + consistencyNames[types.consistencies.eachQuorum] = 'EACH_QUORUM'; + consistencyNames[types.consistencies.localSerial] = 'LOCAL_SERIAL'; + consistencyNames[types.consistencies.localOne] = 'LOCAL_ONE'; +} + +/** + * Represents a wrapper around the options related to a graph execution. 
+ * @internal + * @ignore + */ +class GraphExecutionOptions extends DefaultExecutionOptions { + + /** + * Creates a new instance of GraphExecutionOptions. + * @param {GraphQueryOptions} queryOptions The user provided query options. + * @param {Client} client the client instance. + * @param graphBaseOptions The default graph base options. + * @param {RetryPolicy} defaultProfileRetryPolicy + */ + constructor(queryOptions, client, graphBaseOptions, defaultProfileRetryPolicy) { + + queryOptions = queryOptions || utils.emptyObject; + super(queryOptions, client, null); + + this._defaultGraphOptions = getDefaultGraphOptions( + client.profileManager, graphBaseOptions, defaultProfileRetryPolicy, this.getProfile()); + + this._preferredHost = null; + this._graphSubProtocol = queryOptions.graphResults || this._defaultGraphOptions.graphResults; + this._graphLanguage = queryOptions.graphLanguage || this._defaultGraphOptions.graphLanguage; + } + + setPreferredHost(host) { + this._preferredHost = host; + } + + getPreferredHost() { + return this._preferredHost; + } + + getGraphSource() { + return this.getRawQueryOptions().graphSource || this._defaultGraphOptions.graphSource; + } + + getGraphLanguage() { + return this._graphLanguage; + } + + setGraphLanguage(value) { + this._graphLanguage = value; + } + + getGraphName() { + return utils.ifUndefined(this.getRawQueryOptions().graphName, this._defaultGraphOptions.graphName); + } + + getGraphSubProtocol() { + return this._graphSubProtocol; + } + + setGraphSubProtocol(protocol) { + this._graphSubProtocol = protocol; + } + + /** Graph executions have a specific default read timeout */ + getReadTimeout() { + return this.getRawQueryOptions().readTimeout || this._defaultGraphOptions.readTimeout; + } + + /** Graph executions have a specific default retry policy */ + getRetryPolicy() { + return this.getRawQueryOptions().retry || this._defaultGraphOptions.retry; + } + + getRowParser() { + const factory = 
this.getRawQueryOptions().rowParserFactory; + + if (!factory) { + return null; + } + + return factory(this.getGraphSubProtocol()); + } + + getQueryWriter() { + const factory = this.getRawQueryOptions().queryWriterFactory; + + if (!factory) { + return null; + } + + return factory(this.getGraphSubProtocol()); + } + + setGraphPayload() { + const options = this.getRawQueryOptions(); + const defaultOptions = this._defaultGraphOptions; + + // Clone the existing custom payload (if any) + const payload = Object.assign({}, this.getCustomPayload()); + + // Override the payload for DSE Graph exclusive options + setPayloadKey(payload, defaultOptions, payloadKeys.language, + this.getGraphLanguage() !== this._defaultGraphOptions.graphLanguage ? this.getGraphLanguage() : undefined); + setPayloadKey(payload, defaultOptions, payloadKeys.source, options.graphSource); + setPayloadKey(payload, defaultOptions, payloadKeys.name, options.graphName); + setPayloadKey(payload, defaultOptions, payloadKeys.readConsistency, + getConsistencyName(options.graphReadConsistency)); + setPayloadKey(payload, defaultOptions, payloadKeys.writeConsistency, + getConsistencyName(options.graphWriteConsistency)); + + // Use the read timeout defined by the user or the one default to graph executions + setPayloadKey(payload, defaultOptions, payloadKeys.timeout, + this.getReadTimeout() > 0 ? this.getReadTimeout() : null, longBuffer); + + // Graph result is always set + payload[payloadKeys.results] = defaultOptions.graphResults === this.getGraphSubProtocol() + ? 
defaultOptions.customPayload[payloadKeys.results] : utils.allocBufferFromString(this.getGraphSubProtocol()); + + this.setCustomPayload(payload); + } +} + +module.exports = { + GraphExecutionOptions, + graphProtocol, + payloadKeys +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/result-set.js b/node_modules/cassandra-driver/lib/datastax/graph/result-set.js new file mode 100644 index 0000000..8e84670 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/result-set.js @@ -0,0 +1,156 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const utils = require('../../utils'); + +/** + * Creates a new instance of GraphResultSet. + * @class + * @classdesc + * Represents the result set of a [graph query execution]{@link Client#executeGraph} containing vertices, edges or + * scalar values depending on the query. + *

+ * It allows iteration of the items using for..of statements under ES2015 and exposes + * forEach(), first() and toArray() to access the underlying items. + *

+ * @example + * for (let vertex of result} { ... } + * @example + * const arr = result.toArray(); + * @example + * const vertex = result.first(); + * @param {ResultSet} result + * @param {Function} [rowParser] + * @alias module:datastax/graph~GraphResultSet + * @constructor + */ +function GraphResultSet(result, rowParser) { + /** + * Information on the execution of a successful query: + * @member {Object} + * @property {Number} achievedConsistency The consistency level that has been actually achieved by the query. + * @property {String} queriedHost The Cassandra host that coordinated this query. + * @property {Object} triedHosts Gets the associative array of host that were queried before getting a valid response, + * being the last host the one that replied correctly. + * @property {Uuid} traceId Identifier of the trace session. + * @property {Array.} warnings Warning messages generated by the server when executing the query. + */ + this.info = result.info; + const rows = result.rows; + rowParser = rowParser || parsePlainJsonRow; + + /** + * This property has been deprecated because it may return a lower value than the actual length of the results. + * Use toArray() instead. + *

Gets the length of the result.

+ * @deprecated Use toArray() instead. This property will be removed in the following major version. + * @member {Number} + */ + this.length = result.rowLength; + + /** + * A string token representing the current page state of query. It can be used in the following executions to + * continue paging and retrieve the remained of the result for the query. + * @member {String} + */ + this.pageState = result.pageState; + + /** + * Returns the first element of the result or null if the result is empty. + * @returns {Object} + */ + this.first = function first() { + const iterator = this.values(); + const item = iterator.next(); + if (item.done) { + return null; + } + + return item.value; + }; + + /** + * Executes a provided function once per result element. + * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. + * @param {Object} [thisArg] Value to use as this when executing callback. + */ + this.forEach = function forEach(callback, thisArg) { + if (!rows.length) { + return; + } + const iterator = this.values(); + let item = iterator.next(); + let index = 0; + while (!item.done) { + callback.call(thisArg || this, item.value, index++); + item = iterator.next(); + } + }; + + /** + * Results an Array of graph result elements (vertex, edge, scalar). + * @returns {Array} + */ + this.toArray = function toArray() { + if (!rows.length) { + return utils.emptyArray; + } + return utils.iteratorToArray(this.values()); + }; + + /** + * Returns a new Iterator object that contains the values for each index in the result. + * @returns {Iterator} + */ + this.values = function* values() { + for (const traverser of this.getTraversers()) { + const bulk = traverser.bulk || 1; + + for (let j = 0; j < bulk; j++) { + yield traverser.object; + } + } + }; + + /** + * Gets the traversers represented contained in the result set. 
+ * @returns {Iterator} + */ + this.getTraversers = function* () { + for (const row of rows) { + yield rowParser(row); + } + }; +} + +if (typeof Symbol !== 'undefined' && typeof Symbol.iterator === 'symbol') { + // Make iterable + GraphResultSet.prototype[Symbol.iterator] = function getIterator() { + return this.values(); + }; +} + +/** + * @param {Row} row + * @private + */ +function parsePlainJsonRow(row) { + const parsed = JSON.parse(row['gremlin']); + return { object: parsed.result, bulk: parsed.bulk || 1 }; +} + +module.exports = GraphResultSet; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/structure.js b/node_modules/cassandra-driver/lib/datastax/graph/structure.js new file mode 100644 index 0000000..deef3e1 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/structure.js @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const util = require('util'); + +/** + * @classdesc + * Represents a graph Element. + * @param id + * @param label + * @abstract + * @memberOf module:datastax/graph + * @constructor + */ +function Element(id, label) { + /** + * Gets the element id. + */ + this.id = id; + /** + * Gets the element label. + * @type {String} + */ + this.label = label; +} + +/** + * @classdesc + * Represents a graph Vertex. 
+ * @param id + * @param {String} label + * @param {Object} properties + * @extends {Element} + * @memberOf module:datastax/graph + * @constructor + */ +function Vertex(id, label, properties) { + Element.call(this, id, label); + /** + * Gets the vertex properties. + * @type {Object} + */ + this.properties = properties; +} + +util.inherits(Vertex, Element); + +/** + * @classdesc + * Represents a graph Edge. + * @param id + * @param outV + * @param {outVLabel} outVLabel + * @param {String} label + * @param inV + * @param {String} inVLabel + * @param {Object} properties + * @extends {Element} + * @memberOf module:datastax/graph + * @constructor + */ +function Edge(id, outV, outVLabel, label, inV, inVLabel, properties) { + Element.call(this, id, label); + /** + * Gets the id of outgoing vertex of the edge. + */ + this.outV = outV; + /** + * Gets the label of the outgoing vertex. + */ + this.outVLabel = outVLabel; + /** + * Gets the id of the incoming vertex of the edge. + */ + this.inV = inV; + + /** + * Gets the label of the incoming vertex. + */ + this.inVLabel = inVLabel; + /** + * Gets the properties of the edge as an associative array. + * @type {Object} + */ + this.properties = {}; + (function adaptProperties(self) { + if (properties) { + const keys = Object.keys(properties); + for (let i = 0; i < keys.length; i++) { + const k = keys[i]; + self.properties[k] = properties[k].value; + } + } + })(this); +} + +util.inherits(Edge, Element); + +/** + * @classdesc + * Represents a graph vertex property. + * @param id + * @param {String} label + * @param value + * @param {Object} properties + * @extends {Element} + * @memberOf module:datastax/graph + * @constructor + */ +function VertexProperty(id, label, value, properties) { + Element.call(this, id, label); + this.value = value; + this.key = this.label; + this.properties = properties; +} + +util.inherits(VertexProperty, Element); + +/** + * @classdesc + * Represents a property. 
+ * @param key + * @param value + * @memberOf module:datastax/graph + * @constructor + */ +function Property(key, value) { + this.key = key; + this.value = value; +} + +/** + * @classdesc + * Represents a walk through a graph as defined by a traversal. + * @param {Array} labels + * @param {Array} objects + * @memberOf module:datastax/graph + * @constructor + */ +function Path(labels, objects) { + this.labels = labels; + this.objects = objects; +} + +module.exports = { + Edge, + Element, + Path, + Property, + Vertex, + VertexProperty +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/graph/type-serializers.js b/node_modules/cassandra-driver/lib/datastax/graph/type-serializers.js new file mode 100644 index 0000000..880c0f3 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/type-serializers.js @@ -0,0 +1,501 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +/** + * @module datastax/graph/tinkerpop/typeSerializers + * @ignore + */ + +/** + * @author Jorge Bay Gondra + */ +'use strict'; + +// Replace dependencies to minimize code changes from Apache TinkerPop +const t = { + P: UnsupportedType, TextP: UnsupportedType, Traversal: UnsupportedType, Traverser: UnsupportedType, + EnumValue: UnsupportedType +}; +const ts = { TraversalStrategy: UnsupportedType }; +const Bytecode = UnsupportedType; +const g = require('./index'); +const utils = { Long: UnsupportedType }; +t.t = g.t; +t.direction = g.direction; + +function UnsupportedType() { } + +const valueKey = '@value'; +const typeKey = '@type'; + +/** + * @abstract + */ +class TypeSerializer { + serialize() { + throw new Error('serialize() method not implemented for ' + this.constructor.name); + } + + deserialize() { + throw new Error('deserialize() method not implemented for ' + this.constructor.name); + } + + canBeUsedFor() { + throw new Error('canBeUsedFor() method not implemented for ' + this.constructor.name); + } +} + +class NumberSerializer extends TypeSerializer { + serialize(item) { + if (isNaN(item)) { + return { + [typeKey]: 'g:Double', + [valueKey]: 'NaN' + }; + } else if (item === Number.POSITIVE_INFINITY) { + return { + [typeKey]: 'g:Double', + [valueKey]: 'Infinity' + }; + } else if (item === Number.NEGATIVE_INFINITY) { + return { + [typeKey]: 'g:Double', + [valueKey]: '-Infinity' + }; + } else { + return item; + } + } + + deserialize(obj) { + var val = obj[valueKey]; + if (val === 'NaN') { + return NaN; + } else if (val === 'Infinity') { + return Number.POSITIVE_INFINITY; + } else if (val === '-Infinity') { + return Number.NEGATIVE_INFINITY; + } else { + return parseFloat(val); + } + } + + canBeUsedFor(value) { + return (typeof value === 'number'); + } +} + +class DateSerializer extends TypeSerializer { + serialize(item) { + return { + [typeKey]: 'g:Date', + [valueKey]: item.getTime() + }; + } + + deserialize(obj) { + return new Date(obj[valueKey]); + 
} + + canBeUsedFor(value) { + return (value instanceof Date); + } +} + +class LongSerializer extends TypeSerializer { + serialize(item) { + return { + [typeKey]: 'g:Int64', + [valueKey]: item.value + }; + } + + canBeUsedFor(value) { + return (value instanceof utils.Long); + } +} + +class BytecodeSerializer extends TypeSerializer { + serialize(item) { + let bytecode = item; + if (item instanceof t.Traversal) { + bytecode = item.getBytecode(); + } + const result = {}; + result[typeKey] = 'g:Bytecode'; + const resultValue = result[valueKey] = {}; + const sources = this._serializeInstructions(bytecode.sourceInstructions); + if (sources) { + resultValue['source'] = sources; + } + const steps = this._serializeInstructions(bytecode.stepInstructions); + if (steps) { + resultValue['step'] = steps; + } + return result; + } + + _serializeInstructions(instructions) { + if (instructions.length === 0) { + return null; + } + const result = new Array(instructions.length); + result[0] = instructions[0]; + for (let i = 0; i < instructions.length; i++) { + result[i] = instructions[i].map(item => this.writer.adaptObject(item)); + } + return result; + } + + canBeUsedFor(value) { + return (value instanceof Bytecode) || (value instanceof t.Traversal); + } +} + +class PSerializer extends TypeSerializer { + /** @param {P} item */ + serialize(item) { + const result = {}; + result[typeKey] = 'g:P'; + const resultValue = result[valueKey] = { + 'predicate': item.operator + }; + if (item.other === undefined || item.other === null) { + resultValue['value'] = this.writer.adaptObject(item.value); + } + else { + resultValue['value'] = [ this.writer.adaptObject(item.value), this.writer.adaptObject(item.other) ]; + } + return result; + } + + canBeUsedFor(value) { + return (value instanceof t.P); + } +} + +class TextPSerializer extends TypeSerializer { + /** @param {TextP} item */ + serialize(item) { + const result = {}; + result[typeKey] = 'g:TextP'; + const resultValue = result[valueKey] = { + 
'predicate': item.operator + }; + if (item.other === undefined || item.other === null) { + resultValue['value'] = this.writer.adaptObject(item.value); + } + else { + resultValue['value'] = [ this.writer.adaptObject(item.value), this.writer.adaptObject(item.other) ]; + } + return result; + } + + canBeUsedFor(value) { + return (value instanceof t.TextP); + } +} + +class LambdaSerializer extends TypeSerializer { + /** @param {Function} item */ + serialize(item) { + return { + [typeKey]: 'g:Lambda', + [valueKey]: { + 'arguments': item.length, + 'language': 'gremlin-javascript', + 'script': item.toString() + } + }; + } + + canBeUsedFor(value) { + return (typeof value === 'function'); + } +} + +class EnumSerializer extends TypeSerializer { + /** @param {EnumValue} item */ + serialize(item) { + return { + [typeKey]: 'g:' + item.typeName, + [valueKey]: item.elementName + }; + } + + canBeUsedFor(value) { + return value && value.typeName && value instanceof t.EnumValue; + } +} + +class TraverserSerializer extends TypeSerializer { + /** @param {Traverser} item */ + serialize(item) { + return { + [typeKey]: 'g:Traverser', + [valueKey]: { + 'value': this.writer.adaptObject(item.object), + 'bulk': this.writer.adaptObject(item.bulk) + } + }; + } + + deserialize(obj) { + const value = obj[valueKey]; + return new t.Traverser(this.reader.read(value['value']), this.reader.read(value['bulk'])); + } + + canBeUsedFor(value) { + return (value instanceof t.Traverser); + } +} + +class TraversalStrategySerializer extends TypeSerializer { + /** @param {TraversalStrategy} item */ + serialize(item) { + return { + [typeKey]: 'g:' + item.constructor.name, + [valueKey]: item.configuration + }; + } + + canBeUsedFor(value) { + return (value instanceof ts.TraversalStrategy); + } +} + +class VertexSerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + return new g.Vertex(this.reader.read(value['id']), value['label'], this.reader.read(value['properties'])); + } + + 
/** @param {Vertex} item */ + serialize(item) { + return { + [typeKey]: 'g:Vertex', + [valueKey]: { + 'id': this.writer.adaptObject(item.id), + 'label': item.label + } + }; + } + + canBeUsedFor(value) { + return (value instanceof g.Vertex); + } +} + +class VertexPropertySerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + return new g.VertexProperty( + this.reader.read(value['id']), + value['label'], + this.reader.read(value['value']), + this.reader.read(value['properties']) + ); + } +} + +class PropertySerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + return new g.Property( + value['key'], + this.reader.read(value['value'])); + } +} + +class EdgeSerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + return new g.Edge( + this.reader.read(value['id']), + new g.Vertex(this.reader.read(value['outV']), this.reader.read(value['outVLabel'])), + value['label'], + new g.Vertex(this.reader.read(value['inV']), this.reader.read(value['inVLabel'])), + this.reader.read(value['properties']) + ); + } + + /** @param {Edge} item */ + serialize(item) { + return { + [typeKey]: 'g:Edge', + [valueKey]: { + 'id': this.writer.adaptObject(item.id), + 'label': item.label, + 'outV': this.writer.adaptObject(item.outV.id), + 'outVLabel': item.outV.label, + 'inV': this.writer.adaptObject(item.inV.id), + 'inVLabel': item.inV.label + } + }; + } + + canBeUsedFor(value) { + return (value instanceof g.Edge); + } +} + +class PathSerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + const objects = value['objects'].map(o => this.reader.read(o)); + return new g.Path(this.reader.read(value['labels']), objects); + } +} + +class Path3Serializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + return new g.Path(this.reader.read(value['labels']), this.reader.read(value['objects'])); + } +} + +class TSerializer extends 
TypeSerializer { + deserialize(obj) { + return t.t[obj[valueKey]]; + } +} + +class DirectionSerializer extends TypeSerializer { + deserialize(obj) { + return t.direction[obj[valueKey].toLowerCase()]; + } +} + +class ArraySerializer extends TypeSerializer { + constructor(typeKey) { + super(); + this.typeKey = typeKey; + } + + deserialize(obj) { + const value = obj[valueKey]; + if (!Array.isArray(value)) { + throw new Error('Expected Array, obtained: ' + value); + } + return value.map(x => this.reader.read(x)); + } + + /** @param {Array} item */ + serialize(item) { + return { + [typeKey]: this.typeKey, + [valueKey]: item.map(x => this.writer.adaptObject(x)) + }; + } + + canBeUsedFor(value) { + return Array.isArray(value); + } +} + +class BulkSetSerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + if (!Array.isArray(value)) { + throw new Error('Expected Array, obtained: ' + value); + } + + // coerce the BulkSet to List. if the bulk exceeds the int space then we can't coerce to List anyway, + // so this query will be trouble. we'd need a legit BulkSet implementation here in js. this current + // implementation is here to replicate the previous functionality that existed on the server side in + // previous versions. 
+ let result = []; + for (let ix = 0, iy = value.length; ix < iy; ix += 2) { + const pair = value.slice(ix, ix + 2); + result = result.concat(Array(this.reader.read(pair[1])).fill(this.reader.read(pair[0]))); + } + + return result; + } +} + +class MapSerializer extends TypeSerializer { + deserialize(obj) { + const value = obj[valueKey]; + if (!Array.isArray(value)) { + throw new Error('Expected Array, obtained: ' + value); + } + const result = new Map(); + for (let i = 0; i < value.length; i += 2) { + result.set(this.reader.read(value[i]), this.reader.read(value[i + 1])); + } + return result; + } + + /** @param {Map} map */ + serialize(map) { + const arr = []; + map.forEach((v, k) => { + arr.push(this.writer.adaptObject(k)); + arr.push(this.writer.adaptObject(v)); + }); + return { + [typeKey]: 'g:Map', + [valueKey]: arr + }; + } + + canBeUsedFor(value) { + return value instanceof Map; + } +} + +class ListSerializer extends ArraySerializer { + constructor() { + super('g:List'); + } +} + +class SetSerializer extends ArraySerializer { + constructor() { + super('g:Set'); + } +} + +module.exports = { + BulkSetSerializer, + BytecodeSerializer, + DateSerializer, + DirectionSerializer, + EdgeSerializer, + EnumSerializer, + LambdaSerializer, + ListSerializer, + LongSerializer, + MapSerializer, + NumberSerializer, + Path3Serializer, + PathSerializer, + PropertySerializer, + PSerializer, + TextPSerializer, + SetSerializer, + TSerializer, + TraverserSerializer, + TraversalStrategySerializer, + typeKey, + valueKey, + VertexPropertySerializer, + VertexSerializer +}; diff --git a/node_modules/cassandra-driver/lib/datastax/graph/wrappers.js b/node_modules/cassandra-driver/lib/datastax/graph/wrappers.js new file mode 100644 index 0000000..0bbf8c0 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/graph/wrappers.js @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const types = require('../../types'); +const { dataTypes } = types; + +/** + * Internal representation of a value with additional type information. + * @internal + * @ignore + */ +class GraphTypeWrapper { + constructor(value, typeInfo) { + this.value = value; + this.typeInfo = typeof typeInfo === 'number' ? { code: typeInfo } : typeInfo; + } +} + + +/** + * Internal representation of user-defined type with the metadata. 
+ * @internal + * @ignore + */ +class UdtGraphWrapper { + constructor(value, udtInfo) { + this.value = value; + + if (!udtInfo || !udtInfo.name || !udtInfo.keyspace || !udtInfo.fields) { + throw new TypeError(`udtInfo must be an object with name, keyspace and field properties defined`); + } + + this.udtInfo = udtInfo; + } +} + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is an int + * @memberOf module:datastax/graph + */ +function asInt(value) { return new GraphTypeWrapper(value, dataTypes.int); } + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is a double + * @memberOf module:datastax/graph + */ +function asDouble(value) { return new GraphTypeWrapper(value, dataTypes.double); } + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is a double + * @memberOf module:datastax/graph + */ +function asFloat(value) { return new GraphTypeWrapper(value, dataTypes.float); } + +/** + * Wraps a Date or null value to hint the client driver that the data type of the value is a timestamp + * @memberOf module:datastax/graph + */ +function asTimestamp(value) { return new GraphTypeWrapper(value, dataTypes.timestamp); } + +/** + * Wraps an Object or null value to hint the client driver that the data type of the value is a user-defined type. + * @memberOf module:datastax/graph + * @param {object} value The object representing the UDT. + * @param {{name: string, keyspace: string, fields: Array}} udtInfo The UDT metadata as defined by the driver. 
+ */ +function asUdt(value, udtInfo) { return new UdtGraphWrapper(value, udtInfo); } + +module.exports = { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper }; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/index.d.ts b/node_modules/cassandra-driver/lib/datastax/index.d.ts new file mode 100644 index 0000000..e41483f --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/index.d.ts @@ -0,0 +1,24 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as graphModule from './graph'; +import * as searchModule from './search'; + +export namespace datastax { + export import graph = graphModule.graph; + + export import search = searchModule.search; +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/index.js b/node_modules/cassandra-driver/lib/datastax/index.js new file mode 100644 index 0000000..b193461 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/index.js @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * DataStax module. + *

+ * <p>
+ *   Contains modules and classes to represent functionality that is specific to DataStax products.
+ * </p>
+ *

+ * @module datastax + */ + +exports.graph = require('./graph'); +exports.search = require('./search'); \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/search/date-range.js b/node_modules/cassandra-driver/lib/datastax/search/date-range.js new file mode 100644 index 0000000..877c9a3 --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/search/date-range.js @@ -0,0 +1,537 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const utils = require('../../utils'); +const Long = require('long'); + +/** + * Regex to parse dates in the following format YYYY-MM-DDThh:mm:ss.mssZ + * Looks cumbersome but it's straightforward: + * - "(\d{1,6})": year mandatory 1 to 6 digits + * - (?:-(\d{1,2}))?(?:-(\d{1,2}))? two non-capturing groups representing the month and day (1 to 2 digits captured). + * - (?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2}))?)?Z? A non-capturing group for the time portion + * @private + */ +const dateRegex = + /^[-+]?(\d{1,6})(?:-(\d{1,2}))?(?:-(\d{1,2}))?(?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2})(?:\.(\d{1,3}))?)?)?Z?$/; +const multipleBoundariesRegex = /^\[(.+?) 
TO (.+)]$/; +const unbounded = Object.freeze(new DateRangeBound(null, -1)); + +const dateRangeType = { + // single value as in "2001-01-01" + singleValue: 0, + // closed range as in "[2001-01-01 TO 2001-01-31]" + closedRange: 1, + // open range high as in "[2001-01-01 TO *]" + openRangeHigh: 2, + // - 0x03 - open range low as in "[* TO 2001-01-01]" + openRangeLow: 3, + // - 0x04 - both ranges open as in "[* TO *]" + openBoth: 4, + // - 0x05 - single open range as in "[*]" + openSingle: 5 +}; + +/** + * Defines the possible values of date range precision. + * @type {Object} + * @property {Number} year + * @property {Number} month + * @property {Number} day + * @property {Number} hour + * @property {Number} minute + * @property {Number} second + * @property {Number} millisecond + * @memberof module:search + */ +const dateRangePrecision = { + year: 0, + month: 1, + day: 2, + hour: 3, + minute: 4, + second: 5, + millisecond: 6 +}; + +/** + * Creates a new instance of DateRange using a lower bound and an upper bound. + *

+ * <p>Consider using <code>DateRange.fromString()</code> to create instances more easily.</p>

+ * @classdesc + * Represents a range of dates, corresponding to the Apache Solr type + *
+ * <code>DateRangeField</code>.
+ *

+ * A date range can have one or two bounds, namely lower bound and upper bound, to represent an interval of time. + * Date range bounds are both inclusive. For example: + *

+ *
    + *
+ * <ul>
+ *   <li><code>2015 TO 2016-10</code> represents from the first day of 2015 to the last day of October 2016</li>
+ *   <li><code>2015</code> represents during the course of the year 2015.</li>
+ *   <li><code>2017 TO *</code> represents any date greater than or equal to the first day of the year 2017.</li>
+ * </ul>
+ *

+ * Note that this JavaScript representation of DateRangeField does not support Dates outside of the range + * supported by ECMAScript Date: –100,000,000 days to 100,000,000 days measured relative to midnight at the + * beginning of 01 January, 1970 UTC. Being -271821-04-20T00:00:00.000Z the minimum lower boundary + * and 275760-09-13T00:00:00.000Z the maximum higher boundary. + *

+ * @param {DateRangeBound} lowerBound A value representing the range lower bound, composed by a + * Date and a precision. Use DateRangeBound.unbounded for an open lower bound. + * @param {DateRangeBound} [upperBound] A value representing the range upper bound, composed by a + * Date and a precision. Use DateRangeBound.unbounded for an open upper bound. When it's not + * defined, the DateRange instance is considered as a single value range. + * @constructor + * @memberOf module:datastax/search + */ +function DateRange(lowerBound, upperBound) { + if (!lowerBound) { + throw new TypeError('The lower boundaries must be defined'); + } + /** + * Gets the lower bound of this range (inclusive). + * @type {DateRangeBound} + */ + this.lowerBound = lowerBound; + /** + * Gets the upper bound of this range (inclusive). + * @type {DateRangeBound|null} + */ + this.upperBound = upperBound || null; + + // Define the type + if (this.upperBound === null) { + if (this.lowerBound !== unbounded) { + this._type = dateRangeType.singleValue; + } + else { + this._type = dateRangeType.openSingle; + } + } + else { + if (this.lowerBound !== unbounded) { + this._type = this.upperBound !== unbounded ? dateRangeType.closedRange : dateRangeType.openRangeHigh; + } + else { + this._type = this.upperBound !== unbounded ? dateRangeType.openRangeLow : dateRangeType.openBoth; + } + } +} + +/** + * Returns true if the value of this DateRange instance and other are the same. + * @param {DateRange} other + * @returns {Boolean} + */ +DateRange.prototype.equals = function (other) { + if (!(other instanceof DateRange)) { + return false; + } + return (other.lowerBound.equals(this.lowerBound) && + (other.upperBound ? other.upperBound.equals(this.upperBound) : !this.upperBound)); +}; + +/** + * Returns the string representation of the instance. 
+ * @return {String} + */ +DateRange.prototype.toString = function () { + if (this.upperBound === null) { + return this.lowerBound.toString(); + } + return '[' + this.lowerBound.toString() + ' TO ' + this.upperBound.toString() + ']'; +}; + +DateRange.prototype.toBuffer = function () { + // Serializes the value containing: + // [] + if (this._type === dateRangeType.openBoth || this._type === dateRangeType.openSingle) { + return utils.allocBufferFromArray([ this._type ]); + } + let buffer; + let offset = 0; + if (this._type !== dateRangeType.closedRange) { + // byte + long + byte + const boundary = this._type !== dateRangeType.openRangeLow ? this.lowerBound : this.upperBound; + buffer = utils.allocBufferUnsafe(10); + buffer.writeUInt8(this._type, offset++); + offset = writeDate(boundary.date, buffer, offset); + buffer.writeUInt8(boundary.precision, offset); + return buffer; + } + // byte + long + byte + long + byte + buffer = utils.allocBufferUnsafe(19); + buffer.writeUInt8(this._type, offset++); + offset = writeDate(this.lowerBound.date, buffer, offset); + buffer.writeUInt8(this.lowerBound.precision, offset++); + offset = writeDate(this.upperBound.date, buffer, offset); + buffer.writeUInt8(this.upperBound.precision, offset); + return buffer; +}; + +/** + * Returns the DateRange representation of a given string. + *

+ * <p>String representations of dates are always expressed in Coordinated Universal Time (UTC).</p>

+ * @param {String} dateRangeString + */ +DateRange.fromString = function (dateRangeString) { + const matches = multipleBoundariesRegex.exec(dateRangeString); + if (!matches) { + return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(dateRangeString))); + } + return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(matches[1])), DateRangeBound.toUpperBound(DateRangeBound.fromString(matches[2]))); +}; + +/** + * Deserializes the buffer into a DateRange + * @param {Buffer} buffer + * @return {DateRange} + */ +DateRange.fromBuffer = function (buffer) { + if (buffer.length === 0) { + throw new TypeError('DateRange serialized value must have at least 1 byte'); + } + const type = buffer.readUInt8(0); + if (type === dateRangeType.openBoth) { + return new DateRange(unbounded, unbounded); + } + if (type === dateRangeType.openSingle) { + return new DateRange(unbounded); + } + let offset = 1; + let date1; + let lowerBound; + let upperBound = null; + if (type !== dateRangeType.closedRange) { + date1 = readDate(buffer, offset); + offset += 8; + lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset)); + if (type === dateRangeType.openRangeLow) { + // lower boundary is open, the first serialized boundary is the upperBound + upperBound = lowerBound; + lowerBound = unbounded; + } + else { + upperBound = type === dateRangeType.openRangeHigh ? unbounded : null; + } + return new DateRange(lowerBound, upperBound); + } + date1 = readDate(buffer, offset); + offset += 8; + lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset++)); + const date2 = readDate(buffer, offset); + offset += 8; + upperBound = new DateRangeBound(date2, buffer.readUInt8(offset)); + return new DateRange(lowerBound, upperBound); +}; + +/** + * Writes a Date, long millis since epoch, to a buffer starting from offset. + * @param {Date} date + * @param {Buffer} buffer + * @param {Number} offset + * @return {Number} The new offset. 
+ * @private + */ +function writeDate(date, buffer, offset) { + const long = Long.fromNumber(date.getTime()); + buffer.writeUInt32BE(long.getHighBitsUnsigned(), offset); + buffer.writeUInt32BE(long.getLowBitsUnsigned(), offset + 4); + return offset + 8; +} + +/** + * Reads a Date, long millis since epoch, from a buffer starting from offset. + * @param {Buffer} buffer + * @param {Number} offset + * @return {Date} + * @private + */ +function readDate(buffer, offset) { + const long = new Long(buffer.readInt32BE(offset+4), buffer.readInt32BE(offset)); + return new Date(long.toNumber()); +} + +/** + * @classdesc + * Represents a date range boundary, composed by a Date and a precision. + * @param {Date} date The timestamp portion, representing a single moment in time. Consider using + * Date.UTC() method to build the Date instance. + * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are + * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @constructor + * @memberOf module:datastax/search + */ +function DateRangeBound(date, precision) { + /** + * The timestamp portion of the boundary. + * @type {Date} + */ + this.date = date; + /** + * The precision portion of the boundary. Valid values are defined in the + * [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @type {Number} + */ + this.precision = precision; +} + +/** + * Returns the string representation of the instance. 
+ * @return {String} + */ +DateRangeBound.prototype.toString = function () { + if (this.precision === -1) { + return '*'; + } + let precision = 0; + const isoString = this.date.toISOString(); + let i; + let char; + // The years take at least the first 4 characters + for (i = 4; i < isoString.length && precision <= this.precision; i++) { + char = isoString.charAt(i); + if (precision === dateRangePrecision.day && char === 'T') { + precision = dateRangePrecision.hour; + continue; + } + if (precision >= dateRangePrecision.hour && char === ':' || char === '.') { + precision++; + continue; + } + if (precision < dateRangePrecision.day && char === '-') { + precision++; + } + } + let start = 0; + const firstChar = isoString.charAt(0); + let sign = ''; + let toRemoveIndex = 4; + if (firstChar === '+' || firstChar === '-') { + sign = firstChar; + if (firstChar === '-') { + // since we are retaining the -, don't remove as many zeros. + toRemoveIndex = 3; + } + // Remove additional zeros + for (start = 1; start < toRemoveIndex; start++) { + if (isoString.charAt(start) !== '0') { + break; + } + } + } + if (this.precision !== dateRangePrecision.millisecond) { + // i holds the position of the first char that marks the end of a precision (ie: '-', 'T', ...), + // we should not include it in the result, except its the 'Z' char for the complete representation + i--; + } + return sign + isoString.substring(start, i); +}; + +/** + * Returns true if the value of this DateRange instance and other are the same. + * @param {DateRangeBound} other + * @return {boolean} + */ +DateRangeBound.prototype.equals = function (other) { + if (!(other instanceof DateRangeBound)) { + return false; + } + if (other.precision !== this.precision) { + return false; + } + return datesEqual(other.date, this.date); +}; + +function datesEqual(d1, d2) { + const t1 = d1 ? d1.getTime() : null; + const t2 = d2 ? 
d2.getTime() : null; + return t1 === t2; +} + +DateRangeBound.prototype.isUnbounded = function () { + return (this.precision === -1); +}; + +/** + * Parses a date string and returns a DateRangeBound. + * @param {String} boundaryString + * @return {DateRangeBound} + */ +DateRangeBound.fromString = function(boundaryString) { + if (!boundaryString) { + return null; + } + if (boundaryString === '*') { + return unbounded; + } + const matches = dateRegex.exec(boundaryString); + if (!matches) { + throw TypeError('String provided is not a valid date ' + boundaryString); + } + if (matches[7] !== undefined && matches[5] === undefined) { + // Due to a limitation in the regex, its possible to match dates like 2015T03:02.001, without the seconds + // portion but with the milliseconds specified. + throw new TypeError('String representation of the date contains the milliseconds portion but not the seconds: ' + + boundaryString); + } + const builder = new BoundaryBuilder(boundaryString.charAt(0) === '-'); + for (let i = 1; i < matches.length; i++) { + builder.set(i-1, matches[i], boundaryString); + } + return builder.build(); +}; + +/** + * The unbounded {@link DateRangeBound} instance. Unbounded bounds are syntactically represented by a * + * (star) sign. + * @type {DateRangeBound} + */ +DateRangeBound.unbounded = unbounded; + +/** + * Converts a {DateRangeBound} into a lower-bounded bound by rounding down its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round down. + * @returns {DateRangeBound} with the date rounded down to the given precision. 
+ */ +DateRangeBound.toLowerBound = function (bound) { + if(bound === unbounded) { + return bound; + } + const rounded = new Date(bound.date.getTime()); + // in this case we want to fallthrough + /* eslint-disable no-fallthrough */ + switch (bound.precision) { + case dateRangePrecision.year: + rounded.setUTCMonth(0); + case dateRangePrecision.month: + rounded.setUTCDate(1); + case dateRangePrecision.day: + rounded.setUTCHours(0); + case dateRangePrecision.hour: + rounded.setUTCMinutes(0); + case dateRangePrecision.minute: + rounded.setUTCSeconds(0); + case dateRangePrecision.second: + rounded.setUTCMilliseconds(0); + } + /* eslint-enable no-fallthrough */ + return new DateRangeBound(rounded, bound.precision); +}; + +/** + * Converts a {DateRangeBound} into a upper-bounded bound by rounding up its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round up. + * @returns {DateRangeBound} with the date rounded up to the given precision. + */ +DateRangeBound.toUpperBound = function (bound) { + if (bound === unbounded) { + return bound; + } + const rounded = new Date(bound.date.getTime()); + // in this case we want to fallthrough + /* eslint-disable no-fallthrough */ + switch (bound.precision) { + case dateRangePrecision.year: + rounded.setUTCMonth(11); + case dateRangePrecision.month: + // Advance to the beginning of next month and set day of month to 0 + // which sets the date to the last day of the previous month. 
+ // This gives us the effect of YYYY-MM-LastDayOfThatMonth + rounded.setUTCMonth(rounded.getUTCMonth() + 1, 0); + case dateRangePrecision.day: + rounded.setUTCHours(23); + case dateRangePrecision.hour: + rounded.setUTCMinutes(59); + case dateRangePrecision.minute: + rounded.setUTCSeconds(59); + case dateRangePrecision.second: + rounded.setUTCMilliseconds(999); + } + /* eslint-enable no-fallthrough */ + return new DateRangeBound(rounded, bound.precision); +}; + +/** @private */ +function BoundaryBuilder(isNegative) { + this._sign = isNegative ? -1 : 1; + this._index = 0; + this._values = new Int32Array(7); +} + +BoundaryBuilder.prototype.set = function (index, value, stringDate) { + if (value === undefined) { + return; + } + if (index > 6) { + throw new TypeError('Index out of bounds: ' + index); + } + if (index > this._index) { + this._index = index; + } + const numValue = +value; + switch (index) { + case dateRangePrecision.month: + if (numValue < 1 || numValue > 12) { + throw new TypeError('Month portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.day: + if (numValue < 1 || numValue > 31) { + throw new TypeError('Day portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.hour: + if (numValue > 23) { + throw new TypeError('Hour portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.minute: + case dateRangePrecision.second: + if (numValue > 59) { + throw new TypeError('Minute/second portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.millisecond: + if (numValue > 999) { + throw new TypeError('Millisecond portion is not valid for date: ' + stringDate); + } + break; + } + this._values[index] = numValue; +}; + +/** @return {DateRangeBound} */ +BoundaryBuilder.prototype.build = function () { + const date = new Date(0); + let month = this._values[1]; + if (month) { + // ES Date months are represented from 0 to 11 + month--; + } + 
date.setUTCFullYear(this._sign * this._values[0], month, this._values[2] || 1); + date.setUTCHours(this._values[3], this._values[4], this._values[5], this._values[6]); + return new DateRangeBound(date, this._index); +}; + +exports.unbounded = unbounded; +exports.dateRangePrecision = dateRangePrecision; +exports.DateRange = DateRange; +exports.DateRangeBound = DateRangeBound; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/search/index.d.ts b/node_modules/cassandra-driver/lib/datastax/search/index.d.ts new file mode 100644 index 0000000..57ba44f --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/search/index.d.ts @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export namespace search { + enum dateRangePrecision { + year = 0, + month, + day, + hour, + minute, + second, + millisecond + } + + class DateRange { + lowerBound: DateRangeBound; + upperBound: DateRangeBound; + + constructor(lowerBound: DateRangeBound, upperBound: DateRangeBound); + + equals(other: DateRangeBound): boolean; + + toString(): string; + + static fromString(value: string): DateRange; + + static fromBuffer(value: Buffer): DateRange; + } + + class DateRangeBound { + date: Date; + + precision: number; + + equals(other: DateRangeBound): boolean; + + toString(): string; + + static fromString(value: string): DateRangeBound; + + static toLowerBound(bound: DateRangeBound): DateRangeBound; + + static toUpperBound(bound: DateRangeBound): DateRangeBound; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/datastax/search/index.js b/node_modules/cassandra-driver/lib/datastax/search/index.js new file mode 100644 index 0000000..c101f5d --- /dev/null +++ b/node_modules/cassandra-driver/lib/datastax/search/index.js @@ -0,0 +1,30 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const dateRangeModule = require('./date-range'); + +/** + * Search module. + *

 * Contains the classes to represent the set of types for search data that come with DSE 5.1+
 *
 * @module datastax/search
 */

// Re-export the date-range types so consumers can require the module root.
exports.DateRange = dateRangeModule.DateRange;
exports.DateRangeBound = dateRangeModule.DateRangeBound;
exports.dateRangePrecision = dateRangeModule.dateRangePrecision;
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
'use strict';
const util = require('util');

const types = require('./types');
const dataTypes = types.dataTypes;
const Long = types.Long;
const Integer = types.Integer;
const BigDecimal = types.BigDecimal;
const MutableLong = require('./types/mutable-long');
const utils = require('./utils');
const token = require('./token');
const { DateRange } = require('./datastax/search');
const geo = require('./geometry');
const Geometry = geo.Geometry;
const LineString = geo.LineString;
const Point = geo.Point;
const Polygon = geo.Polygon;

// Matches the canonical 8-4-4-4-12 hex UUID text form (case-insensitive).
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

// Pre-allocated buffers for frequently-encoded constant values.
const buffers = {
  int16Zero: utils.allocBufferFromArray([0, 0]),
  int32Zero: utils.allocBufferFromArray([0, 0, 0, 0]),
  int8Zero: utils.allocBufferFromArray([0]),
  int8One: utils.allocBufferFromArray([1]),
  int8MaxValue: utils.allocBufferFromArray([0xff])
};

// BigInt: Avoid using literals (e.g., 32n) as we must be able to compile with older engines
const isBigIntSupported = typeof BigInt !== 'undefined';
const bigInt32 = isBigIntSupported ? BigInt(32) : null;
const bigInt8 = isBigIntSupported ? BigInt(8) : null;
const bigInt0 = isBigIntSupported ? BigInt(0) : null;
const bigIntMinus1 = isBigIntSupported ? BigInt(-1) : null;
const bigInt32BitsOn = isBigIntSupported ? BigInt(0xffffffff) : null;
const bigInt8BitsOn = isBigIntSupported ? BigInt(0xff) : null;

// Fully-qualified Cassandra marshal class names for parameterized types.
const complexTypeNames = Object.freeze({
  list : 'org.apache.cassandra.db.marshal.ListType',
  set : 'org.apache.cassandra.db.marshal.SetType',
  map : 'org.apache.cassandra.db.marshal.MapType',
  udt : 'org.apache.cassandra.db.marshal.UserType',
  tuple : 'org.apache.cassandra.db.marshal.TupleType',
  frozen : 'org.apache.cassandra.db.marshal.FrozenType',
  reversed : 'org.apache.cassandra.db.marshal.ReversedType',
  composite : 'org.apache.cassandra.db.marshal.CompositeType',
  empty : 'org.apache.cassandra.db.marshal.EmptyType',
  collection: 'org.apache.cassandra.db.marshal.ColumnToCollectionType'
});
// CQL keywords used when parsing type names in their CQL text form.
const cqlNames = Object.freeze({
  frozen: 'frozen',
  list: 'list',
  'set': 'set',
  map: 'map',
  tuple: 'tuple',
  empty: 'empty',
  duration: 'duration'
});
// Maps single (non-parameterized) marshal class names to protocol type codes.
// Note both DateType and TimestampType map to timestamp.
const singleTypeNames = Object.freeze({
  'org.apache.cassandra.db.marshal.UTF8Type': dataTypes.varchar,
  'org.apache.cassandra.db.marshal.AsciiType': dataTypes.ascii,
  'org.apache.cassandra.db.marshal.UUIDType': dataTypes.uuid,
  'org.apache.cassandra.db.marshal.TimeUUIDType': dataTypes.timeuuid,
  'org.apache.cassandra.db.marshal.Int32Type': dataTypes.int,
  'org.apache.cassandra.db.marshal.BytesType': dataTypes.blob,
  'org.apache.cassandra.db.marshal.FloatType': dataTypes.float,
  'org.apache.cassandra.db.marshal.DoubleType': dataTypes.double,
  'org.apache.cassandra.db.marshal.BooleanType': dataTypes.boolean,
  'org.apache.cassandra.db.marshal.InetAddressType': dataTypes.inet,
  'org.apache.cassandra.db.marshal.SimpleDateType': dataTypes.date,
  'org.apache.cassandra.db.marshal.TimeType': dataTypes.time,
  'org.apache.cassandra.db.marshal.ShortType': dataTypes.smallint,
  'org.apache.cassandra.db.marshal.ByteType': dataTypes.tinyint,
  'org.apache.cassandra.db.marshal.DateType': dataTypes.timestamp,
  'org.apache.cassandra.db.marshal.TimestampType': dataTypes.timestamp,
  'org.apache.cassandra.db.marshal.LongType': dataTypes.bigint,
  'org.apache.cassandra.db.marshal.DecimalType': dataTypes.decimal,
  'org.apache.cassandra.db.marshal.IntegerType': dataTypes.varint,
  'org.apache.cassandra.db.marshal.CounterColumnType': dataTypes.counter
});
// Longest key length above; used as an upper bound when scanning type strings.
const singleFqTypeNamesLength = Object.keys(singleTypeNames).reduce(function (previous, current) {
  return current.length > previous ? current.length : previous;
}, 0);

// Custom (DSE) marshal class names that have dedicated encode/decode handlers.
const customTypeNames = Object.freeze({
  duration: 'org.apache.cassandra.db.marshal.DurationType',
  lineString: 'org.apache.cassandra.db.marshal.LineStringType',
  point: 'org.apache.cassandra.db.marshal.PointType',
  polygon: 'org.apache.cassandra.db.marshal.PolygonType',
  dateRange: 'org.apache.cassandra.db.marshal.DateRangeType'
});

// Protocol sentinels: length -1 encodes null, length -2 encodes "unset".
const nullValueBuffer = utils.allocBufferFromArray([255, 255, 255, 255]);
const unsetValueBuffer = utils.allocBufferFromArray([255, 255, 255, 254]);

/**
 * For backwards compatibility, empty buffers as text/blob/custom values are supported.
 * In the case of other types, they are going to be decoded as a null value.
 * @private
 * @type {Set}
 */
const zeroLengthTypesSupported = new Set([
  dataTypes.text,
  dataTypes.ascii,
  dataTypes.varchar,
  dataTypes.custom,
  dataTypes.blob
]);

/**
 * Serializes and deserializes to and from a CQL type and a Javascript Type.
 * @param {Number} protocolVersion
 * @param {ClientOptions} options
 * @constructor
 */
function Encoder(protocolVersion, options) {
  this.encodingOptions = options.encoding || utils.emptyObject;
  // Attach per-instance encode/decode methods before selecting
  // protocol-version-dependent behavior.
  defineInstanceMembers.call(this);
  this.setProtocolVersion(protocolVersion);
  setEncoders.call(this);
  // copyBuffer: decode into fresh buffers instead of slices of the frame
  // buffer (safer when the caller retains decoded values).
  if (this.encodingOptions.copyBuffer) {
    this.handleBuffer = handleBufferCopy;
  }
  else {
    this.handleBuffer = handleBufferRef;
  }
}

/**
 * Declares the privileged instance members.
+ * @private + */ +function defineInstanceMembers() { + /** + * Sets the protocol version and the encoding/decoding methods depending on the protocol version + * @param {Number} value + * @ignore + * @internal + */ + this.setProtocolVersion = function (value) { + this.protocolVersion = value; + //Set the collection serialization based on the protocol version + this.decodeCollectionLength = decodeCollectionLengthV3; + this.getLengthBuffer = getLengthBufferV3; + this.collectionLengthSize = 4; + if (!types.protocolVersion.uses4BytesCollectionLength(this.protocolVersion)) { + this.decodeCollectionLength = decodeCollectionLengthV2; + this.getLengthBuffer = getLengthBufferV2; + this.collectionLengthSize = 2; + } + }; + + const customDecoders = { + [customTypeNames.duration]: decodeDuration, + [customTypeNames.lineString]: decodeLineString, + [customTypeNames.point]: decodePoint, + [customTypeNames.polygon]: decodePolygon, + [customTypeNames.dateRange]: decodeDateRange + }; + + const customEncoders = { + [customTypeNames.duration]: encodeDuration, + [customTypeNames.lineString]: encodeLineString, + [customTypeNames.point]: encodePoint, + [customTypeNames.polygon]: encodePolygon, + [customTypeNames.dateRange]: encodeDateRange + }; + + // Decoding methods + this.decodeBlob = function (bytes) { + return this.handleBuffer(bytes); + }; + this.decodeCustom = function (bytes, typeName) { + const handler = customDecoders[typeName]; + if (handler) { + return handler.call(this, bytes); + } + return this.handleBuffer(bytes); + }; + this.decodeUtf8String = function (bytes) { + return bytes.toString('utf8'); + }; + this.decodeAsciiString = function (bytes) { + return bytes.toString('ascii'); + }; + this.decodeBoolean = function (bytes) { + return !!bytes.readUInt8(0); + }; + this.decodeDouble = function (bytes) { + return bytes.readDoubleBE(0); + }; + this.decodeFloat = function (bytes) { + return bytes.readFloatBE(0); + }; + this.decodeInt = function (bytes) { + return 
bytes.readInt32BE(0); + }; + this.decodeSmallint = function (bytes) { + return bytes.readInt16BE(0); + }; + this.decodeTinyint = function (bytes) { + return bytes.readInt8(0); + }; + + this._decodeCqlLongAsLong = function (bytes) { + return Long.fromBuffer(bytes); + }; + + this._decodeCqlLongAsBigInt = function (bytes) { + return BigInt.asIntN(64, (BigInt(bytes.readUInt32BE(0)) << bigInt32) | BigInt(bytes.readUInt32BE(4))); + }; + + this.decodeLong = this.encodingOptions.useBigIntAsLong + ? this._decodeCqlLongAsBigInt + : this._decodeCqlLongAsLong; + + this._decodeVarintAsInteger = function (bytes) { + return Integer.fromBuffer(bytes); + }; + + this._decodeVarintAsBigInt = function decodeVarintAsBigInt(bytes) { + let result = bigInt0; + if (bytes[0] <= 0x7f) { + for (let i = 0; i < bytes.length; i++) { + const b = BigInt(bytes[bytes.length - 1 - i]); + result = result | (b << BigInt(i * 8)); + } + } else { + for (let i = 0; i < bytes.length; i++) { + const b = BigInt(bytes[bytes.length - 1 - i]); + result = result | ((~b & bigInt8BitsOn) << BigInt(i * 8)); + } + result = ~result; + } + + return result; + }; + + this.decodeVarint = this.encodingOptions.useBigIntAsVarint + ? 
this._decodeVarintAsBigInt + : this._decodeVarintAsInteger; + + this.decodeDecimal = function(bytes) { + return BigDecimal.fromBuffer(bytes); + }; + this.decodeTimestamp = function(bytes) { + return new Date(this._decodeCqlLongAsLong(bytes).toNumber()); + }; + this.decodeDate = function (bytes) { + return types.LocalDate.fromBuffer(bytes); + }; + this.decodeTime = function (bytes) { + return types.LocalTime.fromBuffer(bytes); + }; + /* + * Reads a list from bytes + */ + this.decodeList = function (bytes, subtype) { + const totalItems = this.decodeCollectionLength(bytes, 0); + let offset = this.collectionLengthSize; + const list = new Array(totalItems); + for (let i = 0; i < totalItems; i++) { + //bytes length of the item + const length = this.decodeCollectionLength(bytes, offset); + offset += this.collectionLengthSize; + //slice it + list[i] = this.decode(bytes.slice(offset, offset+length), subtype); + offset += length; + } + return list; + }; + /* + * Reads a Set from bytes + */ + this.decodeSet = function (bytes, subtype) { + const arr = this.decodeList(bytes, subtype); + if (this.encodingOptions.set) { + const setConstructor = this.encodingOptions.set; + return new setConstructor(arr); + } + return arr; + }; + /* + * Reads a map (key / value) from bytes + */ + this.decodeMap = function (bytes, subtypes) { + let map; + const totalItems = this.decodeCollectionLength(bytes, 0); + let offset = this.collectionLengthSize; + const self = this; + function readValues(callback, thisArg) { + for (let i = 0; i < totalItems; i++) { + const keyLength = self.decodeCollectionLength(bytes, offset); + offset += self.collectionLengthSize; + const key = self.decode(bytes.slice(offset, offset + keyLength), subtypes[0]); + offset += keyLength; + const valueLength = self.decodeCollectionLength(bytes, offset); + offset += self.collectionLengthSize; + if (valueLength < 0) { + callback.call(thisArg, key, null); + continue; + } + const value = self.decode(bytes.slice(offset, offset + 
valueLength), subtypes[1]); + offset += valueLength; + callback.call(thisArg, key, value); + } + } + if (this.encodingOptions.map) { + const mapConstructor = this.encodingOptions.map; + map = new mapConstructor(); + readValues(map.set, map); + } + else { + map = {}; + readValues(function (key, value) { + map[key] = value; + }); + } + return map; + }; + this.decodeUuid = function (bytes) { + return new types.Uuid(this.handleBuffer(bytes)); + }; + this.decodeTimeUuid = function (bytes) { + return new types.TimeUuid(this.handleBuffer(bytes)); + }; + this.decodeInet = function (bytes) { + return new types.InetAddress(this.handleBuffer(bytes)); + }; + /** + * Decodes a user defined type into an object + * @param {Buffer} bytes + * @param {{fields: Array}} udtInfo + * @private + */ + this.decodeUdt = function (bytes, udtInfo) { + const result = {}; + let offset = 0; + for (let i = 0; i < udtInfo.fields.length && offset < bytes.length; i++) { + //bytes length of the field value + const length = bytes.readInt32BE(offset); + offset += 4; + //slice it + const field = udtInfo.fields[i]; + if (length < 0) { + result[field.name] = null; + continue; + } + result[field.name] = this.decode(bytes.slice(offset, offset+length), field.type); + offset += length; + } + return result; + }; + + this.decodeTuple = function (bytes, tupleInfo) { + const elements = new Array(tupleInfo.length); + let offset = 0; + + for (let i = 0; i < tupleInfo.length && offset < bytes.length; i++) { + const length = bytes.readInt32BE(offset); + offset += 4; + + if (length < 0) { + elements[i] = null; + continue; + } + + elements[i] = this.decode(bytes.slice(offset, offset+length), tupleInfo[i]); + offset += length; + } + + return types.Tuple.fromArray(elements); + }; + + //Encoding methods + this.encodeFloat = function (value) { + if (typeof value === 'string') { + // All numeric types are supported as strings for historical reasons + value = parseFloat(value); + + if (Number.isNaN(value)) { + throw new 
TypeError(`Expected string representation of a number, obtained ${util.inspect(value)}`); + } + } + + if (typeof value !== 'number') { + throw new TypeError('Expected Number, obtained ' + util.inspect(value)); + } + + const buf = utils.allocBufferUnsafe(4); + buf.writeFloatBE(value, 0); + return buf; + }; + + this.encodeDouble = function (value) { + if (typeof value === 'string') { + // All numeric types are supported as strings for historical reasons + value = parseFloat(value); + + if (Number.isNaN(value)) { + throw new TypeError(`Expected string representation of a number, obtained ${util.inspect(value)}`); + } + } + + if (typeof value !== 'number') { + throw new TypeError('Expected Number, obtained ' + util.inspect(value)); + } + + const buf = utils.allocBufferUnsafe(8); + buf.writeDoubleBE(value, 0); + return buf; + }; + + /** + * @param {Date|String|Long|Number} value + * @private + */ + this.encodeTimestamp = function (value) { + const originalValue = value; + if (typeof value === 'string') { + value = new Date(value); + } + if (value instanceof Date) { + //milliseconds since epoch + value = value.getTime(); + if (isNaN(value)) { + throw new TypeError('Invalid date: ' + originalValue); + } + } + if (this.encodingOptions.useBigIntAsLong) { + value = BigInt(value); + } + return this.encodeLong(value); + }; + /** + * @param {Date|String|LocalDate} value + * @returns {Buffer} + * @throws {TypeError} + * @private + */ + this.encodeDate = function (value) { + const originalValue = value; + try { + if (typeof value === 'string') { + value = types.LocalDate.fromString(value); + } + if (value instanceof Date) { + value = types.LocalDate.fromDate(value); + } + } + catch (err) { + //Wrap into a TypeError + throw new TypeError('LocalDate could not be parsed ' + err); + } + if (!(value instanceof types.LocalDate)) { + throw new TypeError('Expected Date/String/LocalDate, obtained ' + util.inspect(originalValue)); + } + return value.toBuffer(); + }; + /** + * @param 
{String|LocalDate} value + * @returns {Buffer} + * @throws {TypeError} + * @private + */ + this.encodeTime = function (value) { + const originalValue = value; + try { + if (typeof value === 'string') { + value = types.LocalTime.fromString(value); + } + } + catch (err) { + //Wrap into a TypeError + throw new TypeError('LocalTime could not be parsed ' + err); + } + if (!(value instanceof types.LocalTime)) { + throw new TypeError('Expected String/LocalTime, obtained ' + util.inspect(originalValue)); + } + return value.toBuffer(); + }; + /** + * @param {Uuid|String|Buffer} value + * @private + */ + this.encodeUuid = function (value) { + if (typeof value === 'string') { + try { + value = types.Uuid.fromString(value).getBuffer(); + } + catch (err) { + throw new TypeError(err.message); + } + } else if (value instanceof types.Uuid) { + value = value.getBuffer(); + } else { + throw new TypeError('Not a valid Uuid, expected Uuid/String/Buffer, obtained ' + util.inspect(value)); + } + + return value; + }; + /** + * @param {String|InetAddress|Buffer} value + * @returns {Buffer} + * @private + */ + this.encodeInet = function (value) { + if (typeof value === 'string') { + value = types.InetAddress.fromString(value); + } + if (value instanceof types.InetAddress) { + value = value.getBuffer(); + } + if (!(value instanceof Buffer)) { + throw new TypeError('Not a valid Inet, expected InetAddress/Buffer, obtained ' + util.inspect(value)); + } + return value; + }; + + /** + * @param {Long|Buffer|String|Number} value + * @private + */ + this._encodeBigIntFromLong = function (value) { + if (typeof value === 'number') { + value = Long.fromNumber(value); + } else if (typeof value === 'string') { + value = Long.fromString(value); + } + + let buf = null; + + if (value instanceof Long) { + buf = Long.toBuffer(value); + } else if (value instanceof MutableLong) { + buf = Long.toBuffer(value.toImmutable()); + } + + if (buf === null) { + throw new TypeError('Not a valid bigint, expected 
Long/Number/String/Buffer, obtained ' + util.inspect(value)); + } + + return buf; + }; + + this._encodeBigIntFromBigInt = function (value) { + if (typeof value === 'string') { + // All numeric types are supported as strings for historical reasons + value = BigInt(value); + } + + // eslint-disable-next-line valid-typeof + if (typeof value !== 'bigint') { + // Only BigInt values are supported + throw new TypeError('Not a valid BigInt value, obtained ' + util.inspect(value)); + } + + const buffer = utils.allocBufferUnsafe(8); + buffer.writeUInt32BE(Number(value >> bigInt32) >>> 0, 0); + buffer.writeUInt32BE(Number(value & bigInt32BitsOn), 4); + return buffer; + }; + + this.encodeLong = this.encodingOptions.useBigIntAsLong + ? this._encodeBigIntFromBigInt + : this._encodeBigIntFromLong; + + /** + * @param {Integer|Buffer|String|Number} value + * @returns {Buffer} + * @private + */ + this._encodeVarintFromInteger = function (value) { + if (typeof value === 'number') { + value = Integer.fromNumber(value); + } + if (typeof value === 'string') { + value = Integer.fromString(value); + } + let buf = null; + if (value instanceof Buffer) { + buf = value; + } + if (value instanceof Integer) { + buf = Integer.toBuffer(value); + } + if (buf === null) { + throw new TypeError('Not a valid varint, expected Integer/Number/String/Buffer, obtained ' + util.inspect(value)); + } + return buf; + }; + + this._encodeVarintFromBigInt = function (value) { + if (typeof value === 'string') { + // All numeric types are supported as strings for historical reasons + value = BigInt(value); + } + + // eslint-disable-next-line valid-typeof + if (typeof value !== 'bigint') { + throw new TypeError('Not a valid varint, expected BigInt, obtained ' + util.inspect(value)); + } + + if (value === bigInt0) { + return buffers.int8Zero; + + } + else if (value === bigIntMinus1) { + return buffers.int8MaxValue; + } + + const parts = []; + + if (value > bigInt0){ + while (value !== bigInt0) { + 
parts.unshift(Number(value & bigInt8BitsOn)); + value = value >> bigInt8; + } + + if (parts[0] > 0x7f) { + // Positive value needs a padding + parts.unshift(0); + } + } else { + while (value !== bigIntMinus1) { + parts.unshift(Number(value & bigInt8BitsOn)); + value = value >> bigInt8; + } + + if (parts[0] <= 0x7f) { + // Negative value needs a padding + parts.unshift(0xff); + } + } + + return utils.allocBufferFromArray(parts); + }; + + this.encodeVarint = this.encodingOptions.useBigIntAsVarint + ? this._encodeVarintFromBigInt + : this._encodeVarintFromInteger; + + /** + * @param {BigDecimal|Buffer|String|Number} value + * @returns {Buffer} + * @private + */ + this.encodeDecimal = function (value) { + if (typeof value === 'number') { + value = BigDecimal.fromNumber(value); + } else if (typeof value === 'string') { + value = BigDecimal.fromString(value); + } + + let buf = null; + + if (value instanceof BigDecimal) { + buf = BigDecimal.toBuffer(value); + } else { + throw new TypeError('Not a valid varint, expected BigDecimal/Number/String/Buffer, obtained ' + util.inspect(value)); + } + + return buf; + }; + this.encodeString = function (value, encoding) { + if (typeof value !== 'string') { + throw new TypeError('Not a valid text value, expected String obtained ' + util.inspect(value)); + } + return utils.allocBufferFromString(value, encoding); + }; + this.encodeUtf8String = function (value) { + return this.encodeString(value, 'utf8'); + }; + this.encodeAsciiString = function (value) { + return this.encodeString(value, 'ascii'); + }; + this.encodeBlob = function (value) { + if (!(value instanceof Buffer)) { + throw new TypeError('Not a valid blob, expected Buffer obtained ' + util.inspect(value)); + } + return value; + }; + this.encodeCustom = function (value, name) { + const handler = customEncoders[name]; + if (handler) { + return handler.call(this, value); + } + throw new TypeError('No encoding handler found for type ' + name); + }; + /** + * @param {Boolean} value 
+ * @returns {Buffer} + * @private + */ + this.encodeBoolean = function (value) { + return value ? buffers.int8One : buffers.int8Zero; + }; + /** + * @param {Number|String} value + * @private + */ + this.encodeInt = function (value) { + if (isNaN(value)) { + throw new TypeError('Expected Number, obtained ' + util.inspect(value)); + } + const buf = utils.allocBufferUnsafe(4); + buf.writeInt32BE(value, 0); + return buf; + }; + /** + * @param {Number|String} value + * @private + */ + this.encodeSmallint = function (value) { + if (isNaN(value)) { + throw new TypeError('Expected Number, obtained ' + util.inspect(value)); + } + const buf = utils.allocBufferUnsafe(2); + buf.writeInt16BE(value, 0); + return buf; + }; + /** + * @param {Number|String} value + * @private + */ + this.encodeTinyint = function (value) { + if (isNaN(value)) { + throw new TypeError('Expected Number, obtained ' + util.inspect(value)); + } + const buf = utils.allocBufferUnsafe(1); + buf.writeInt8(value, 0); + return buf; + }; + this.encodeList = function (value, subtype) { + if (!Array.isArray(value)) { + throw new TypeError('Not a valid list value, expected Array obtained ' + util.inspect(value)); + } + if (value.length === 0) { + return null; + } + const parts = []; + parts.push(this.getLengthBuffer(value)); + for (let i = 0;i < value.length;i++) { + const val = value[i]; + if (val === null || typeof val === 'undefined' || val === types.unset) { + throw new TypeError('A collection can\'t contain null or unset values'); + } + const bytes = this.encode(val, subtype); + //include item byte length + parts.push(this.getLengthBuffer(bytes)); + //include item + parts.push(bytes); + } + return Buffer.concat(parts); + }; + this.encodeSet = function (value, subtype) { + if (this.encodingOptions.set && value instanceof this.encodingOptions.set) { + const arr = []; + value.forEach(function (x) { + arr.push(x); + }); + return this.encodeList(arr, subtype); + } + return this.encodeList(value, subtype); + }; + 
/** + * Serializes a map into a Buffer + * @param value + * @param {Array} [subtypes] + * @returns {Buffer} + * @private + */ + this.encodeMap = function (value, subtypes) { + const parts = []; + let propCounter = 0; + let keySubtype = null; + let valueSubtype = null; + const self = this; + if (subtypes) { + keySubtype = subtypes[0]; + valueSubtype = subtypes[1]; + } + function addItem(val, key) { + if (key === null || typeof key === 'undefined' || key === types.unset) { + throw new TypeError('A map can\'t contain null or unset keys'); + } + if (val === null || typeof val === 'undefined' || val === types.unset) { + throw new TypeError('A map can\'t contain null or unset values'); + } + const keyBuffer = self.encode(key, keySubtype); + //include item byte length + parts.push(self.getLengthBuffer(keyBuffer)); + //include item + parts.push(keyBuffer); + //value + const valueBuffer = self.encode(val, valueSubtype); + //include item byte length + parts.push(self.getLengthBuffer(valueBuffer)); + //include item + if (valueBuffer !== null) { + parts.push(valueBuffer); + } + propCounter++; + } + if (this.encodingOptions.map && value instanceof this.encodingOptions.map) { + //Use Map#forEach() method to iterate + value.forEach(addItem); + } + else { + //Use object + for (const key in value) { + if (!value.hasOwnProperty(key)) { + continue; + } + const val = value[key]; + addItem(val, key); + } + } + + parts.unshift(this.getLengthBuffer(propCounter)); + return Buffer.concat(parts); + }; + this.encodeUdt = function (value, udtInfo) { + const parts = []; + let totalLength = 0; + for (let i = 0; i < udtInfo.fields.length; i++) { + const field = udtInfo.fields[i]; + const item = this.encode(value[field.name], field.type); + if (!item) { + parts.push(nullValueBuffer); + totalLength += 4; + continue; + } + if (item === types.unset) { + parts.push(unsetValueBuffer); + totalLength += 4; + continue; + } + const lengthBuffer = utils.allocBufferUnsafe(4); + 
lengthBuffer.writeInt32BE(item.length, 0); + parts.push(lengthBuffer); + parts.push(item); + totalLength += item.length + 4; + } + return Buffer.concat(parts, totalLength); + }; + this.encodeTuple = function (value, tupleInfo) { + const parts = []; + let totalLength = 0; + const length = Math.min(tupleInfo.length, value.length); + + for (let i = 0; i < length; i++) { + const type = tupleInfo[i]; + const item = this.encode(value.get(i), type); + + if (!item) { + parts.push(nullValueBuffer); + totalLength += 4; + continue; + } + + if (item === types.unset) { + parts.push(unsetValueBuffer); + totalLength += 4; + continue; + } + + const lengthBuffer = utils.allocBufferUnsafe(4); + lengthBuffer.writeInt32BE(item.length, 0); + parts.push(lengthBuffer); + parts.push(item); + totalLength += item.length + 4; + } + + return Buffer.concat(parts, totalLength); + }; + + /** + * If not provided, it uses the array of buffers or the parameters and hints to build the routingKey + * @param {Array} params + * @param {ExecutionOptions} execOptions + * @param [keys] parameter keys and positions in the params array + * @throws TypeError + * @internal + * @ignore + */ + this.setRoutingKeyFromUser = function (params, execOptions, keys) { + let totalLength = 0; + const userRoutingKey = execOptions.getRoutingKey(); + if (Array.isArray(userRoutingKey)) { + if (userRoutingKey.length === 1) { + execOptions.setRoutingKey(userRoutingKey[0]); + return; + } + + // Its a composite routing key + totalLength = 0; + for (let i = 0; i < userRoutingKey.length; i++) { + const item = userRoutingKey[i]; + if (!item) { + // Invalid routing key part provided by the user, clear the value + execOptions.setRoutingKey(null); + return; + } + totalLength += item.length + 3; + } + + execOptions.setRoutingKey(concatRoutingKey(userRoutingKey, totalLength)); + return; + } + // If routingKey is present, ensure it is a Buffer, Token, or TokenRange. Otherwise throw an error. 
+ if (userRoutingKey) { + if (userRoutingKey instanceof Buffer || userRoutingKey instanceof token.Token + || userRoutingKey instanceof token.TokenRange) { + return; + } + + throw new TypeError(`Unexpected routingKey '${util.inspect(userRoutingKey)}' provided. ` + + `Expected Buffer, Array, Token, or TokenRange.`); + } + + // If no params are present, return as routing key cannot be determined. + if (!params || params.length === 0) { + return; + } + + let routingIndexes = execOptions.getRoutingIndexes(); + if (execOptions.getRoutingNames()) { + routingIndexes = execOptions.getRoutingNames().map(k => keys[k]); + } + if (!routingIndexes) { + return; + } + + const parts = []; + const hints = execOptions.getHints() || utils.emptyArray; + + const encodeParam = !keys ? + (i => this.encode(params[i], hints[i])) : + (i => this.encode(params[i].value, hints[i])); + + try { + totalLength = this._encodeRoutingKeyParts(parts, routingIndexes, encodeParam); + } catch (e) { + // There was an error encoding a parameter that is part of the routing key, + // ignore now to fail afterwards + } + + if (totalLength === 0) { + return; + } + + execOptions.setRoutingKey(concatRoutingKey(parts, totalLength)); + }; + + /** + * Sets the routing key in the options based on the prepared statement metadata. + * @param {Object} meta Prepared metadata + * @param {Array} params Array of parameters + * @param {ExecutionOptions} execOptions + * @throws TypeError + * @internal + * @ignore + */ + this.setRoutingKeyFromMeta = function (meta, params, execOptions) { + const routingIndexes = execOptions.getRoutingIndexes(); + if (!routingIndexes) { + return; + } + const parts = new Array(routingIndexes.length); + const encodeParam = i => { + const columnInfo = meta.columns[i]; + return this.encode(params[i], columnInfo ? 
columnInfo.type : null); + }; + + let totalLength = 0; + + try { + totalLength = this._encodeRoutingKeyParts(parts, routingIndexes, encodeParam); + } catch (e) { + // There was an error encoding a parameter that is part of the routing key, + // ignore now to fail afterwards + } + + if (totalLength === 0) { + return; + } + + execOptions.setRoutingKey(concatRoutingKey(parts, totalLength)); + }; + + /** + * @param {Array} parts + * @param {Array} routingIndexes + * @param {Function} encodeParam + * @returns {Number} The total length + * @private + */ + this._encodeRoutingKeyParts = function (parts, routingIndexes, encodeParam) { + let totalLength = 0; + for (let i = 0; i < routingIndexes.length; i++) { + const paramIndex = routingIndexes[i]; + if (paramIndex === undefined) { + // Bad input from the user, ignore + return 0; + } + + const item = encodeParam(paramIndex); + if (item === null || item === undefined || item === types.unset) { + // The encoded partition key should an instance of Buffer + // Let it fail later in the pipeline for null/undefined parameter values + return 0; + } + + // Per each part of the routing key, 3 extra bytes are needed + totalLength += item.length + 3; + parts[i] = item; + } + return totalLength; + }; + + /** + * Parses a CQL name string into data type information + * @param {String} keyspace + * @param {String} typeName + * @param {Number} startIndex + * @param {Number|null} length + * @param {Function} udtResolver + * @returns {Promise<{err, info, options}>} callback Callback invoked with err and {{code: number, info: Object|Array|null, options: {frozen: Boolean}}} + * @internal + * @ignore + */ + this.parseTypeName = async function (keyspace, typeName, startIndex, length, udtResolver) { + startIndex = startIndex || 0; + if (!length) { + length = typeName.length; + } + + const dataType = { + code: 0, + info: null, + options: { + frozen: false + } + }; + + let innerTypes; + + if (typeName.indexOf("'", startIndex) === startIndex) { + //If 
quoted, this is a custom type. + dataType.info = typeName.substr(startIndex+1, length-2); + return dataType; + } + + if (!length) { + length = typeName.length; + } + + if (typeName.indexOf(cqlNames.frozen, startIndex) === startIndex) { + //Remove the frozen token + startIndex += cqlNames.frozen.length + 1; + length -= cqlNames.frozen.length + 2; + dataType.options.frozen = true; + } + + if (typeName.indexOf(cqlNames.list, startIndex) === startIndex) { + //move cursor across the name and bypass the angle brackets + startIndex += cqlNames.list.length + 1; + length -= cqlNames.list.length + 2; + innerTypes = parseParams(typeName, startIndex, length, '<', '>'); + + if (innerTypes.length !== 1) { + throw new TypeError('Not a valid type ' + typeName); + } + + dataType.code = dataTypes.list; + dataType.info = await this.parseTypeName(keyspace, innerTypes[0], 0, null, udtResolver); + return dataType; + } + + if (typeName.indexOf(cqlNames.set, startIndex) === startIndex) { + //move cursor across the name and bypass the angle brackets + startIndex += cqlNames.set.length + 1; + length -= cqlNames.set.length + 2; + innerTypes = parseParams(typeName, startIndex, length, '<', '>'); + + if (innerTypes.length !== 1) { + throw new TypeError('Not a valid type ' + typeName); + } + + dataType.code = dataTypes.set; + dataType.info = await this.parseTypeName(keyspace, innerTypes[0], 0, null, udtResolver); + return dataType; + } + + if (typeName.indexOf(cqlNames.map, startIndex) === startIndex) { + //move cursor across the name and bypass the angle brackets + startIndex += cqlNames.map.length + 1; + length -= cqlNames.map.length + 2; + innerTypes = parseParams(typeName, startIndex, length, '<', '>'); + + //It should contain the key and value types + if (innerTypes.length !== 2) { + throw new TypeError('Not a valid type ' + typeName); + } + + dataType.code = dataTypes.map; + dataType.info = await this._parseChildTypes(keyspace, innerTypes, udtResolver); + return dataType; + } + + if 
(typeName.indexOf(cqlNames.tuple, startIndex) === startIndex) { + //move cursor across the name and bypass the angle brackets + startIndex += cqlNames.tuple.length + 1; + length -= cqlNames.tuple.length + 2; + innerTypes = parseParams(typeName, startIndex, length, '<', '>'); + + if (innerTypes.length < 1) { + throw new TypeError('Not a valid type ' + typeName); + } + + dataType.code = dataTypes.tuple; + dataType.info = await this._parseChildTypes(keyspace, innerTypes, udtResolver); + return dataType; + } + + const quoted = typeName.indexOf('"', startIndex) === startIndex; + if (quoted) { + // Remove quotes + startIndex++; + length -= 2; + } + + // Quick check if its a single type + if (startIndex > 0) { + typeName = typeName.substr(startIndex, length); + } + + // Un-escape double quotes if quoted. + if (quoted) { + typeName = typeName.replace('""', '"'); + } + + const typeCode = dataTypes[typeName]; + if (typeof typeCode === 'number') { + dataType.code = typeCode; + return dataType; + } + + if (typeName === cqlNames.duration) { + dataType.info = customTypeNames.duration; + return dataType; + } + + if (typeName === cqlNames.empty) { + // Set as custom + dataType.info = 'empty'; + return dataType; + } + + const udtInfo = await udtResolver(keyspace, typeName); + if (udtInfo) { + dataType.code = dataTypes.udt; + dataType.info = udtInfo; + return dataType; + } + + throw new TypeError('Not a valid type "' + typeName + '"'); + }; + + /** + * @param {String} keyspace + * @param {Array} typeNames + * @param {Function} udtResolver + * @returns {Promise} + * @private + */ + this._parseChildTypes = function (keyspace, typeNames, udtResolver) { + return Promise.all(typeNames.map(name => this.parseTypeName(keyspace, name.trim(), 0, null, udtResolver))); + }; + + /** + * Parses a Cassandra fully-qualified class name string into data type information + * @param {String} typeName + * @param {Number} [startIndex] + * @param {Number} [length] + * @throws TypeError + * @returns 
{{code: number, info: Object|Array|null, options: {frozen: Boolean, reversed: Boolean}}} + * @internal + * @ignore + */ + this.parseFqTypeName = function (typeName, startIndex, length) { + const dataType = { + code: 0, + info: null, + options: { + reversed: false, + frozen: false + } + }; + startIndex = startIndex || 0; + let innerTypes; + if (!length) { + length = typeName.length; + } + if (length > complexTypeNames.reversed.length && typeName.indexOf(complexTypeNames.reversed) === startIndex) { + //Remove the reversed token + startIndex += complexTypeNames.reversed.length + 1; + length -= complexTypeNames.reversed.length + 2; + dataType.options.reversed = true; + } + if (length > complexTypeNames.frozen.length && + typeName.indexOf(complexTypeNames.frozen, startIndex) === startIndex) { + //Remove the frozen token + startIndex += complexTypeNames.frozen.length + 1; + length -= complexTypeNames.frozen.length + 2; + dataType.options.frozen = true; + } + if (typeName === complexTypeNames.empty) { + //set as custom + dataType.info = 'empty'; + return dataType; + } + //Quick check if its a single type + if (length <= singleFqTypeNamesLength) { + if (startIndex > 0) { + typeName = typeName.substr(startIndex, length); + } + const typeCode = singleTypeNames[typeName]; + if (typeof typeCode === 'number') { + dataType.code = typeCode; + return dataType; + } + throw new TypeError('Not a valid type "' + typeName + '"'); + } + if (typeName.indexOf(complexTypeNames.list, startIndex) === startIndex) { + //Its a list + //org.apache.cassandra.db.marshal.ListType(innerType) + //move cursor across the name and bypass the parenthesis + startIndex += complexTypeNames.list.length + 1; + length -= complexTypeNames.list.length + 2; + innerTypes = parseParams(typeName, startIndex, length); + if (innerTypes.length !== 1) { + throw new TypeError('Not a valid type ' + typeName); + } + dataType.code = dataTypes.list; + dataType.info = this.parseFqTypeName(innerTypes[0]); + return dataType; + 
} + if (typeName.indexOf(complexTypeNames.set, startIndex) === startIndex) { + //Its a set + //org.apache.cassandra.db.marshal.SetType(innerType) + //move cursor across the name and bypass the parenthesis + startIndex += complexTypeNames.set.length + 1; + length -= complexTypeNames.set.length + 2; + innerTypes = parseParams(typeName, startIndex, length); + if (innerTypes.length !== 1) + { + throw new TypeError('Not a valid type ' + typeName); + } + dataType.code = dataTypes.set; + dataType.info = this.parseFqTypeName(innerTypes[0]); + return dataType; + } + if (typeName.indexOf(complexTypeNames.map, startIndex) === startIndex) { + //org.apache.cassandra.db.marshal.MapType(keyType,valueType) + //move cursor across the name and bypass the parenthesis + startIndex += complexTypeNames.map.length + 1; + length -= complexTypeNames.map.length + 2; + innerTypes = parseParams(typeName, startIndex, length); + //It should contain the key and value types + if (innerTypes.length !== 2) { + throw new TypeError('Not a valid type ' + typeName); + } + dataType.code = dataTypes.map; + dataType.info = [this.parseFqTypeName(innerTypes[0]), this.parseFqTypeName(innerTypes[1])]; + return dataType; + } + if (typeName.indexOf(complexTypeNames.udt, startIndex) === startIndex) { + //move cursor across the name and bypass the parenthesis + startIndex += complexTypeNames.udt.length + 1; + length -= complexTypeNames.udt.length + 2; + return this._parseUdtName(typeName, startIndex, length); + } + if (typeName.indexOf(complexTypeNames.tuple, startIndex) === startIndex) { + //move cursor across the name and bypass the parenthesis + startIndex += complexTypeNames.tuple.length + 1; + length -= complexTypeNames.tuple.length + 2; + innerTypes = parseParams(typeName, startIndex, length); + if (innerTypes.length < 1) { + throw new TypeError('Not a valid type ' + typeName); + } + dataType.code = dataTypes.tuple; + dataType.info = innerTypes.map(x => this.parseFqTypeName(x)); + return dataType; + } + + 
// Assume custom type if cannot be parsed up to this point. + dataType.info = typeName.substr(startIndex, length); + return dataType; + }; + /** + * Parses type names with composites + * @param {String} typesString + * @returns {{types: Array, isComposite: Boolean, hasCollections: Boolean}} + * @internal + * @ignore + */ + this.parseKeyTypes = function (typesString) { + let i = 0; + let length = typesString.length; + const isComposite = typesString.indexOf(complexTypeNames.composite) === 0; + if (isComposite) { + i = complexTypeNames.composite.length + 1; + length--; + } + const types = []; + let startIndex = i; + let nested = 0; + let inCollectionType = false; + let hasCollections = false; + //as collection types are not allowed, it is safe to split by , + while (++i < length) { + switch (typesString[i]) { + case ',': + if (nested > 0) { + break; + } + if (inCollectionType) { + //remove type id + startIndex = typesString.indexOf(':', startIndex) + 1; + } + types.push(typesString.substring(startIndex, i)); + startIndex = i + 1; + break; + case '(': + if (nested === 0 && typesString.indexOf(complexTypeNames.collection, startIndex) === startIndex) { + inCollectionType = true; + hasCollections = true; + //skip collection type + i++; + startIndex = i; + break; + } + nested++; + break; + case ')': + if (inCollectionType && nested === 0){ + types.push(typesString.substring(typesString.indexOf(':', startIndex) + 1, i)); + startIndex = i + 1; + break; + } + nested--; + break; + } + } + if (startIndex < length) { + types.push(typesString.substring(startIndex, length)); + } + return { + types: types.map(name => this.parseFqTypeName(name)), + hasCollections: hasCollections, + isComposite: isComposite + }; + }; + this._parseUdtName = function (typeName, startIndex, length) { + const udtParams = parseParams(typeName, startIndex, length); + if (udtParams.length < 2) { + //It should contain at least the keyspace, name of the udt and a type + throw new TypeError('Not a valid type 
' + typeName); + } + const dataType = { + code: dataTypes.udt, + info: null + }; + const udtInfo = { + keyspace: udtParams[0], + name: utils.allocBufferFromString(udtParams[1], 'hex').toString(), + fields: [] + }; + for (let i = 2; i < udtParams.length; i++) { + const p = udtParams[i]; + const separatorIndex = p.indexOf(':'); + const fieldType = this.parseFqTypeName(p, separatorIndex + 1, p.length - (separatorIndex + 1)); + udtInfo.fields.push({ + name: utils.allocBufferFromString(p.substr(0, separatorIndex), 'hex').toString(), + type: fieldType + }); + } + dataType.info = udtInfo; + return dataType; + }; +} + +/** + * Sets the encoder and decoder methods for this instance + * @private + */ +function setEncoders() { + this.decoders = { + [dataTypes.custom]: this.decodeCustom, + [dataTypes.ascii]: this.decodeAsciiString, + [dataTypes.bigint]: this.decodeLong, + [dataTypes.blob]: this.decodeBlob, + [dataTypes.boolean]: this.decodeBoolean, + [dataTypes.counter]: this.decodeLong, + [dataTypes.decimal]: this.decodeDecimal, + [dataTypes.double]: this.decodeDouble, + [dataTypes.float]: this.decodeFloat, + [dataTypes.int]: this.decodeInt, + [dataTypes.text]: this.decodeUtf8String, + [dataTypes.timestamp]: this.decodeTimestamp, + [dataTypes.uuid]: this.decodeUuid, + [dataTypes.varchar]: this.decodeUtf8String, + [dataTypes.varint]: this.decodeVarint, + [dataTypes.timeuuid]: this.decodeTimeUuid, + [dataTypes.inet]: this.decodeInet, + [dataTypes.date]: this.decodeDate, + [dataTypes.time]: this.decodeTime, + [dataTypes.smallint]: this.decodeSmallint, + [dataTypes.tinyint]: this.decodeTinyint, + [dataTypes.duration]: decodeDuration, + [dataTypes.list]: this.decodeList, + [dataTypes.map]: this.decodeMap, + [dataTypes.set]: this.decodeSet, + [dataTypes.udt]: this.decodeUdt, + [dataTypes.tuple]: this.decodeTuple + }; + + this.encoders = { + [dataTypes.custom]: this.encodeCustom, + [dataTypes.ascii]: this.encodeAsciiString, + [dataTypes.bigint]: this.encodeLong, + [dataTypes.blob]: 
this.encodeBlob, + [dataTypes.boolean]: this.encodeBoolean, + [dataTypes.counter]: this.encodeLong, + [dataTypes.decimal]: this.encodeDecimal, + [dataTypes.double]: this.encodeDouble, + [dataTypes.float]: this.encodeFloat, + [dataTypes.int]: this.encodeInt, + [dataTypes.text]: this.encodeUtf8String, + [dataTypes.timestamp]: this.encodeTimestamp, + [dataTypes.uuid]: this.encodeUuid, + [dataTypes.varchar]: this.encodeUtf8String, + [dataTypes.varint]: this.encodeVarint, + [dataTypes.timeuuid]: this.encodeUuid, + [dataTypes.inet]: this.encodeInet, + [dataTypes.date]: this.encodeDate, + [dataTypes.time]: this.encodeTime, + [dataTypes.smallint]: this.encodeSmallint, + [dataTypes.tinyint]: this.encodeTinyint, + [dataTypes.duration]: encodeDuration, + [dataTypes.list]: this.encodeList, + [dataTypes.map]: this.encodeMap, + [dataTypes.set]: this.encodeSet, + [dataTypes.udt]: this.encodeUdt, + [dataTypes.tuple]: this.encodeTuple + }; +} + +/** + * Decodes Cassandra bytes into Javascript values. + *

+ * This is part of an experimental API, this can be changed future releases. + *

+ * @param {Buffer} buffer Raw buffer to be decoded. + * @param {Object} type An object containing the data type code and info. + * @param {Number} type.code Type code. + * @param {Object} [type.info] Additional information on the type for complex / nested types. + */ +Encoder.prototype.decode = function (buffer, type) { + if (buffer === null || (buffer.length === 0 && !zeroLengthTypesSupported.has(type.code))) { + return null; + } + + const decoder = this.decoders[type.code]; + + if (!decoder) { + throw new Error('Unknown data type: ' + type.code); + } + + return decoder.call(this, buffer, type.info); +}; + +/** + * Encodes Javascript types into Buffer according to the Cassandra protocol. + *

+ * This is part of an experimental API, this can be changed future releases. + *

+ * @param {*} value The value to be converted. + * @param {{code: number, info: *|Object}|String|Number} [typeInfo] The type information. + *

It can be either a:

+ *
    + *
  • A String representing the data type.
  • + *
  • A Number with one of the values of {@link module:types~dataTypes dataTypes}.
  • + *
  • An Object containing the type.code as one of the values of + * {@link module:types~dataTypes dataTypes} and type.info. + *
  • + *
+ * @returns {Buffer} + * @throws {TypeError} When there is an encoding error + */ +Encoder.prototype.encode = function (value, typeInfo) { + if (value === undefined) { + value = this.encodingOptions.useUndefinedAsUnset && this.protocolVersion >= 4 ? types.unset : null; + } + + if (value === types.unset) { + if (!types.protocolVersion.supportsUnset(this.protocolVersion)) { + throw new TypeError('Unset value can not be used for this version of Cassandra, protocol version: ' + + this.protocolVersion); + } + + return value; + } + + if (value === null || value instanceof Buffer) { + return value; + } + + /** @type {{code: Number, info: object}} */ + let type = { + code: null, + info: null + }; + + if (typeInfo) { + if (typeof typeInfo === 'number') { + type.code = typeInfo; + } + else if (typeof typeInfo === 'string') { + type = dataTypes.getByName(typeInfo); + } + if (typeof typeInfo.code === 'number') { + type.code = typeInfo.code; + type.info = typeInfo.info; + } + if (typeof type.code !== 'number') { + throw new TypeError('Type information not valid, only String and Number values are valid hints'); + } + } + else { + //Lets guess + type = Encoder.guessDataType(value); + if (!type) { + throw new TypeError('Target data type could not be guessed, you should use prepared statements for accurate type mapping. 
Value: ' + util.inspect(value)); + } + } + + const encoder = this.encoders[type.code]; + + if (!encoder) { + throw new Error('Type not supported ' + type.code); + } + + return encoder.call(this, value, type.info); +}; + +/** + * Try to guess the Cassandra type to be stored, based on the javascript value type + * @param value + * @returns {{code: number, info: object}|null} + * @ignore + * @internal + */ +Encoder.guessDataType = function (value) { + let code = null; + let info = null; + const esTypeName = (typeof value); + if (esTypeName === 'number') { + code = dataTypes.double; + } + else if (esTypeName === 'string') { + code = dataTypes.text; + if (value.length === 36 && uuidRegex.test(value)){ + code = dataTypes.uuid; + } + } + else if (esTypeName === 'boolean') { + code = dataTypes.boolean; + } + else if (value instanceof Buffer) { + code = dataTypes.blob; + } + else if (value instanceof Date) { + code = dataTypes.timestamp; + } + else if (value instanceof Long) { + code = dataTypes.bigint; + } + else if (value instanceof Integer) { + code = dataTypes.varint; + } + else if (value instanceof BigDecimal) { + code = dataTypes.decimal; + } + else if (value instanceof types.Uuid) { + code = dataTypes.uuid; + } + else if (value instanceof types.InetAddress) { + code = dataTypes.inet; + } + else if (value instanceof types.Tuple) { + code = dataTypes.tuple; + } + else if (value instanceof types.LocalDate) { + code = dataTypes.date; + } + else if (value instanceof types.LocalTime) { + code = dataTypes.time; + } + else if (value instanceof types.Duration) { + code = dataTypes.custom; + info = customTypeNames.duration; + } + else if (Array.isArray(value)) { + code = dataTypes.list; + } + else if (value instanceof Geometry) { + code = dataTypes.custom; + if (value instanceof LineString) { + info = customTypeNames.lineString; + } else if (value instanceof Point) { + info = customTypeNames.point; + } else if (value instanceof Polygon) { + info = customTypeNames.polygon; + } 
+ } + else if (value instanceof DateRange) { + code = dataTypes.custom; + info = customTypeNames.dateRange; + } + + if (code === null) { + return null; + } + return { code: code, info: info }; +}; + +/** + * Gets a buffer containing with the bytes (BE) representing the collection length for protocol v2 and below + * @param {Buffer|Number} value + * @returns {Buffer} + * @private + */ +function getLengthBufferV2(value) { + if (!value) { + return buffers.int16Zero; + } + const lengthBuffer = utils.allocBufferUnsafe(2); + if (typeof value === 'number') { + lengthBuffer.writeUInt16BE(value, 0); + } + else { + lengthBuffer.writeUInt16BE(value.length, 0); + } + return lengthBuffer; +} + +/** + * Gets a buffer containing with the bytes (BE) representing the collection length for protocol v3 and above + * @param {Buffer|Number} value + * @returns {Buffer} + * @private + */ +function getLengthBufferV3(value) { + if (!value) { + return buffers.int32Zero; + } + const lengthBuffer = utils.allocBufferUnsafe(4); + if (typeof value === 'number') { + lengthBuffer.writeInt32BE(value, 0); + } + else { + lengthBuffer.writeInt32BE(value.length, 0); + } + return lengthBuffer; +} + +/** + * @param {Buffer} buffer + * @private + */ +function handleBufferCopy(buffer) { + if (buffer === null) { + return null; + } + return utils.copyBuffer(buffer); +} + +/** + * @param {Buffer} buffer + * @private + */ +function handleBufferRef(buffer) { + return buffer; +} +/** + * Decodes collection length for protocol v3 and above + * @param bytes + * @param offset + * @returns {Number} + * @private + */ +function decodeCollectionLengthV3(bytes, offset) { + return bytes.readInt32BE(offset); +} +/** + * Decodes collection length for protocol v2 and below + * @param bytes + * @param offset + * @returns {Number} + * @private + */ +function decodeCollectionLengthV2(bytes, offset) { + return bytes.readUInt16BE(offset); +} + +function decodeDuration(bytes) { + return types.Duration.fromBuffer(bytes); +} + 
+function encodeDuration(value) { + if (!(value instanceof types.Duration)) { + throw new TypeError('Not a valid duration, expected Duration/Buffer obtained ' + util.inspect(value)); + } + return value.toBuffer(); +} + +/** + * @private + * @param {Buffer} buffer + */ +function decodeLineString(buffer) { + return LineString.fromBuffer(buffer); +} + +/** + * @private + * @param {LineString} value + */ +function encodeLineString(value) { + return value.toBuffer(); +} + +/** + * @private + * @param {Buffer} buffer + */ +function decodePoint(buffer) { + return Point.fromBuffer(buffer); +} + +/** + * @private + * @param {LineString} value + */ +function encodePoint(value) { + return value.toBuffer(); +} + +/** + * @private + * @param {Buffer} buffer + */ +function decodePolygon(buffer) { + return Polygon.fromBuffer(buffer); +} + +/** + * @private + * @param {Polygon} value + */ +function encodePolygon(value) { + return value.toBuffer(); +} + +function decodeDateRange(buffer) { + return DateRange.fromBuffer(buffer); +} + +/** + * @private + * @param {DateRange} value + */ +function encodeDateRange(value) { + return value.toBuffer(); +} + +/** + * @param {String} value + * @param {Number} startIndex + * @param {Number} length + * @param {String} [open] + * @param {String} [close] + * @returns {Array} + * @private + */ +function parseParams(value, startIndex, length, open, close) { + open = open || '('; + close = close || ')'; + const types = []; + let paramStart = startIndex; + let level = 0; + for (let i = startIndex; i < startIndex + length; i++) { + const c = value[i]; + if (c === open) { + level++; + } + if (c === close) { + level--; + } + if (level === 0 && c === ',') { + types.push(value.substr(paramStart, i - paramStart)); + paramStart = i + 1; + } + } + //Add the last one + types.push(value.substr(paramStart, length - (paramStart - startIndex))); + return types; +} + +/** + * @param {Array.} parts + * @param {Number} totalLength + * @returns {Buffer} + * @private 
+ */ +function concatRoutingKey(parts, totalLength) { + if (totalLength === 0) { + return null; + } + if (parts.length === 1) { + return parts[0]; + } + const routingKey = utils.allocBufferUnsafe(totalLength); + let offset = 0; + for (let i = 0; i < parts.length; i++) { + const item = parts[i]; + routingKey.writeUInt16BE(item.length, offset); + offset += 2; + item.copy(routingKey, offset); + offset += item.length; + routingKey[offset] = 0; + offset++; + } + return routingKey; +} + +module.exports = Encoder; diff --git a/node_modules/cassandra-driver/lib/errors.js b/node_modules/cassandra-driver/lib/errors.js new file mode 100644 index 0000000..8ec192f --- /dev/null +++ b/node_modules/cassandra-driver/lib/errors.js @@ -0,0 +1,175 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +/** + * Contains the error classes exposed by the driver. + * @module errors + */ + +/** + * Base Error + * @private + */ +function DriverError (message) { + Error.call(this, message); + Error.captureStackTrace(this, this.constructor); + this.name = this.constructor.name; + this.info = 'Cassandra Driver Error'; + // Explicitly set the message property as the Error.call() doesn't set the property on v8 + this.message = message; +} + +util.inherits(DriverError, Error); + +/** + * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. 
+ * @param {Object} innerErrors An object map containing the error per host tried + * @param {String} [message] + * @constructor + */ +function NoHostAvailableError(innerErrors, message) { + DriverError.call(this, message); + this.innerErrors = innerErrors; + this.info = 'Represents an error when a query cannot be performed because no host is available or could be reached by the driver.'; + if (!message) { + this.message = 'All host(s) tried for query failed.'; + if (innerErrors) { + const hostList = Object.keys(innerErrors); + if (hostList.length > 0) { + const host = hostList[0]; + this.message += util.format(' First host tried, %s: %s. See innerErrors.', host, innerErrors[host]); + } + } + } +} + +util.inherits(NoHostAvailableError, DriverError); + +/** + * Represents an error message from the server + * @param {Number} code Cassandra exception code + * @param {String} message + * @constructor + */ +function ResponseError(code, message) { + DriverError.call(this, message); + /** + * The error code as defined in [responseErrorCodes]{@link module:types~responseErrorCodes}. + * @type {Number} + */ + this.code = code; + this.info = 'Represents an error message from the server'; +} + +util.inherits(ResponseError, DriverError); + +/** + * Represents a bug inside the driver or in a Cassandra host. 
+ * @param {String} message + * @constructor + */ +function DriverInternalError(message) { + DriverError.call(this, message); + this.info = 'Represents a bug inside the driver or in a Cassandra host.'; +} + +util.inherits(DriverInternalError, DriverError); + +/** + * Represents an error when trying to authenticate with auth-enabled host + * @param {String} message + * @constructor + */ +function AuthenticationError(message) { + DriverError.call(this, message); + this.info = 'Represents an authentication error from the driver or from a Cassandra node.'; +} + +util.inherits(AuthenticationError, DriverError); + +/** + * Represents an error that is raised when one of the arguments provided to a method is not valid + * @param {String} message + * @constructor + */ +function ArgumentError(message) { + DriverError.call(this, message); + this.info = 'Represents an error that is raised when one of the arguments provided to a method is not valid.'; +} + +util.inherits(ArgumentError, DriverError); + +/** + * Represents a client-side error that is raised when the client didn't hear back from the server within + * {@link ClientOptions.socketOptions.readTimeout}. + * @param {String} message The error message. + * @param {String} [host] Address of the server host that caused the operation to time out. + * @constructor + */ +function OperationTimedOutError(message, host) { + DriverError.call(this, message, this.constructor); + this.info = 'Represents a client-side error that is raised when the client did not hear back from the server ' + + 'within socketOptions.readTimeout'; + + /** + * When defined, it gets the address of the host that caused the operation to time out. + * @type {String|undefined} + */ + this.host = host; +} + +util.inherits(OperationTimedOutError, DriverError); + +/** + * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. 
+ * @param message + * @constructor + */ +function NotSupportedError(message) { + DriverError.call(this, message, this.constructor); + this.info = 'Represents a feature that is not supported in the driver or in the Cassandra version.'; +} + +util.inherits(NotSupportedError, DriverError); + +/** + * Represents a client-side error indicating that all connections to a certain host have reached + * the maximum amount of in-flight requests supported. + * @param {String} address + * @param {Number} maxRequestsPerConnection + * @param {Number} connectionLength + * @constructor + */ +function BusyConnectionError(address, maxRequestsPerConnection, connectionLength) { + const message = util.format('All connections to host %s are busy, %d requests are in-flight on %s', + address, maxRequestsPerConnection, connectionLength === 1 ? 'a single connection': 'each connection'); + DriverError.call(this, message, this.constructor); + this.info = 'Represents a client-side error indicating that all connections to a certain host have reached ' + + 'the maximum amount of in-flight requests supported (pooling.maxRequestsPerConnection)'; +} + +util.inherits(BusyConnectionError, DriverError); + +exports.ArgumentError = ArgumentError; +exports.AuthenticationError = AuthenticationError; +exports.BusyConnectionError = BusyConnectionError; +exports.DriverError = DriverError; +exports.OperationTimedOutError = OperationTimedOutError; +exports.DriverInternalError = DriverInternalError; +exports.NoHostAvailableError = NoHostAvailableError; +exports.NotSupportedError = NotSupportedError; +exports.ResponseError = ResponseError; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/execution-options.js b/node_modules/cassandra-driver/lib/execution-options.js new file mode 100644 index 0000000..cc3df18 --- /dev/null +++ b/node_modules/cassandra-driver/lib/execution-options.js @@ -0,0 +1,619 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const utils = require('./utils'); +const types = require('./types'); +const errors = require('./errors'); + +const proxyExecuteKey = 'ProxyExecute'; + +/** + * A base class that represents a wrapper around the user provided query options with getter methods and proper + * default values. + *

+ * Note that getter methods might return undefined when not set on the query options or default + * {@link Client} options. + *

+ */ +class ExecutionOptions { + + /** + * Creates a new instance of {@link ExecutionOptions}. + */ + constructor() { + } + + /** + * Creates an empty instance, where all methods return undefined, used internally. + * @ignore + * @return {ExecutionOptions} + */ + static empty() { + return new ExecutionOptions(); + } + + /** + * Determines if the stack trace before the query execution should be maintained. + * @abstract + * @returns {Boolean} + */ + getCaptureStackTrace() { + + } + + /** + * Gets the [Consistency level]{@link module:types~consistencies} to be used for the execution. + * @abstract + * @returns {Number} + */ + getConsistency() { + + } + + /** + * Key-value payload to be passed to the server. On the server side, implementations of QueryHandler can use + * this data. + * @abstract + * @returns {Object} + */ + getCustomPayload() { + + } + + /** + * Gets the amount of rows to retrieve per page. + * @abstract + * @returns {Number} + */ + getFetchSize() { + + } + + /** + * When a fixed host is set on the query options and the query plan for the load-balancing policy is not used, it + * gets the host that should handle the query. + * @returns {Host} + */ + getFixedHost() { + + } + + /** + * Gets the type hints for parameters given in the query, ordered as for the parameters. + * @abstract + * @returns {Array|Array} + */ + getHints() { + + } + + /** + * Determines whether the driver must retrieve the following result pages automatically. + *

+ * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. + *

+ * @abstract + * @returns {Boolean} + */ + isAutoPage() { + + } + + /** + * Determines whether its a counter batch. Only valid for [Client#batch()]{@link Client#batch}, it will be ignored by + * other methods. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isBatchCounter() { + + } + + /** + * Determines whether the batch should be written to the batchlog. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isBatchLogged() { + + } + + /** + * Determines whether the query can be applied multiple times without changing the result beyond the initial + * application. + * @abstract + * @returns {Boolean} + */ + isIdempotent() { + + } + + /** + * Determines whether the query must be prepared beforehand. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isPrepared() { + + } + + /** + * Determines whether query tracing is enabled for the execution. + * @abstract + * @returns {Boolean} + */ + isQueryTracing() { + + } + + /** + * Gets the keyspace for the query when set at query options level. + *

+ * Note that this method will return undefined when the keyspace is not set at query options level. + * It will only return the keyspace name when the user provided a different keyspace than the current + * {@link Client} keyspace. + *

+ * @abstract + * @returns {String} + */ + getKeyspace() { + + } + + /** + * Gets the load balancing policy used for this execution. + * @returns {LoadBalancingPolicy} A LoadBalancingPolicy instance, it can't be undefined. + */ + getLoadBalancingPolicy() { + + } + + /** + * Gets the Buffer representing the paging state. + * @abstract + * @returns {Buffer} + */ + getPageState() { + + } + + /** + * Internal method that gets the preferred host. + * @abstract + * @ignore + */ + getPreferredHost() { + + } + + /** + * Gets the query options as provided to the execution method without setting the default values. + * @returns {QueryOptions} + */ + getRawQueryOptions() { + + } + + /** + * Gets the timeout in milliseconds to be used for the execution per coordinator. + *

+ * A value of 0 disables client side read timeout for the execution. Default: undefined. + *

+ * @abstract + * @returns {Number} + */ + getReadTimeout() { + + } + + /** + * Gets the [retry policy]{@link module:policies/retry} to be used. + * @abstract + * @returns {RetryPolicy} A RetryPolicy instance, it can't be undefined. + */ + getRetryPolicy() { + + } + + /** + * Internal method to obtain the row callback, for "by row" results. + * @abstract + * @ignore + */ + getRowCallback() { + + } + + /** + * Internal method to get or generate a timestamp for the request execution. + * @ignore + * @returns {Long|null} + */ + getOrGenerateTimestamp() { + + } + + /** + * Gets the index of the parameters that are part of the partition key to determine the routing. + * @abstract + * @ignore + * @returns {Array} + */ + getRoutingIndexes() { + + } + + /** + * Gets the partition key(s) to determine which coordinator should be used for the query. + * @abstract + * @returns {Buffer|Array} + */ + getRoutingKey() { + + } + + /** + * Gets the array of the parameters names that are part of the partition key to determine the + * routing. Only valid for non-prepared requests. + * @abstract + * @ignore + */ + getRoutingNames() { + + } + + /** + * Gets the the consistency level to be used for the serial phase of conditional updates. + * @abstract + * @returns {Number} + */ + getSerialConsistency() { + + } + + /** + * Gets the provided timestamp for the execution in microseconds from the unix epoch (00:00:00, January 1st, 1970). + *

When a timestamp generator is used, this method returns undefined.

+ * @abstract + * @returns {Number|Long|undefined|null} + */ + getTimestamp() { + + } + + /** + * @param {Array} hints + * @abstract + * @ignore + */ + setHints(hints) { + + } + + /** + * Sets the keyspace for the execution. + * @ignore + * @abstract + * @param {String} keyspace + */ + setKeyspace(keyspace) { + + } + + /** + * @abstract + * @ignore + */ + setPageState() { + + } + + /** + * Internal method that sets the preferred host. + * @abstract + * @ignore + */ + setPreferredHost() { + + } + + /** + * Sets the index of the parameters that are part of the partition key to determine the routing. + * @param {Array} routingIndexes + * @abstract + * @ignore + */ + setRoutingIndexes(routingIndexes) { + + } + + /** + * Sets the routing key. + * @abstract + * @ignore + */ + setRoutingKey(value) { + + } +} + +/** + * Internal implementation of {@link ExecutionOptions} that uses the value from the client options and execution + * profile into account. + * @ignore + */ +class DefaultExecutionOptions extends ExecutionOptions { + /** + * Creates a new instance of {@link ExecutionOptions}. + * @param {QueryOptions} queryOptions + * @param {Client} client + * @param {Function|null} rowCallback + */ + constructor(queryOptions, client, rowCallback) { + super(); + + this._queryOptions = queryOptions; + this._rowCallback = rowCallback; + this._routingKey = this._queryOptions.routingKey; + this._hints = this._queryOptions.hints; + this._keyspace = this._queryOptions.keyspace; + this._routingIndexes = this._queryOptions.routingIndexes; + this._pageState = typeof this._queryOptions.pageState === 'string' ? 
+ utils.allocBufferFromString(this._queryOptions.pageState, 'hex') : this._queryOptions.pageState; + this._preferredHost = null; + + this._client = client; + this._defaultQueryOptions = client.options.queryOptions; + this._profile = client.profileManager.getProfile(this._queryOptions.executionProfile); + + // Build a custom payload object designed for DSE-specific functionality + this._customPayload = DefaultExecutionOptions.createCustomPayload(this._queryOptions, this._defaultQueryOptions); + + if (!this._profile) { + throw new errors.ArgumentError(`Execution profile "${this._queryOptions.executionProfile}" not found`); + } + } + + /** + * Creates a payload for given user. + * @param {QueryOptions} userOptions + * @param {QueryOptions} defaultQueryOptions + * @private + */ + static createCustomPayload(userOptions, defaultQueryOptions) { + let customPayload = userOptions.customPayload || defaultQueryOptions.customPayload; + const executeAs = userOptions.executeAs || defaultQueryOptions.executeAs; + + if (executeAs) { + if (!customPayload) { + customPayload = {}; + customPayload[proxyExecuteKey] = utils.allocBufferFromString(executeAs); + } else if (!customPayload[proxyExecuteKey]) { + // Avoid appending to the existing payload object + customPayload = utils.extend({}, customPayload); + customPayload[proxyExecuteKey] = utils.allocBufferFromString(executeAs); + } + } + + return customPayload; + } + + /** + * Creates a new instance {@link ExecutionOptions}, based on the query options. 
+ * @param {QueryOptions|null} queryOptions + * @param {Client} client + * @param {Function|null} [rowCallback] + * @ignore + * @return {ExecutionOptions} + */ + static create(queryOptions, client, rowCallback) { + if (!queryOptions || typeof queryOptions === 'function') { + // queryOptions can be null/undefined and could be of type function when is an optional parameter + queryOptions = utils.emptyObject; + } + return new DefaultExecutionOptions(queryOptions, client, rowCallback); + } + + getCaptureStackTrace() { + return ifUndefined(this._queryOptions.captureStackTrace, this._defaultQueryOptions.captureStackTrace); + } + + getConsistency() { + return ifUndefined3(this._queryOptions.consistency, this._profile.consistency, + this._defaultQueryOptions.consistency); + } + + getCustomPayload() { + return this._customPayload; + } + + getFetchSize() { + return ifUndefined(this._queryOptions.fetchSize, this._defaultQueryOptions.fetchSize); + } + + getFixedHost() { + return this._queryOptions.host; + } + + getHints() { + return this._hints; + } + + isAutoPage() { + return ifUndefined(this._queryOptions.autoPage, this._defaultQueryOptions.autoPage); + } + + isBatchCounter() { + return ifUndefined(this._queryOptions.counter, false); + } + + isBatchLogged() { + return ifUndefined3(this._queryOptions.logged, this._defaultQueryOptions.logged, true); + } + + isIdempotent() { + return ifUndefined(this._queryOptions.isIdempotent, this._defaultQueryOptions.isIdempotent); + } + + /** + * Determines if the query execution must be prepared beforehand. 
+ * @return {Boolean} + */ + isPrepared() { + return ifUndefined(this._queryOptions.prepare, this._defaultQueryOptions.prepare); + } + + isQueryTracing() { + return ifUndefined(this._queryOptions.traceQuery, this._defaultQueryOptions.traceQuery); + } + + getKeyspace() { + return this._keyspace; + } + + getLoadBalancingPolicy() { + return this._profile.loadBalancing; + } + + getOrGenerateTimestamp() { + let result = this.getTimestamp(); + + if (result === undefined) { + const generator = this._client.options.policies.timestampGeneration; + + if ( types.protocolVersion.supportsTimestamp(this._client.controlConnection.protocolVersion) && generator) { + result = generator.next(this._client); + } else { + result = null; + } + } + + return typeof result === 'number' ? types.Long.fromNumber(result) : result; + } + + getPageState() { + return this._pageState; + } + + /** + * Gets the profile defined by the user or the default profile + * @internal + * @ignore + */ + getProfile() { + return this._profile; + } + + getRawQueryOptions() { + return this._queryOptions; + } + + getReadTimeout() { + return ifUndefined3(this._queryOptions.readTimeout, this._profile.readTimeout, + this._client.options.socketOptions.readTimeout); + } + + getRetryPolicy() { + return ifUndefined3(this._queryOptions.retry, this._profile.retry, this._client.options.policies.retry); + } + + getRoutingIndexes() { + return this._routingIndexes; + } + + getRoutingKey() { + return this._routingKey; + } + + getRoutingNames() { + return this._queryOptions.routingNames; + } + + /** + * Internal method to obtain the row callback, for "by row" results. + * @ignore + */ + getRowCallback() { + return this._rowCallback; + } + + getSerialConsistency() { + return ifUndefined3( + this._queryOptions.serialConsistency, this._profile.serialConsistency, this._defaultQueryOptions.serialConsistency); + } + + getTimestamp() { + return this._queryOptions.timestamp; + } + + /** + * Internal property to set the custom payload. 
+ * @ignore + * @internal + * @param {Object} payload + */ + setCustomPayload(payload) { + this._customPayload = payload; + } + + /** + * @param {Array} hints + */ + setHints(hints) { + this._hints = hints; + } + + /** + * @param {String} keyspace + */ + setKeyspace(keyspace) { + this._keyspace = keyspace; + } + + /** + * @param {Buffer} pageState + */ + setPageState(pageState) { + this._pageState = pageState; + } + + /** + * @param {Array} routingIndexes + */ + setRoutingIndexes(routingIndexes) { + this._routingIndexes = routingIndexes; + } + + setRoutingKey(value) { + this._routingKey = value; + } +} + +function ifUndefined(v1, v2) { + return v1 !== undefined ? v1 : v2; +} + +function ifUndefined3(v1, v2, v3) { + if (v1 !== undefined) { + return v1; + } + return v2 !== undefined ? v2 : v3; +} + +module.exports = { ExecutionOptions, DefaultExecutionOptions, proxyExecuteKey }; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/execution-profile.js b/node_modules/cassandra-driver/lib/execution-profile.js new file mode 100644 index 0000000..78c4fc2 --- /dev/null +++ b/node_modules/cassandra-driver/lib/execution-profile.js @@ -0,0 +1,266 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const utils = require('./utils'); +const types = require('./types'); +const promiseUtils = require('./promise-utils'); + +/** + * Creates a new instance of {@link ExecutionProfile}. 
+ * @classdesc + * Represents a set configurations to be used in a statement execution to be used for a single {@link Client} instance. + *

+ * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances. + *

+ * @param {String} name Name of the execution profile. + *

+ * Use 'default' to specify that the new instance should be the default {@link ExecutionProfile} if no + * profile is specified in the execution. + *

+ * @param {Object} [options] Profile options, when any of the options is not specified the {@link Client} will the use + * the ones defined in the default profile. + * @param {Number} [options.consistency] The consistency level to use for this profile. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @param {Object} [options.graphOptions] + * @param {String} [options.graphOptions.language] The graph language to use for graph queries. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.results] The protocol to use for serializing and deserializing graph results. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.name] The graph name to use for graph queries. + * @param {Number} [options.graphOptions.readConsistency] The consistency level to use for graph read queries. + * @param {String} [options.graphOptions.source] The graph traversal source name to use for graph queries. + * @param {Number} [options.graphOptions.writeConsistency] The consistency level to use for graph write queries. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @example + * const { Client, ExecutionProfile } = require('cassandra-driver'); + * const client = new Client({ + * contactPoints: ['host1', 'host2'], + * profiles: [ + * new ExecutionProfile('metrics-oltp', { + * consistency: consistency.localQuorum, + * retry: myRetryPolicy + * }) + * ] + * }); + * + * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); + * @constructor + */ +function ExecutionProfile(name, options) { + if (typeof name !== 'string') { + throw new TypeError('Execution profile name must be a string'); + } + options = options || utils.emptyObject; + const graphOptions = options.graphOptions || utils.emptyObject; + /** + * Name of the execution profile. + * @type {String} + */ + this.name = name; + /** + * Consistency level. + * @type {Number} + */ + this.consistency = options.consistency; + /** + * Load-balancing policy + * @type {LoadBalancingPolicy} + */ + this.loadBalancing = options.loadBalancing; + /** + * Client read timeout. + * @type {Number} + */ + this.readTimeout = options.readTimeout; + /** + * Retry policy. + * @type {RetryPolicy} + */ + this.retry = options.retry; + /** + * Serial consistency level. 
+ * @type {Number} + */ + this.serialConsistency = options.serialConsistency; + /** + * The graph options for this profile. + * @type {Object} + * @property {String} language The graph language. + * @property {String} name The graph name. + * @property {String} readConsistency The consistency to use for graph write queries. + * @property {String} source The graph traversal source. + * @property {String} writeConsistency The consistency to use for graph write queries. + */ + this.graphOptions = { + language: graphOptions.language, + results: graphOptions.results, + name: graphOptions.name, + readConsistency: graphOptions.readConsistency, + source: graphOptions.source, + writeConsistency: graphOptions.writeConsistency + }; +} + +/** + * Contains the logic to handle the different execution profiles of a {@link Client}. + * @ignore + */ +class ProfileManager { + + /** + * @param {ClientOptions} options + */ + constructor(options) { + this._profiles = options.profiles || []; + this._defaultConfiguredRetryPolicy = undefined; + this._setDefault(options); + // A array of unique load balancing policies + this._loadBalancingPolicies = []; + // A dictionary of name keys and profile values + this._profilesMap = {}; + // A dictionary of name keys and custom payload dictionaries as values + this._customPayloadCache = {}; + // A dictionary of name keys and graph options as values + this._graphOptionsCache = {}; + this._profiles.forEach(function (p) { + this._profilesMap[p.name] = p; + // Set required properties + p.loadBalancing = p.loadBalancing || this._defaultProfile.loadBalancing; + // Using array indexOf is not very efficient (O(n)) but the amount of profiles should be limited + // and a handful of load-balancing policies (no hashcode for load-Balancing policies) + if (this._loadBalancingPolicies.indexOf(p.loadBalancing) === -1) { + this._loadBalancingPolicies.push(p.loadBalancing); + } + return p; + }, this); + } + + /** + * @param {Client} client + * @param {HostMap} hosts 
+ */ + async init(client, hosts) { + for (const lbp of this._loadBalancingPolicies) { + await promiseUtils.fromCallback(callback => lbp.init(client, hosts, callback)); + } + } + + /** + * Uses the load-balancing policies to get the relative distance to the host and return the closest one. + * @param {Host} host + */ + getDistance(host) { + let distance = types.distance.ignored; + // this is performance critical: we can't use any other language features than for-loop :( + for (let i = 0; i < this._loadBalancingPolicies.length; i++) { + const d = this._loadBalancingPolicies[i].getDistance(host); + if (d < distance) { + distance = d; + if (distance === types.distance.local) { + break; + } + } + } + + host.setDistance(distance); + return distance; + } + + /** + * @param {String|ExecutionProfile} name + * @returns {ExecutionProfile|undefined} It returns the execution profile by name or the default profile when name is + * undefined. It returns undefined when the profile does not exist. + */ + getProfile(name) { + if (name instanceof ExecutionProfile) { + return name; + } + return this._profilesMap[name || 'default']; + } + + /** @returns {ExecutionProfile} */ + getDefault() { + return this._defaultProfile; + } + + /** @returns {LoadBalancingPolicy} */ + getDefaultLoadBalancing() { + return this._defaultProfile.loadBalancing; + } + + /** + * Gets the cached default graph options for a given profile. 
If it doesn't exist, it creates new options using the + * handler and inserts it into the cache + * @param {ExecutionProfile} profile + * @param {Function} createHandler + */ + getOrCreateGraphOptions(profile, createHandler) { + let graphOptions = this._graphOptionsCache[profile.name]; + if (!graphOptions) { + graphOptions = (this._graphOptionsCache[profile.name] = createHandler()); + } + return graphOptions; + } + + /** + * @private + * @param {ClientOptions} options + */ + _setDefault(options) { + this._defaultProfile = this._profiles.filter(function (p) { return p.name === 'default'; })[0]; + if (!this._defaultProfile) { + this._profiles.push(this._defaultProfile = new ExecutionProfile('default')); + } + + // Store the default configured retry policy + this._defaultConfiguredRetryPolicy = this._defaultProfile.retry; + + // Set the required properties + this._defaultProfile.loadBalancing = this._defaultProfile.loadBalancing || options.policies.loadBalancing; + this._defaultProfile.retry = this._defaultProfile.retry || options.policies.retry; + } + + /** + * Gets all the execution profiles currently defined. + * @returns {Array.} + */ + getAll() { + return this._profiles; + } + + getDefaultConfiguredRetryPolicy() { + return this._defaultConfiguredRetryPolicy; + } +} + +module.exports = { + ProfileManager, + ExecutionProfile +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/geometry.js b/node_modules/cassandra-driver/lib/geometry/geometry.js new file mode 100644 index 0000000..b91c8fd --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/geometry.js @@ -0,0 +1,133 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const endianness = { + '0': 'BE', + '1': 'LE' +}; + +function Geometry() { + +} + +Geometry.types = { + Point2D: 1, + LineString: 2, + Polygon: 3 +}; + +/** + * @protected + * @param {Number} code + * @returns {String} + * @ignore + */ +Geometry.getEndianness = function (code) { + const value = endianness[code.toString()]; + if (typeof value === 'undefined') { + throw new TypeError('Invalid endianness with code ' + code); + } + return value; +}; + +/** + * Reads an int32 from binary representation based on endianness. + * @protected + * @param {Buffer} buffer + * @param {String} endianness + * @param {Number} offset + * @returns Number + * @ignore + */ +Geometry.readInt32 = function (buffer, endianness, offset) { + if (endianness === 'BE') { + return buffer.readInt32BE(offset, true); + } + return buffer.readInt32LE(offset, true); +}; + +/** + * Reads an 64-bit double from binary representation based on endianness. + * @protected + * @param {Buffer} buffer + * @param {String} endianness + * @param {Number} offset + * @returns Number + * @ignore + */ +Geometry.readDouble = function (buffer, endianness, offset) { + if (endianness === 'BE') { + return buffer.readDoubleBE(offset, true); + } + return buffer.readDoubleLE(offset, true); +}; + +/** + * Writes an 32-bit integer to binary representation based on OS endianness. 
+ * @protected + * @param {Number} val + * @param {Buffer} buffer + * @param {Number} offset + * @ignore + */ +Geometry.prototype.writeInt32 = function (val, buffer, offset) { + if (this.useBESerialization()) { + return buffer.writeInt32BE(val, offset, true); + } + return buffer.writeInt32LE(val, offset, true); +}; + +/** + * Writes an 64-bit double to binary representation based on OS endianness. + * @protected + * @param {Number} val + * @param {Buffer} buffer + * @param {Number} offset + * @ignore + */ +Geometry.prototype.writeDouble = function (val, buffer, offset) { + if (this.useBESerialization()) { + return buffer.writeDoubleBE(val, offset, true); + } + return buffer.writeDoubleLE(val, offset, true); +}; + +/** + * Writes an 8-bit int that represents the OS endianness. + * @protected + * @param {Buffer} buffer + * @param {Number} offset + * @ignore + */ +Geometry.prototype.writeEndianness = function (buffer, offset) { + if (this.useBESerialization()) { + return buffer.writeInt8(0, offset, true); + } + return buffer.writeInt8(1, offset, true); +}; + +/** + * Returns true if the serialization must be done in big-endian format. + * Designed to allow injection of OS endianness. + * @abstract + * @ignore + */ +Geometry.prototype.useBESerialization = function () { + throw new Error('Not Implemented'); +}; + +module.exports = Geometry; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/index.d.ts b/node_modules/cassandra-driver/lib/geometry/index.d.ts new file mode 100644 index 0000000..82ad292 --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/index.d.ts @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export namespace geometry { + class LineString { + constructor(...args: Point[]); + + static fromBuffer(buffer: Buffer): LineString; + + static fromString(textValue: string): LineString; + + equals(other: LineString): boolean; + + toBuffer(): Buffer; + + toJSON(): string; + + toString(): string; + + } + + class Point { + constructor(x: number, y: number); + + static fromBuffer(buffer: Buffer): Point; + + static fromString(textValue: string): Point; + + equals(other: Point): boolean; + + toBuffer(): Buffer; + + toJSON(): string; + + toString(): string; + + } + + class Polygon { + constructor(...args: Point[]); + + static fromBuffer(buffer: Buffer): Polygon; + + static fromString(textValue: string): Polygon; + + equals(other: Polygon): boolean; + + toBuffer(): Buffer; + + toJSON(): string; + + toString(): string; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/index.js b/node_modules/cassandra-driver/lib/geometry/index.js new file mode 100644 index 0000000..28c9bc7 --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/index.js @@ -0,0 +1,30 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * Geometry module. + *

+ * Contains the classes to represent the set of additional CQL types for geospatial data that come with + * DSE 5.0. + *

+ * @module geometry + */ + +exports.Geometry = require('./geometry'); +exports.LineString = require('./line-string'); +exports.Point = require('./point'); +exports.Polygon = require('./polygon'); \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/line-string.js b/node_modules/cassandra-driver/lib/geometry/line-string.js new file mode 100644 index 0000000..3e7c8ee --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/line-string.js @@ -0,0 +1,197 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const utils = require('../utils'); +const Geometry = require('./geometry'); +const Point = require('./point'); + +/** + * Creates a new {@link LineString} instance. + * @classdesc + * A LineString is a one-dimensional object representing a sequence of points and the line segments connecting them. + * @param {...Point}[point] A sequence of [Point]{@link module:geometry~Point} items as arguments. 
+ * @example + * new LineString(new Point(10.99, 20.02), new Point(14, 26), new Point(34, 1.2)); + * @constructor + * @alias module:geometry~LineString + * @extends {Geometry} + */ +function LineString(point) { + let points = Array.prototype.slice.call(arguments); + if (points.length === 1 && Array.isArray(points) && Array.isArray(points[0])) { + //The first argument is an array of the points + points = points[0]; + } + if (points.length === 1) { + throw new TypeError('LineString can be either empty or contain 2 or more points'); + } + /** + * Returns a frozen Array of points that represent the line. + * @type {Array.} + */ + this.points = Object.freeze(points); +} + +//noinspection JSCheckFunctionSignatures +util.inherits(LineString, Geometry); + +/** + * Creates a {@link LineString} instance from + * a Well-known Text (WKT) + * representation of a line. + * @param {Buffer} buffer + * @returns {LineString} + */ +LineString.fromBuffer = function (buffer) { + if (!buffer || buffer.length < 9) { + throw new TypeError('A linestring buffer should contain at least 9 bytes'); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); + let offset = 1; + if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.LineString) { + throw new TypeError('Binary representation was not a LineString'); + } + offset += 4; + const length = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + if (buffer.length !== offset + length * 16) { + throw new TypeError(util.format('Length of the buffer does not match %d !== %d', buffer.length, offset + length * 8)); + } + const points = new Array(length); + for (let i = 0; i < length; i++) { + points[i] = new Point( + Geometry.readDouble(buffer, endianness, offset), + Geometry.readDouble(buffer, endianness, offset + 8)); + offset += 16; + } + //noinspection JSCheckFunctionSignatures + return new LineString(points); +}; + +/** + * Creates a {@link LineString} instance from + * a Well-known Text (WKT) + * 
representation of a line. + * @param {String} textValue + * @returns {LineString} + */ +LineString.fromString = function (textValue) { + const wktRegex = /^LINESTRING ?\(([-0-9. ,]+)\)+$/g; + const matches = wktRegex.exec(textValue); + if (!matches || matches.length !== 2) { + throw new TypeError('Invalid WKT: ' + textValue); + } + const points = LineString.parseSegments(matches[1]); + return new LineString(points); +}; + +/** + * Internal method that parses a series of WKT points. + * @param {String} textValue + * @returns {Array} + * @internal + * @ignore + */ +LineString.parseSegments = function (textValue) { + const points = []; + const pointParts = textValue.split(','); + for (let i = 0; i < pointParts.length; i++) { + const p = pointParts[i].trim(); + if (p.length === 0) { + throw new TypeError('Invalid WKT segment: ' + textValue); + } + const xyText = p.split(' ').filter(function (element) { + return (element.trim().length > 0); + }); + if (xyText.length !== 2) { + throw new TypeError('Invalid WKT segment: ' + textValue); + } + points.push(new Point(parseFloat(xyText[0]), parseFloat(xyText[1]))); + } + return points; +}; + +/** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ +LineString.prototype.toBuffer = function () { + const buffer = utils.allocBufferUnsafe(9 + this.points.length * 16); + this.writeEndianness(buffer, 0); + let offset = 1; + this.writeInt32(Geometry.types.LineString, buffer, offset); + offset += 4; + this.writeInt32(this.points.length, buffer, offset); + offset += 4; + this.points.forEach(function (p) { + this.writeDouble(p.x, buffer, offset); + this.writeDouble(p.y, buffer, offset + 8); + offset += 16; + }, this); + return buffer; +}; + +/** + * Returns true if the values of the linestrings are the same, otherwise it returns false. 
+ * @param {LineString} other + * @returns {Boolean} + */ +LineString.prototype.equals = function (other) { + if (!(other instanceof LineString)) { + return false; + } + if (this.points.length !== other.points.length) { + return false; + } + for (let i = 0; i < this.points.length; i++) { + if (!this.points[i].equals(other.points[i])) { + return false; + } + } + return true; +}; + +/** + * Returns Well-known text (WKT) representation of the geometry object. + * @returns {String} + */ +LineString.prototype.toString = function () { + if (this.points.length === 0) { + return 'LINESTRING EMPTY'; + } + return 'LINESTRING (' + + this.points.map(function (p) { + return p.x + ' ' + p.y; + }).join(', ') + + ')'; +}; + +LineString.prototype.useBESerialization = function () { + return false; +}; + +/** + * Returns a JSON representation of this geo-spatial type. + */ +LineString.prototype.toJSON = function () { + return { type: 'LineString', coordinates: this.points.map(function (p) { + return [p.x, p.y]; + })}; +}; + +module.exports = LineString; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/point.js b/node_modules/cassandra-driver/lib/geometry/point.js new file mode 100644 index 0000000..473b6cd --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/point.js @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const util = require('util'); +const utils = require('../utils'); +const Geometry = require('./geometry'); + +/** + * Creates a new {@link Point} instance. + * @classdesc + * A Point is a zero-dimensional object that represents a specific (X,Y) + * location in a two-dimensional XY-Plane. In case of Geographic Coordinate + * Systems, the X coordinate is the longitude and the Y is the latitude. + * @param {Number} x The X coordinate. + * @param {Number} y The Y coordinate. + * @extends {Geometry} + * @alias module:geometry~Point + * @constructor + */ +function Point(x, y) { + if (typeof x !== 'number' || typeof y !== 'number') { + throw new TypeError('X and Y must be numbers'); + } + if (isNaN(x) || isNaN(y)) { + throw new TypeError('X and Y must be numbers'); + } + /** + * Returns the X coordinate of this 2D point. + * @type {Number} + */ + this.x = x; + /** + * Returns the Y coordinate of this 2D point. + * @type {Number} + */ + this.y = y; +} + +//noinspection JSCheckFunctionSignatures +util.inherits(Point, Geometry); + +/** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. + * @param {Buffer} buffer + * @returns {Point} + */ +Point.fromBuffer = function (buffer) { + if (!buffer || buffer.length !== 21) { + throw new TypeError('2D Point buffer should contain 21 bytes'); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); + if (Geometry.readInt32(buffer, endianness, 1) !== Geometry.types.Point2D) { + throw new TypeError('Binary representation was not a point'); + } + return new Point(Geometry.readDouble(buffer, endianness, 5), Geometry.readDouble(buffer, endianness, 13)); +}; + +/** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. 
+ * @param {String} textValue + * @returns {Point} + */ +Point.fromString = function (textValue) { + const wktRegex = /^POINT\s?\(([-0-9.]+) ([-0-9.]+)\)$/g; + const matches = wktRegex.exec(textValue); + if (!matches || matches.length !== 3) { + throw new TypeError('2D Point WTK should contain 2 coordinates'); + } + return new Point(parseFloat(matches[1]), parseFloat(matches[2])); +}; + +/** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ +Point.prototype.toBuffer = function () { + const buffer = utils.allocBufferUnsafe(21); + this.writeEndianness(buffer, 0); + this.writeInt32(Geometry.types.Point2D, buffer, 1); + this.writeDouble(this.x, buffer, 5); + this.writeDouble(this.y, buffer, 13); + return buffer; +}; + +/** + * Returns true if the values of the point are the same, otherwise it returns false. + * @param {Point} other + * @returns {Boolean} + */ +Point.prototype.equals = function (other) { + if (!(other instanceof Point)) { + return false; + } + return (this.x === other.x && this.y === other.y); +}; + +/** + * Returns Well-known text (WKT) representation of the geometry object. + * @returns {String} + */ +Point.prototype.toString = function () { + return util.format('POINT (%d %d)', this.x, this.y); +}; + +Point.prototype.useBESerialization = function () { + return false; +}; + +/** + * Returns a JSON representation of this geo-spatial type. + */ +Point.prototype.toJSON = function () { + return { type: 'Point', coordinates: [ this.x, this.y ]}; +}; + +module.exports = Point; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/geometry/polygon.js b/node_modules/cassandra-driver/lib/geometry/polygon.js new file mode 100644 index 0000000..c4536e1 --- /dev/null +++ b/node_modules/cassandra-driver/lib/geometry/polygon.js @@ -0,0 +1,239 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const utils = require('../utils'); +const Geometry = require('./geometry'); +const Point = require('./point'); +const LineString = require('./line-string'); + +/** + * Creates a new {@link Polygon} instance. + * @classdesc + * Represents is a plane geometry figure that is bounded by a finite chain of straight line segments closing in a loop + * to form a closed chain or circuit. + * @param {...Array.}[ringPoints] A sequence of Array of [Point]{@link module:geometry~Point} items as arguments + * representing the rings of the polygon. + * @example + * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); + * @example + * //polygon with a hole + * new Polygon( + * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], + * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] + * ); + * @alias module:geometry~Polygon + * @constructor + */ +function Polygon(ringPoints) { + const rings = Array.prototype.slice.call(arguments); + /** + * Returns a frozen Array of array of points that represent the different rings in the polygon. + * @type {Array} + */ + this.rings = Object.freeze(rings); +} + +//noinspection JSCheckFunctionSignatures +util.inherits(Polygon, Geometry); + +/** + * Creates a {@link Polygon} instance from + * a Well-known Text (WKT) + * representation of a polygon. 
+ * @param {Buffer} buffer + * @returns {Polygon} + */ +Polygon.fromBuffer = function (buffer) { + if (!buffer || buffer.length < 9) { + throw new TypeError('A Polygon buffer should contain at least 9 bytes'); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); + let offset = 1; + if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.Polygon) { + throw new TypeError('Binary representation was not a Polygon'); + } + offset += 4; + const ringsLength = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + const ringsArray = new Array(ringsLength); + for (let ringIndex = 0; ringIndex < ringsLength; ringIndex++) { + const pointsLength = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + if (buffer.length < offset + pointsLength * 16) { + throw new TypeError(util.format('Length of the buffer does not match')); + } + const ring = new Array(pointsLength); + for (let i = 0; i < pointsLength; i++) { + ring[i] = new Point( + Geometry.readDouble(buffer, endianness, offset), + Geometry.readDouble(buffer, endianness, offset + 8)); + offset += 16; + } + ringsArray[ringIndex] = ring; + } + //Invoke the constructor with each ring as a parameter + //ringsArray.unshift(null); + //return new (Function.prototype.bind.apply(Polygon, ringsArray)); + return construct(ringsArray); +}; + +/** + * Creates a {@link Polygon} instance from + * a Well-known Text (WKT) + * representation of a shape. 
+ * @param {String} textValue + * @returns {Polygon} + */ +Polygon.fromString = function (textValue) { + const wktRegex = /^POLYGON ?\((\(.*\))\)$/g; + const matches = wktRegex.exec(textValue); + function validateWkt(condition) { + if (condition) { + throw new TypeError('Invalid WKT: ' + textValue); + } + } + validateWkt(!matches || matches.length !== 2); + + const ringsText = matches[1]; + const ringsArray = []; + let ringStart = null; + for (let i = 0; i < ringsText.length; i++) { + const c = ringsText[i]; + if (c === '(') { + validateWkt(ringStart !== null); + ringStart = i+1; + continue; + } + if (c === ')') { + validateWkt(ringStart === null); + ringsArray.push(ringsText.substring(ringStart, i)); + ringStart = null; + continue; + } + validateWkt(ringStart === null && c !== ' ' && c !== ','); + } + return construct(ringsArray.map(LineString.parseSegments)); +}; + +/** + * Creates a new instance of Polygon with each array item as a parameter + * @private + * @param {Array>} argsArray + * @returns {Polygon} + */ +function construct(argsArray) { + function F() { + return Polygon.apply(this, argsArray); + } + F.prototype = Polygon.prototype; + return new F(); +} + +/** + * Returns a Well-known Binary (WKB) + * representation of this instance. 
+ * @returns {Buffer} + */ +Polygon.prototype.toBuffer = function () { + let totalRingsLength = 0; + this.rings.forEach(function (ring) { + totalRingsLength += 4 + ring.length * 16; + }, this); + const buffer = utils.allocBufferUnsafe(9 + totalRingsLength); + this.writeEndianness(buffer, 0); + let offset = 1; + this.writeInt32(Geometry.types.Polygon, buffer, offset); + offset += 4; + this.writeInt32(this.rings.length, buffer, offset); + offset += 4; + this.rings.forEach(function (ring) { + this.writeInt32(ring.length, buffer, offset); + offset += 4; + ring.forEach(function (p) { + this.writeDouble(p.x, buffer, offset); + this.writeDouble(p.y, buffer, offset + 8); + offset += 16; + }, this); + }, this); + return buffer; +}; + +/** + * Returns true if the values of the polygons are the same, otherwise it returns false. + * @param {Polygon} other + * @returns {Boolean} + */ +Polygon.prototype.equals = function (other) { + if (!(other instanceof Polygon)) { + return false; + } + if (this.rings.length !== other.rings.length) { + return false; + } + for (let i = 0; i < this.rings.length; i++) { + const r1 = this.rings[i]; + const r2 = other.rings[i]; + if (r1.length !== r2.length) { + return false; + } + for (let j = 0; j < r1.length; j++) { + if (!r1[i].equals(r2[i])) { + return false; + } + } + } + return true; +}; + +Polygon.prototype.useBESerialization = function () { + return false; +}; + +/** + * Returns Well-known text (WKT) representation of the geometry object. + * @returns {String} + */ +Polygon.prototype.toString = function () { + if (this.rings.length === 0) { + return 'POLYGON EMPTY'; + } + let ringStrings = ''; + this.rings.forEach(function (r, i) { + if (i > 0) { + ringStrings += ', '; + } + ringStrings += '(' + + r.map(function (p) { + return p.x + ' ' + p.y; + }).join(', ') + + ')'; + }); + return 'POLYGON (' + ringStrings + ')'; +}; + +/** + * Returns a JSON representation of this geo-spatial type. 
+ */ +Polygon.prototype.toJSON = function () { + return { type: 'Polygon', coordinates: this.rings.map(function (r) { + return r.map(function (p) { + return [ p.x, p.y ]; + }); + })}; +}; + +module.exports = Polygon; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/host-connection-pool.js b/node_modules/cassandra-driver/lib/host-connection-pool.js new file mode 100644 index 0000000..f03e698 --- /dev/null +++ b/node_modules/cassandra-driver/lib/host-connection-pool.js @@ -0,0 +1,522 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const events = require('events'); + +const Connection = require('./connection'); +const utils = require('./utils'); +const promiseUtils = require('./promise-utils'); +const errors = require('./errors'); +const clientOptions = require('./client-options'); + +// Used to get the index of the connection with less in-flight requests +let connectionIndex = 0; +const connectionIndexOverflow = Math.pow(2, 15); + +let defaultOptions; + +/** + * Represents the possible states of the pool. + * Possible state transitions: + * - From initial to closing: The pool must be closed because the host is ignored. + * - From initial to shuttingDown: The pool is being shutdown as a result of a client shutdown. 
+ * - From closing to initial state: The pool finished closing connections (is now ignored) and it resets to + * initial state in case the host is marked as local/remote in the future. + * - From closing to shuttingDown (rare): It was marked as ignored, now the client is being shutdown. + * - From shuttingDown to shutdown: Finished shutting down, the pool should not be reused. + * @private + */ +const state = { + // Initial state: open / opening / ready to be opened + initial: 0, + // When the pool is being closed as part of a distance change + closing: 1, + // When the pool is being shutdown for good + shuttingDown: 2, + // When the pool has being shutdown + shutDown: 4 +}; + +/** + * Represents a pool of connections to a host + */ +class HostConnectionPool extends events.EventEmitter { + /** + * Creates a new instance of HostConnectionPool. + * @param {Host} host + * @param {Number} protocolVersion Initial protocol version + * @extends EventEmitter + */ + constructor(host, protocolVersion) { + super(); + this._address = host.address; + this._newConnectionTimeout = null; + this._state = state.initial; + this._opening = false; + this._host = host; + this.responseCounter = 0; + this.options = host.options; + this.protocolVersion = protocolVersion; + this.coreConnectionsLength = 1; + /** + * An immutable array of connections + * @type {Array.} + */ + this.connections = utils.emptyArray; + this.setMaxListeners(0); + this.log = utils.log; + } + + getInFlight() { + const length = this.connections.length; + if (length === 1) { + return this.connections[0].getInFlight(); + } + + let sum = 0; + for (let i = 0; i < length; i++) { + sum += this.connections[i].getInFlight(); + } + return sum; + } + + /** + * Gets the least busy connection from the pool. + * @param {Connection} [previousConnection] When provided, the pool should attempt to obtain a different connection. 
+ * @returns {Connection!} + * @throws {Error} + * @throws {BusyConnectionError} + */ + borrowConnection(previousConnection) { + if (this.connections.length === 0) { + throw new Error('No connection available'); + } + + const maxRequests = this.options.pooling.maxRequestsPerConnection; + const c = HostConnectionPool.minInFlight(this.connections, maxRequests, previousConnection); + + if (c.getInFlight() >= maxRequests) { + throw new errors.BusyConnectionError(this._address, maxRequests, this.connections.length); + } + + return c; + } + + /** + * Gets the connection with the minimum number of in-flight requests. + * Only checks for 2 connections (round-robin) and gets the one with minimum in-flight requests, as long as + * the amount of in-flight requests is lower than maxRequests. + * @param {Array.} connections + * @param {Number} maxRequests + * @param {Connection} previousConnection When provided, it will attempt to obtain a different connection. + * @returns {Connection!} + */ + static minInFlight(connections, maxRequests, previousConnection) { + const length = connections.length; + if (length === 1) { + return connections[0]; + } + + // Use a single index for all hosts as a simplified way to balance the load between connections + connectionIndex++; + if (connectionIndex >= connectionIndexOverflow) { + connectionIndex = 0; + } + + let current; + for (let index = connectionIndex; index < connectionIndex + length; index++) { + current = connections[index % length]; + if (current === previousConnection) { + // Increment the index and skip + current = connections[(++index) % length]; + } + + let next = connections[(index + 1) % length]; + if (next === previousConnection) { + // Skip + next = connections[(index + 2) % length]; + } + + if (next.getInFlight() < current.getInFlight()) { + current = next; + } + + if (current.getInFlight() < maxRequests) { + // Check as few connections as possible, as long as the amount of in-flight + // requests is lower than maxRequests 
+ break; + } + } + return current; + } + + /** + * Creates all the connections in the pool and switches the keyspace of each connection if needed. + * @param {string} keyspace + */ + async warmup(keyspace) { + if (this.connections.length < this.coreConnectionsLength) { + while (this.connections.length < this.coreConnectionsLength) { + await this._attemptNewConnection(); + } + + this.log('info', + `Connection pool to host ${this._address} created with ${this.connections.length} connection(s)`); + } else { + this.log('info', `Connection pool to host ${this._address} contains ${this.connections.length} connection(s)`); + } + + if (keyspace) { + try { + for (const connection of this.connections) { + await connection.changeKeyspace(keyspace); + } + } catch (err) { + // Log it and move on, it could be a momentary schema mismatch failure + this.log('warning', `Connection(s) to host ${this._address} could not be switched to keyspace ${keyspace}`); + } + } + } + + /** @returns {Connection} */ + _createConnection() { + const endpointOrServerName = !this.options.sni + ? 
this._address : this._host.hostId.toString(); + + const c = new Connection(endpointOrServerName, this.protocolVersion, this.options); + this._addListeners(c); + return c; + } + + /** @param {Connection} c */ + _addListeners(c) { + c.on('responseDequeued', () => this.responseCounter++); + + const self = this; + function connectionErrorCallback() { + // The socket is not fully open / can not send heartbeat + self.remove(c); + } + c.on('idleRequestError', connectionErrorCallback); + c.on('socketClose', connectionErrorCallback); + } + + addExistingConnection(c) { + this._addListeners(c); + // Use a copy of the connections array + this.connections = this.connections.slice(0); + this.connections.push(c); + } + + /** + * Prevents reconnection timeout from triggering + */ + clearNewConnectionAttempt() { + if (!this._newConnectionTimeout) { + return; + } + clearTimeout(this._newConnectionTimeout); + this._newConnectionTimeout = null; + } + + /** + * Tries to open a new connection. + * If a connection is being opened, it will resolve when the existing open task completes. + * @returns {Promise} + */ + async _attemptNewConnection() { + if (this._opening) { + // Wait for the event to fire + return await promiseUtils.fromEvent(this, 'open'); + } + + this._opening = true; + + const c = this._createConnection(); + let err; + + try { + await c.openAsync(); + } catch (e) { + err = e; + this.log('warning', `Connection to ${this._address} could not be created: ${err}`, err); + } + + if (this.isClosing()) { + this.log('info', `Connection to ${this._address} opened successfully but pool was being closed`); + err = new Error('Connection closed'); + } + + if (!err) { + // Append the connection to the pool. + // Use a copy of the connections array. 
+ const newConnections = this.connections.slice(0); + newConnections.push(c); + this.connections = newConnections; + this.log('info', `Connection to ${this._address} opened successfully`); + } else { + promiseUtils.toBackground(c.closeAsync()); + } + + // Notify that creation finished by setting the flag and emitting the event + this._opening = false; + this.emit('open', err, c); + + if (err) { + // Opening failed + throw err; + } + } + + attemptNewConnectionImmediate() { + const self = this; + function openConnection() { + self.clearNewConnectionAttempt(); + self.scheduleNewConnectionAttempt(0); + } + + if (this._state === state.initial) { + return openConnection(); + } + + if (this._state === state.closing) { + return this.once('close', openConnection); + } + // In the case the pool its being / has been shutdown for good + // Do not attempt to create a new connection. + } + + /** + * Closes the connection and removes a connection from the pool. + * @param {Connection} connection + */ + remove(connection) { + // locating an object by position in the array is O(n), but normally there should be between 1 to 8 connections. 
+ const index = this.connections.indexOf(connection); + if (index < 0) { + // it was already removed from the connections and it's closing + return; + } + // remove the connection from the pool, using an pool copy + const newConnections = this.connections.slice(0); + newConnections.splice(index, 1); + this.connections = newConnections; + // close the connection + setImmediate(function removeClose() { + connection.close(); + }); + this.emit('remove'); + } + + /** + * @param {Number} delay + */ + scheduleNewConnectionAttempt(delay) { + if (this.isClosing()) { + return; + } + + const self = this; + + this._newConnectionTimeout = setTimeout(function newConnectionTimeoutExpired() { + self._newConnectionTimeout = null; + if (self.connections.length >= self.coreConnectionsLength) { + // new connection can be scheduled while a new connection is being opened + // the pool has the appropriate size + return; + } + + if (delay > 0 && self.options.sni) { + // We use delay > 0 as an indication that it's a reconnection. + // A reconnection schedule can use delay = 0 as well, but it's a good enough signal. + promiseUtils.toBackground(self.options.sni.addressResolver.refresh().then(() => self._attemptNewConnection())); + return; + } + + promiseUtils.toBackground(self._attemptNewConnection()); + }, delay); + } + + hasScheduledNewConnection() { + return !!this._newConnectionTimeout || this._opening; + } + + /** + * Increases the size of the connection pool in the background, if needed. + */ + increaseSize() { + if (this.connections.length < this.coreConnectionsLength && !this.hasScheduledNewConnection()) { + // schedule the next connection in the background + this.scheduleNewConnectionAttempt(0); + } + } + + /** + * Gets the amount of responses and resets the internal counter. 
+ * @returns {number} + */ + getAndResetResponseCounter() { + const temp = this.responseCounter; + this.responseCounter = 0; + return temp; + } + + /** + * Gets a boolean indicating if the pool is being closed / shutting down or has been shutdown. + */ + isClosing() { + return this._state !== state.initial; + } + + /** + * Gracefully waits for all in-flight requests to finish and closes the pool. + */ + drainAndShutdown() { + if (this.isClosing()) { + // Its already closing / shutting down + return; + } + + this._state = state.closing; + this.clearNewConnectionAttempt(); + + if (this.connections.length === 0) { + return this._afterClosing(); + } + + const self = this; + const connections = this.connections; + this.connections = utils.emptyArray; + let closedConnections = 0; + this.log('info', util.format('Draining and closing %d connections to %s', connections.length, this._address)); + let wasClosed = false; + // eslint-disable-next-line prefer-const + let checkShutdownTimeout; + + for (let i = 0; i < connections.length; i++) { + const c = connections[i]; + if (c.getInFlight() === 0) { + getDelayedClose(c)(); + continue; + } + c.emitDrain = true; + c.once('drain', getDelayedClose(c)); + } + + function getDelayedClose(connection) { + return (function delayedClose() { + connection.close(); + if (++closedConnections < connections.length) { + return; + } + if (wasClosed) { + return; + } + wasClosed = true; + if (checkShutdownTimeout) { + clearTimeout(checkShutdownTimeout); + } + self._afterClosing(); + }); + } + + // Check that after sometime (readTimeout + 100ms) the connections have been drained + const delay = (this.options.socketOptions.readTimeout || getDefaultOptions().socketOptions.readTimeout) + 100; + checkShutdownTimeout = setTimeout(function checkShutdown() { + wasClosed = true; + connections.forEach(function connectionEach(c) { + c.close(); + }); + self._afterClosing(); + }, delay); + } + + _afterClosing() { + const self = this; + + function resetState() { 
+ if (self._state === state.shuttingDown) { + self._state = state.shutDown; + } else { + self._state = state.initial; + } + + self.emit('close'); + + if (self._state === state.shutDown) { + self.emit('shutdown'); + } + } + + if (this._opening) { + // The pool is growing, reset the state back to init once the open finished (without any new connection) + return this.once('open', resetState); + } + + resetState(); + } + + /** + * @returns {Promise} + */ + async shutdown() { + this.clearNewConnectionAttempt(); + + if (!this.connections.length) { + this._state = state.shutDown; + return; + } + + const previousState = this._state; + this._state = state.shuttingDown; + + if (previousState === state.closing || previousState === state.shuttingDown) { + // When previous state was closing, it will drain all connections and close them + // When previous state was "shuttingDown", it will close all the connections + // Once it's completed, shutdown event will be emitted + return promiseUtils.fromEvent(this, 'shutdown'); + } + + await this._closeAllConnections(); + + this._state = state.shutDown; + this.emit('shutdown'); + } + + async _closeAllConnections() { + const connections = this.connections; + // point to an empty array + this.connections = utils.emptyArray; + if (connections.length === 0) { + return; + } + + this.log('info', util.format('Closing %d connections to %s', connections.length, this._address)); + + await Promise.all(connections.map(c => c.closeAsync())); + } +} + +/** Lazily loads the default options */ +function getDefaultOptions() { + if (defaultOptions === undefined) { + defaultOptions = clientOptions.defaultOptions(); + } + return defaultOptions; +} + +module.exports = HostConnectionPool; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/host.js b/node_modules/cassandra-driver/lib/host.js new file mode 100644 index 0000000..56c128f --- /dev/null +++ b/node_modules/cassandra-driver/lib/host.js @@ -0,0 +1,658 @@ +/* + * Copyright 
DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const events = require('events'); + +const utils = require('./utils'); +const types = require('./types'); +const HostConnectionPool = require('./host-connection-pool'); +const PrepareHandler = require('./prepare-handler'); +const promiseUtils = require('./promise-utils'); + +const healthResponseCountInterval = 200; + +/** + * Represents a Cassandra node. + * @extends EventEmitter + */ +class Host extends events.EventEmitter { + + /** + * Creates a new Host instance. + */ + constructor(address, protocolVersion, options, metadata) { + super(); + /** + * Gets ip address and port number of the node separated by `:`. + * @type {String} + */ + this.address = address; + this.setDownAt = 0; + this.log = utils.log; + + /** + * Gets the timestamp of the moment when the Host was marked as UP. + * @type {Number|null} + * @ignore + * @internal + */ + this.isUpSince = null; + Object.defineProperty(this, 'options', { value: options, enumerable: false, writable: false }); + + /** + * The host pool. + * @internal + * @ignore + * @type {HostConnectionPool} + */ + Object.defineProperty(this, 'pool', { value: new HostConnectionPool(this, protocolVersion), enumerable: false }); + + this.pool.on('open', err => promiseUtils.toBackground(this._onNewConnectionOpen(err))); + this.pool.on('remove', () => this._checkPoolState()); + + /** + * Gets string containing the Cassandra version. 
+ * @type {String} + */ + this.cassandraVersion = null; + + /** + * Gets data center name of the node. + * @type {String} + */ + this.datacenter = null; + + /** + * Gets rack name of the node. + * @type {String} + */ + this.rack = null; + + /** + * Gets the tokens assigned to the node. + * @type {Array} + */ + this.tokens = null; + + /** + * Gets the id of the host. + *

This identifier is used by the server for internal communication / gossip.

+ * @type {Uuid} + */ + this.hostId = null; + + /** + * Gets string containing the DSE version or null if not set. + * @type {String} + */ + this.dseVersion = null; + + /** + * Gets the DSE Workloads the host is running. + *

+ * This is based on the "workload" or "workloads" columns in {@code system.local} and {@code system.peers}. + *

+ *

+ * Workload labels may vary depending on the DSE version in use;e.g. DSE 5.1 may report two distinct workloads: + * Search and Analytics, while DSE 5.0 would report a single + * SearchAnalytics workload instead. The driver simply returns the workload labels as reported by + * DSE, without any form of pre-processing. + *

+ *

When the information is unavailable, this property returns an empty array.

+ * @type {Array} + */ + this.workloads = utils.emptyArray; + + // the distance as last set using the load balancing policy + this._distance = types.distance.ignored; + this._healthResponseCounter = 0; + + // Make some of the private instance variables not enumerable to prevent from showing when inspecting + Object.defineProperty(this, '_metadata', { value: metadata, enumerable: false }); + Object.defineProperty(this, '_healthResponseCountTimer', { value: null, enumerable: false, writable: true }); + + this.reconnectionSchedule = this.options.policies.reconnection.newSchedule(); + this.reconnectionDelay = 0; + } + + /** + * Marks this host as not available for query coordination, when the host was previously marked as UP, otherwise its + * a no-op. + * @internal + * @ignore + */ + setDown() { + // Multiple events signaling that a host is failing could cause multiple calls to this method + if (this.setDownAt !== 0) { + // the host is already marked as Down + return; + } + if (this.pool.isClosing()) { + // the pool is being closed/shutdown, don't mind + return; + } + this.setDownAt = Date.now(); + if (this.pool.coreConnectionsLength > 0) { + // According to the distance, there should be connections open to it => issue a warning + this.log('warning', `Host ${this.address} considered as DOWN. Reconnection delay ${this.reconnectionDelay}ms.`); + } + else { + this.log('info', `Host ${this.address} considered as DOWN.`); + } + this.emit('down'); + this._checkPoolState(); + } + + /** + * Marks this host as available for querying. + * @param {Boolean} [clearReconnection] + * @internal + * @ignore + */ + setUp(clearReconnection) { + if (!this.setDownAt) { + //The host is already marked as UP + return; + } + this.log('info', `Setting host ${this.address} as UP`); + this.setDownAt = 0; + this.isUpSince = Date.now(); + //if it was unhealthy and now it is not, lets reset the reconnection schedule. 
+ this.reconnectionSchedule = this.options.policies.reconnection.newSchedule(); + if (clearReconnection) { + this.pool.clearNewConnectionAttempt(); + } + this.emit('up'); + } + + /** + * Resets the reconnectionSchedule and tries to issue a reconnection immediately. + * @internal + * @ignore + */ + checkIsUp() { + if (this.isUp()) { + return; + } + this.reconnectionSchedule = this.options.policies.reconnection.newSchedule(); + this.reconnectionDelay = 0; + this.pool.attemptNewConnectionImmediate(); + } + + /** + * @param {Boolean} [waitForPending] When true, it waits for in-flight operations to be finish before closing the + * connections. + * @returns {Promise} + * @internal + * @ignore + */ + shutdown(waitForPending) { + if (this._healthResponseCountTimer) { + clearInterval(this._healthResponseCountTimer); + } + if (waitForPending) { + this.pool.drainAndShutdown(); + // Gracefully draining and shutting down the pool is being done in the background + return Promise.resolve(); + } + return this.pool.shutdown(); + } + + /** + * Determines if the node is UP now (seen as UP by the driver). + * @returns {boolean} + */ + isUp() { + return !this.setDownAt; + } + + /** + * Determines if the host can be considered as UP. + * Deprecated: Use {@link Host#isUp()} instead. + * @returns {boolean} + */ + canBeConsideredAsUp() { + const self = this; + function hasTimePassed() { + return new Date().getTime() - self.setDownAt >= self.reconnectionDelay; + } + return !this.setDownAt || hasTimePassed(); + } + + /** + * Sets the distance of the host relative to the client using the load balancing policy. 
+ * @param {Number} distance + * @internal + * @ignore + */ + setDistance(distance) { + const previousDistance = this._distance; + this._distance = distance || types.distance.local; + if (this.options.pooling.coreConnectionsPerHost) { + this.pool.coreConnectionsLength = this.options.pooling.coreConnectionsPerHost[this._distance] || 0; + } + else { + this.pool.coreConnectionsLength = 1; + } + if (this._distance === previousDistance) { + return this._distance; + } + if (this._healthResponseCountTimer) { + clearInterval(this._healthResponseCountTimer); + } + if (this._distance === types.distance.ignored) { + // this host was local/remote and now must be ignored + this.emit('ignore'); + this.pool.drainAndShutdown(); + } + else { + if (!this.isUp()) { + this.checkIsUp(); + } + // Reset the health check timer + this._healthResponseCountTimer = setInterval(() => { + this._healthResponseCounter = this.pool.getAndResetResponseCounter(); + }, healthResponseCountInterval); + } + return this._distance; + } + + /** + * Changes the protocol version of a given host + * @param {Number} value + * @internal + * @ignore + */ + setProtocolVersion(value) { + this.pool.protocolVersion = value; + } + + /** + * Gets the least busy connection from the pool. + * @param {Connection} [previousConnection] When provided, the pool should attempt to obtain a different connection. + * @returns {Connection!} + * @throws {Error} + * @throws {BusyConnectionError} + * @internal + * @ignore + */ + borrowConnection(previousConnection) { + return this.pool.borrowConnection(previousConnection); + } + + /** + * Creates all the connection in the pool. + * @param {string} keyspace + * @internal + * @ignore + */ + warmupPool(keyspace) { + return this.pool.warmup(keyspace); + } + + /** + * Starts creating the pool in the background. + * @internal + * @ignore + */ + initializePool() { + this.pool.increaseSize(); + } + /** + * Gets any connection that is already opened or null if not found. 
+ * @returns {Connection} + * @internal + * @ignore + */ + getActiveConnection() { + if (!this.isUp() || !this.pool.connections.length) { + return null; + } + return this.pool.connections[0]; + } + + /** + * Internal method to get the amount of responses dequeued in the last interval (between 200ms and 400ms) on all + * connections to the host. + * @returns {Number} + * @internal + * @ignore + */ + getResponseCount() { + // Last interval plus the current count + return this._healthResponseCounter + this.pool.responseCounter; + } + + /** + * Checks the health of a connection in the pool + * @param {Connection} connection + * @internal + * @ignore + */ + checkHealth(connection) { + if (connection.timedOutOperations <= this.options.socketOptions.defunctReadTimeoutThreshold) { + return; + } + this.removeFromPool(connection); + } + + /** + * @param {Connection} connection + * @internal + * @ignore + */ + removeFromPool(connection) { + this.pool.remove(connection); + this._checkPoolState(); + } + + /** + * Internal method that gets the amount of in-flight requests on all connections to the host. + * @internal + * @ignore + */ + getInFlight() { + return this.pool.getInFlight(); + } + + /** + * Validates that the internal state of the connection pool. + * If the pool size is smaller than expected, schedule a new connection attempt. + * If the amount of connections is 0 for not ignored hosts, the host must be down. 
+ * @private + */ + _checkPoolState() { + if (this.pool.isClosing()) { + return; + } + if (this.pool.connections.length < this.pool.coreConnectionsLength) { + // the pool needs to grow / reconnect + if (!this.pool.hasScheduledNewConnection()) { + this.reconnectionDelay = this.reconnectionSchedule.next().value; + this.pool.scheduleNewConnectionAttempt(this.reconnectionDelay); + } + } + const shouldHaveConnections = this._distance !== types.distance.ignored && this.pool.coreConnectionsLength > 0; + if (shouldHaveConnections && this.pool.connections.length === 0) { + // Mark as DOWN, if its UP + this.setDown(); + } + } + + /** + * Executed after an scheduled new connection attempt finished + * @private + */ + async _onNewConnectionOpen(err) { + if (err) { + this._checkPoolState(); + return; + } + if (!this.isUp() && this.options.rePrepareOnUp) { + this.log('info', `Re-preparing all queries on host ${this.address} before setting it as UP`); + const allPrepared = this._metadata.getAllPrepared(); + try { + await PrepareHandler.prepareAllQueries(this, allPrepared); + } + catch (err) { + this.log('warning', `Failed re-preparing on host ${this.address}: ${err}`, err); + } + } + this.setUp(); + this.pool.increaseSize(); + } + + /** + * Returns an array containing the Cassandra Version as an Array of Numbers having the major version in the first + * position. + * @returns {Array.} + */ + getCassandraVersion() { + if (!this.cassandraVersion) { + return utils.emptyArray; + } + return this.cassandraVersion.split('-')[0].split('.').map(x => parseInt(x, 10)); + } + + /** + * Gets the DSE version of the host as an Array, containing the major version in the first position. + * In case the cluster is not a DSE cluster, it returns an empty Array. 
+ * @returns {Array} + */ + getDseVersion() { + if (!this.dseVersion) { + return utils.emptyArray; + } + return this.dseVersion.split('-')[0].split('.').map(x => parseInt(x, 10)); + } +} + +/** + * Represents an associative-array of {@link Host hosts} that can be iterated. + * It creates an internal copy when adding or removing, making it safe to iterate using the values() + * method within async operations. + * @extends events.EventEmitter + * @constructor + */ +class HostMap extends events.EventEmitter{ + constructor() { + super(); + + this._items = new Map(); + this._values = null; + + Object.defineProperty(this, 'length', { get: () => this.values().length, enumerable: true }); + + /** + * Emitted when a host is added to the map + * @event HostMap#add + */ + /** + * Emitted when a host is removed from the map + * @event HostMap#remove + */ + } + + /** + * Executes a provided function once per map element. + * @param callback + */ + forEach(callback) { + const items = this._items; + for (const [ key, value ] of items) { + callback(value, key); + } + } + + /** + * Gets a {@link Host host} by key or undefined if not found. + * @param {String} key + * @returns {Host} + */ + get(key) { + return this._items.get(key); + } + + /** + * Returns an array of host addresses. + * @returns {Array.} + */ + keys() { + return Array.from(this._items.keys()); + } + + /** + * Removes an item from the map. + * @param {String} key The key of the host + * @fires HostMap#remove + */ + remove(key) { + const value = this._items.get(key); + if (value === undefined) { + return; + } + + // Clear cache + this._values = null; + + // Copy the values + const copy = new Map(this._items); + copy.delete(key); + + this._items = copy; + this.emit('remove', value); + } + + /** + * Removes multiple hosts from the map. 
+ * @param {Array.} keys + * @fires HostMap#remove + */ + removeMultiple(keys) { + // Clear value cache + this._values = null; + + // Copy the values + const copy = new Map(this._items); + const removedHosts = []; + + for (const key of keys) { + const h = copy.get(key); + + if (!h) { + continue; + } + + removedHosts.push(h); + copy.delete(key); + } + + this._items = copy; + removedHosts.forEach(h => this.emit('remove', h)); + } + + /** + * Adds a new item to the map. + * @param {String} key The key of the host + * @param {Host} value The host to be added + * @fires HostMap#remove + * @fires HostMap#add + */ + set(key, value) { + // Clear values cache + this._values = null; + + const originalValue = this._items.get(key); + if (originalValue) { + //The internal structure does not change + this._items.set(key, value); + //emit a remove followed by a add + this.emit('remove', originalValue); + this.emit('add', value); + return; + } + + // Copy the values + const copy = new Map(this._items); + copy.set(key, value); + this._items = copy; + this.emit('add', value); + return value; + } + + /** + * Returns a shallow copy of a portion of the items into a new array object. + * Backward-compatibility. + * @param {Number} [begin] + * @param {Number} [end] + * @returns {Array} + * @ignore + */ + slice(begin, end) { + if (!begin && !end) { + // Avoid making a copy of the copy + return this.values(); + } + + return this.values().slice(begin || 0, end); + } + + /** + * Deprecated: Use set() instead. + * @ignore + * @deprecated + */ + push(k, v) { + this.set(k, v); + } + + /** + * Returns a shallow copy of the values of the map. + * @returns {Array.} + */ + values() { + if (!this._values) { + // Cache the values + this._values = Object.freeze(Array.from(this._items.values())); + } + + return this._values; + } + + /** + * Removes all items from the map. 
+ * @returns {Array.} The previous items + */ + clear() { + const previousItems = this.values(); + + // Clear cache + this._values = null; + + // Clear items + this._items = new Map(); + + // Emit events + previousItems.forEach(h => this.emit('remove', h)); + + return previousItems; + } + + inspect() { + return this._items; + } + + toJSON() { + // Node.js 10 and below don't support Object.fromEntries() + if (Object.fromEntries) { + return Object.fromEntries(this._items); + } + + const obj = {}; + for (const [ key, value ] of this._items) { + obj[key] = value; + } + + return obj; + } +} + +module.exports = { + Host, + HostMap +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/insights-client.js b/node_modules/cassandra-driver/lib/insights-client.js new file mode 100644 index 0000000..4c9207d --- /dev/null +++ b/node_modules/cassandra-driver/lib/insights-client.js @@ -0,0 +1,492 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const os = require('os'); +const path = require('path'); +const fs = require('fs'); +const utils = require('./utils'); +const promiseUtils = require('./promise-utils'); +const types = require('./types'); +const requests = require('./requests'); +const { ExecutionOptions } = require('./execution-options'); +const packageInfo = require('../package.json'); +const VersionNumber = require('./types/version-number'); +const { NoAuthProvider } = require('./auth'); + +let kerberosModule; + +try { + // eslint-disable-next-line + kerberosModule = require('kerberos'); +} +catch (err) { + // Kerberos is an optional dependency +} + +const minDse6Version = new VersionNumber(6, 0, 5); +const minDse51Version = new VersionNumber(5, 1, 13); +const dse600Version = new VersionNumber(6, 0, 0); +const rpc = "CALL InsightsRpc.reportInsight(?)"; +const maxStatusErrorLogs = 5; + +/** + * Contains methods and functionality to send events to DSE Insights. + */ +class InsightsClient { + + /** + * Creates a new instance of the {@link InsightsClient} using the driver {@link Client}. + * @param {Client} client + * @param {Object} [options] + * @param {Number} [options.statusEventDelay] + * @param {Function} [options.errorCallback] + */ + constructor(client, options) { + this._client = client; + this._sessionId = types.Uuid.random().toString(); + this._enabled = false; + this._closed = false; + this._firstTimeout = null; + this._recurrentTimeout = null; + this._statusErrorLogs = 0; + + options = options || {}; + + this._statusEventDelay = options.statusEventDelay || 300000; + this._errorCallback = options.errorCallback || utils.noop; + } + + /** + * Initializes the insights client in the background by sending the startup event and scheduling status events at + * regular intervals. 
+ * @returns {undefined} + */ + init() { + this._enabled = this._client.options.monitorReporting.enabled && this._dseSupportsInsights(); + if (!this._enabled) { + return; + } + + promiseUtils.toBackground(this._init()); + } + + async _init() { + try { + await this._sendStartupEvent(); + + if (this._closed) { + // The client was shutdown + return; + } + + // Send the status event the first time with a delay containing some random portion + // Initial delay should be statusEventDelay - (0 to 10%) + const firstDelay = Math.floor(this._statusEventDelay - 0.1 * this._statusEventDelay * Math.random()); + // Schedule the first timer + this._firstTimeout = setTimeout(() => { + // Send the first status event, the promise will never be rejected + this._sendStatusEvent(); + // The following status events are sent at regular intervals + this._recurrentTimeout = setInterval(() => this._sendStatusEvent(), this._statusEventDelay); + }, firstDelay); + } catch (err) { + if (this._closed) { + // Sending failed because the Client was shutdown + return; + } + // We shouldn't try to recover + this._client.log('verbose', `Insights startup message could not be sent (${err})`, err); + this._errorCallback(err); + } + } + + /** + * Sends the startup event. + * @returns {Promise} + * @private + */ + async _sendStartupEvent() { + const message = await this._getStartupMessage(); + const request = new requests.QueryRequest(rpc, [message], ExecutionOptions.empty()); + await this._client.controlConnection.query(request, false); + } + + /** + * Sends the status event. + * @returns {Promise} A promise that is never rejected. 
+ * @private + */ + async _sendStatusEvent() { + const request = new requests.QueryRequest(rpc, [ this._getStatusEvent() ], ExecutionOptions.empty()); + + try { + await this._client.controlConnection.query(request, false); + } catch (err) { + if (this._closed) { + // Sending failed because the Client was shutdown + return; + } + + if (this._statusErrorLogs < maxStatusErrorLogs) { + this._client.log('warning', `Insights status message could not be sent (${err})`, err); + this._statusErrorLogs++; + } + + this._errorCallback(err); + } + } + + /** + * Validates the minimum server version for all nodes in the cluster. + * @private + */ + _dseSupportsInsights() { + if (this._client.hosts.length === 0) { + return false; + } + + return this._client.hosts.values().reduce((acc, host) => { + if (!acc) { + return acc; + } + + const versionArr = host.getDseVersion(); + + if (versionArr.length === 0) { + return false; + } + + const version = new VersionNumber(...versionArr); + + return version.compare(minDse6Version) >= 0 || + (version.compare(dse600Version) < 0 && version.compare(minDse51Version) >= 0); + + }, true); + } + + /** + * @returns {Promise} Returns a json string with the startup message. 
+ * @private + */ + async _getStartupMessage() { + const cc = this._client.controlConnection; + const options = this._client.options; + + + const appInfo = await this._getAppInfo(options); + const message = { + metadata: { + name: 'driver.startup', + insightMappingId: 'v1', + insightType: 'EVENT', + timestamp: Date.now(), + tags: { language: 'nodejs' } + }, + data: { + driverName: packageInfo.description, + driverVersion: packageInfo.version, + clientId: options.id, + sessionId: this._sessionId, + applicationName: appInfo.applicationName, + applicationVersion: appInfo.applicationVersion, + applicationNameWasGenerated: appInfo.applicationNameWasGenerated, + contactPoints: mapToObject(cc.getResolvedContactPoints()), + dataCenters: this._getDataCenters(), + initialControlConnection: cc.host ? cc.host.address : undefined, + protocolVersion: cc.protocolVersion, + localAddress: cc.getLocalAddress(), + hostName: os.hostname(), + executionProfiles: getExecutionProfiles(this._client), + poolSizeByHostDistance: { + local: options.pooling.coreConnectionsPerHost[types.distance.local], + remote: options.pooling.coreConnectionsPerHost[types.distance.remote] + }, + heartbeatInterval: options.pooling.heartBeatInterval, + compression: 'NONE', + reconnectionPolicy: getPolicyInfo(options.policies.reconnection), + ssl: { + enabled: !!options.sslOptions, + certValidation: options.sslOptions ? !!options.sslOptions.rejectUnauthorized : undefined + }, + authProvider: { + type: !(options.authProvider instanceof NoAuthProvider) ? 
getConstructor(options.authProvider) : undefined, + }, + otherOptions: { + coalescingThreshold: options.socketOptions.coalescingThreshold, + }, + platformInfo: { + os: { + name: os.platform(), + version: os.release(), + arch: os.arch() + }, + cpus: { + length: os.cpus().length, + model: os.cpus()[0].model + }, + runtime: { + node: process.versions['node'], + v8: process.versions['v8'], + uv: process.versions['uv'], + openssl: process.versions['openssl'], + kerberos: kerberosModule ? kerberosModule.version : undefined + } + }, + configAntiPatterns: this._getConfigAntiPatterns(), + periodicStatusInterval: Math.floor(this._statusEventDelay / 1000) + } + }; + + return JSON.stringify(message); + } + + _getConfigAntiPatterns() { + const options = this._client.options; + const result = {}; + + if (options.sslOptions && !options.sslOptions.rejectUnauthorized) { + result.sslWithoutCertValidation = + 'Client-to-node encryption is enabled but server certificate validation is disabled'; + } + + return result; + } + + /** + * Gets an array of data centers the driver connects to. + * Whether the driver connects to a certain host is determined by the host distance (local and remote hosts) + * and the pooling options (whether connection length for remote hosts is greater than 0). 
+ * @returns {Array} + * @private + */ + _getDataCenters() { + const remoteConnectionsLength = this._client.options.pooling.coreConnectionsPerHost[types.distance.remote]; + const dataCenters = new Set(); + + this._client.hosts.values().forEach(h => { + const distance = this._client.profileManager.getDistance(h); + if (distance === types.distance.local || (distance === types.distance.remote && remoteConnectionsLength > 0)) { + dataCenters.add(h.datacenter); + } + }); + + return Array.from(dataCenters); + } + + /** + * Tries to obtain the application name and version from + * @param {DseClientOptions} options + * @returns {Promise} + * @private + */ + async _getAppInfo(options) { + if (typeof options.applicationName === 'string') { + return Promise.resolve({ + applicationName: options.applicationName, + applicationVersion: options.applicationVersion, + applicationNameWasGenerated: false + }); + } + + let readPromise = Promise.resolve(); + + if (require.main && require.main.filename) { + const packageInfoPath = path.dirname(require.main.filename); + readPromise = this._readPackageInfoFile(packageInfoPath); + } + + const text = await readPromise; + let applicationName = 'Default Node.js Application'; + let applicationVersion; + + if (text) { + try { + const packageInfo = JSON.parse(text); + if (packageInfo.name) { + applicationName = packageInfo.name; + applicationVersion = packageInfo.version; + } + } + catch (err) { + // The package.json file could not be parsed + // Use the default name + } + } + + return { + applicationName, + applicationVersion, + applicationNameWasGenerated: true + }; + } + + /** + * @private + * @returns {Promise} A Promise that will never be rejected + */ + _readPackageInfoFile(packageInfoPath) { + return new Promise(resolve => { + fs.readFile(path.join(packageInfoPath, 'package.json'), 'utf8', (err, data) => { + // Swallow error + resolve(data); + }); + }); + } + + /** + * @returns {String} Returns a json string with the startup message. 
+ * @private + */ + _getStatusEvent() { + const cc = this._client.controlConnection; + const options = this._client.options; + const state = this._client.getState(); + const connectedNodes = {}; + + state.getConnectedHosts().forEach(h => { + connectedNodes[h.address] = { + connections: state.getOpenConnections(h), + inFlightQueries: state.getInFlightQueries(h) + }; + }); + + const message = { + metadata: { + name: 'driver.status', + insightMappingId: 'v1', + insightType: 'EVENT', + timestamp: Date.now(), + tags: { language: 'nodejs' } + }, + data: { + clientId: options.id, + sessionId: this._sessionId, + controlConnection: cc.host ? cc.host.address : undefined, + connectedNodes + } + }; + + return JSON.stringify(message); + } + + /** + * Cleans any timer used internally and sets the client as closed. + */ + shutdown() { + if (!this._enabled) { + return; + } + + this._closed = true; + + if (this._firstTimeout !== null) { + clearTimeout(this._firstTimeout); + } + + if (this._recurrentTimeout !== null) { + clearInterval(this._recurrentTimeout); + } + } +} + +module.exports = InsightsClient; + +function mapToObject(map) { + const result = {}; + map.forEach((value, key) => result[key] = value); + return result; +} + +function getPolicyInfo(policy) { + if (!policy) { + return undefined; + } + + const options = policy.getOptions && policy.getOptions(); + + return { + type: policy.constructor.name, + options: (options instanceof Map) ? mapToObject(options) : utils.emptyObject + }; +} + +function getConsistencyString(c) { + if (typeof c !== 'number') { + return undefined; + } + + return types.consistencyToString[c]; +} + +function getConstructor(instance) { + return instance ? 
instance.constructor.name : undefined; +} + +function getExecutionProfiles(client) { + const executionProfiles = {}; + + const defaultProfile = client.profileManager.getDefault(); + setExecutionProfileProperties(client, executionProfiles, defaultProfile, defaultProfile); + + client.profileManager.getAll() + .filter(p => p !== defaultProfile) + .forEach(profile => setExecutionProfileProperties(client, executionProfiles, profile, defaultProfile)); + + return executionProfiles; +} + +function setExecutionProfileProperties(client, parent, profile, defaultProfile) { + const output = parent[profile.name] = {}; + setExecutionProfileItem(output, profile, defaultProfile, 'readTimeout'); + setExecutionProfileItem(output, profile, defaultProfile, 'loadBalancing', getPolicyInfo); + setExecutionProfileItem(output, profile, defaultProfile, 'retry', getPolicyInfo); + setExecutionProfileItem(output, profile, defaultProfile, 'consistency', getConsistencyString); + setExecutionProfileItem(output, profile, defaultProfile, 'serialConsistency', getConsistencyString); + + if (profile === defaultProfile) { + // Speculative execution policy is included in the profiles as some drivers support + // different spec exec policy per profile, in this case is fixed for all profiles + output.speculativeExecution = getPolicyInfo(client.options.policies.speculativeExecution); + } + + if (profile.graphOptions) { + output.graphOptions = {}; + const defaultGraphOptions = defaultProfile.graphOptions || utils.emptyObject; + setExecutionProfileItem(output.graphOptions, profile.graphOptions, defaultGraphOptions, 'language'); + setExecutionProfileItem(output.graphOptions, profile.graphOptions, defaultGraphOptions, 'name'); + setExecutionProfileItem(output.graphOptions, profile.graphOptions, defaultGraphOptions, 'readConsistency', + getConsistencyString); + setExecutionProfileItem(output.graphOptions, profile.graphOptions, defaultGraphOptions, 'source'); + setExecutionProfileItem(output.graphOptions, 
profile.graphOptions, defaultGraphOptions, 'writeConsistency', + getConsistencyString); + + if (Object.keys(output.graphOptions).length === 0) { + // Properties that are undefined will not be included in the JSON + output.graphOptions = undefined; + } + } +} + +function setExecutionProfileItem(output, profile, defaultProfile, prop, valueGetter) { + const value = profile[prop]; + valueGetter = valueGetter || (x => x); + + if ((profile === defaultProfile && value !== undefined) || value !== defaultProfile[prop]) { + output[prop] = valueGetter(value); + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/cache.js b/node_modules/cassandra-driver/lib/mapping/cache.js new file mode 100644 index 0000000..ffc4777 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/cache.js @@ -0,0 +1,207 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const qModule = require('./q'); +const QueryOperator = qModule.QueryOperator; +const QueryAssignment = qModule.QueryAssignment; + +/** + * Provides utility methods for obtaining a caching keys based on the specifics of the Mapper methods. + * @ignore + */ +class Cache { + /** + * Gets an iterator of keys to uniquely identify a document shape for a select query. 
+ * @param {Array} docKeys + * @param {Object} doc + * @param {{fields, limit, orderBy}} docInfo + * @returns {Iterator} + */ + static *getSelectKey(docKeys, doc, docInfo) { + yield* Cache._yieldKeyAndOperators(docKeys, doc); + + yield* Cache._getSelectDocInfo(docInfo); + } + /** + * Gets an iterator of keys to uniquely identify a shape for a select all query. + * @param {{fields, limit, orderBy}} docInfo + * @returns {Iterator} + */ + static *getSelectAllKey(docInfo) { + yield 'root'; + + yield* Cache._getSelectDocInfo(docInfo); + } + + /** + * Gets the parts of the key for a select query related to the docInfo. + * @param {{fields, limit, orderBy}} docInfo + * @private + */ + static *_getSelectDocInfo(docInfo) { + if (docInfo) { + if (docInfo.fields && docInfo.fields.length > 0) { + // Use a separator from properties + yield '|f|'; + yield* docInfo.fields; + } + + if (typeof docInfo.limit === 'number') { + yield '|l|'; + } + + if (docInfo.orderBy) { + yield '|o|'; + + // orderBy is uses property names as keys and 'asc'/'desc' as values + const keys = Object.keys(docInfo.orderBy); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + yield key; + yield docInfo.orderBy[key]; + } + } + } + } + + /** + * Gets an iterator of keys to uniquely identify a document shape for an insert query. + * @param {Array} docKeys + * @param {{ifNotExists, ttl, fields}} docInfo + * @returns {Iterator} + */ + static *getInsertKey(docKeys, docInfo) { + // No operator supported on INSERT values + yield* docKeys; + + if (docInfo) { + if (docInfo.fields && docInfo.fields.length > 0) { + // Use a separator from properties + yield '|f|'; + yield* docInfo.fields; + } + + if (typeof docInfo.ttl === 'number') { + yield '|t|'; + } + + if (docInfo.ifNotExists) { + yield '|e|'; + } + } + } + + /** + * Gets an iterator of keys to uniquely identify a document shape for an UPDATE query. 
+ * @param {Array} docKeys + * @param {Object} doc + * @param {{ifExists, when, ttl, fields}} docInfo + */ + static *getUpdateKey(docKeys, doc, docInfo) { + yield* Cache._yieldKeyAndAllQs(docKeys, doc); + + if (docInfo) { + if (docInfo.fields && docInfo.fields.length > 0) { + // Use a separator from properties + yield '|f|'; + yield* docInfo.fields; + } + + if (typeof docInfo.ttl === 'number') { + yield '|t|'; + } + + if (docInfo.ifExists) { + yield '|e|'; + } + + if (docInfo.when) { + yield* Cache._yieldKeyAndOperators(Object.keys(docInfo.when), docInfo.when); + } + } + } + + /** + * Gets an iterator of keys to uniquely identify a document shape for a DELETE query. + * @param {Array} docKeys + * @param {Object} doc + * @param {{ifExists, when, fields, deleteOnlyColumns}} docInfo + * @returns {Iterator} + */ + static *getRemoveKey(docKeys, doc, docInfo) { + yield* Cache._yieldKeyAndOperators(docKeys, doc); + + if (docInfo) { + if (docInfo.fields && docInfo.fields.length > 0) { + // Use a separator from properties + yield '|f|'; + yield* docInfo.fields; + } + + if (docInfo.ifExists) { + yield '|e|'; + } + + if (docInfo.deleteOnlyColumns) { + yield '|dc|'; + } + + if (docInfo.when) { + yield* Cache._yieldKeyAndOperators(Object.keys(docInfo.when), docInfo.when); + } + } + } + + static *_yieldKeyAndOperators(keys, obj) { + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + yield key; + yield* Cache._yieldOperators(obj[key]); + } + } + + static *_yieldOperators(value) { + if (value !== null && value !== undefined && value instanceof QueryOperator) { + yield value.key; + if (value.hasChildValues) { + yield* Cache._yieldOperators(value.value[0]); + yield '|/|'; + yield* Cache._yieldOperators(value.value[1]); + } + } + } + + static *_yieldKeyAndAllQs(keys, obj) { + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + yield key; + const value = obj[key]; + if (value !== null && value !== undefined) { + if (value instanceof QueryOperator) { + yield* 
Cache._yieldOperators(value); + } + else if (value instanceof QueryAssignment) { + yield value.sign; + yield value.inverted; + } + } + } + } +} + +module.exports = Cache; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/doc-info-adapter.js b/node_modules/cassandra-driver/lib/mapping/doc-info-adapter.js new file mode 100644 index 0000000..32deead --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/doc-info-adapter.js @@ -0,0 +1,162 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const errors = require('../errors'); +const utils = require('../utils'); + +/** + * Provides utility methods to adapt and map user provided docInfo and executionOptions to a predictable object format. + * @ignore + */ +class DocInfoAdapter { + /** + * Returns an Array where each item contains the property name, the column name and the property value (to obtain + * the operator). + * When docInfo.fields is specified, it uses that array to obtain the information. 
+ * @param {Array} docKeys + * @param {null|{fields}} docInfo + * @param {Object} doc + * @param {ModelMappingInfo} mappingInfo + * @returns {Array} + */ + static getPropertiesInfo(docKeys, docInfo, doc, mappingInfo) { + let propertyKeys = docKeys; + if (docInfo && docInfo.fields && docInfo.fields.length > 0) { + propertyKeys = docInfo.fields; + } + + return propertyKeys.map(propertyName => ({ + propertyName, + columnName: mappingInfo.getColumnName(propertyName), + value: doc[propertyName], + fromModel: mappingInfo.getFromModelFn(propertyName) + })); + } + + /** + * @param {{orderBy}} docInfo + * @param {ModelMappingInfo} mappingInfo + * @returns {Array} + */ + static adaptOrderBy(docInfo, mappingInfo){ + if (!docInfo || !docInfo.orderBy) { + return utils.emptyArray; + } + return Object.keys(docInfo.orderBy).map(key => { + const value = docInfo.orderBy[key]; + const ordering = typeof value === 'string' ? value.toUpperCase() : value; + if (ordering !== 'ASC' && ordering !== 'DESC') { + throw new errors.ArgumentError('Order must be either "ASC" or "DESC", obtained: ' + value); + } + return [ mappingInfo.getColumnName(key), ordering ]; + }); + } + + /** + * Returns the QueryOptions for an INSERT/UPDATE/DELETE statement. 
+ * @param {Object|String|undefined} executionOptions + * @param {Boolean} isIdempotent + */ + static adaptOptions(executionOptions, isIdempotent) { + const options = { + prepare: true, + executionProfile: undefined, + timestamp: undefined, + isIdempotent: isIdempotent + }; + + if (typeof executionOptions === 'string') { + options.executionProfile = executionOptions; + } + else if (executionOptions !== null && executionOptions !== undefined) { + options.executionProfile = executionOptions.executionProfile; + options.timestamp = executionOptions.timestamp; + + if (executionOptions.isIdempotent !== undefined) { + options.isIdempotent = executionOptions.isIdempotent; + } + } + return options; + } + + /** + * Returns the QueryOptions for a SELECT statement. + * @param {Object|String|undefined} executionOptions + * @param {Boolean} [overrideIdempotency] + */ + static adaptAllOptions(executionOptions, overrideIdempotency) { + const options = { + prepare: true, + executionProfile: undefined, + fetchSize: undefined, + pageState: undefined, + timestamp: undefined, + isIdempotent: undefined + }; + + if (typeof executionOptions === 'string') { + options.executionProfile = executionOptions; + } + else if (executionOptions !== null && executionOptions !== undefined) { + options.executionProfile = executionOptions.executionProfile; + options.fetchSize = executionOptions.fetchSize; + options.pageState = executionOptions.pageState; + options.timestamp = executionOptions.timestamp; + options.isIdempotent = executionOptions.isIdempotent; + } + + if (overrideIdempotency) { + options.isIdempotent = true; + } + + return options; + } + + /** + * Returns the QueryOptions for a batch statement. 
+ * @param {Object|String|undefined} executionOptions + * @param {Boolean} isIdempotent + * @param {Boolean} isCounter + */ + static adaptBatchOptions(executionOptions, isIdempotent, isCounter) { + const options = { + prepare: true, + executionProfile: undefined, + timestamp: undefined, + logged: undefined, + isIdempotent: isIdempotent, + counter: isCounter + }; + + if (typeof executionOptions === 'string') { + options.executionProfile = executionOptions; + } + else if (executionOptions !== null && executionOptions !== undefined) { + options.executionProfile = executionOptions.executionProfile; + options.timestamp = executionOptions.timestamp; + options.logged = executionOptions.logged !== false; + + if (executionOptions.isIdempotent !== undefined) { + options.isIdempotent = executionOptions.isIdempotent; + } + } + return options; + } +} + +module.exports = DocInfoAdapter; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/index.d.ts b/node_modules/cassandra-driver/lib/mapping/index.d.ts new file mode 100644 index 0000000..694e789 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/index.d.ts @@ -0,0 +1,189 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { types } from '../types'; +import { Client } from '../../'; +import Long = types.Long; + +export namespace mapping { + interface TableMappings { + getColumnName(propName: string): string; + + getPropertyName(columnName: string): string; + + newObjectInstance(): any; + } + + class DefaultTableMappings implements TableMappings { + getColumnName(propName: string): string; + + getPropertyName(columnName: string): string; + + newObjectInstance(): any; + } + + class UnderscoreCqlToCamelCaseMappings implements TableMappings { + getColumnName(propName: string): string; + + getPropertyName(columnName: string): string; + + newObjectInstance(): any; + } + + interface Result extends Iterator { + wasApplied(): boolean; + + first(): T | null; + + forEach(callback: (currentValue: T, index: number) => void, thisArg?: any): void; + + toArray(): T[]; + } + + type MappingExecutionOptions = { + executionProfile?: string; + isIdempotent?: boolean; + logged?: boolean; + timestamp?: number | Long; + fetchSize?: number; + pageState?: number; + } + + interface ModelTables { + name: string; + isView: boolean; + } + + class Mapper { + constructor(client: Client, options?: MappingOptions); + + batch(items: ModelBatchItem[], executionOptions?: string | MappingExecutionOptions): Promise; + + forModel(name: string): ModelMapper; + } + + type MappingOptions = { + models: { [key: string]: ModelOptions }; + } + + type FindDocInfo = { + fields?: string[]; + orderBy?: { [key: string]: string }; + limit?: number; + } + + type InsertDocInfo = { + fields?: string[]; + ttl?: number; + ifNotExists?: boolean; + } + + type UpdateDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { [key: string]: any }; + orderBy?: { [key: string]: string }; + limit?: number; + deleteOnlyColumns?: boolean; + } + + type RemoveDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { [key: string]: any }; + deleteOnlyColumns?: boolean; + } + + type 
ModelOptions = { + tables?: string[] | ModelTables[]; + mappings?: TableMappings; + columns?: { [key: string]: string|ModelColumnOptions }; + keyspace?: string; + } + + type ModelColumnOptions = { + name: string; + toModel?: (columnValue: any) => any; + fromModel?: (modelValue: any) => any; + }; + + interface ModelBatchItem { + + } + + interface ModelBatchMapper { + insert(doc: any, docInfo?: InsertDocInfo): ModelBatchItem; + + remove(doc: any, docInfo?: RemoveDocInfo): ModelBatchItem; + + update(doc: any, docInfo?: UpdateDocInfo): ModelBatchItem; + } + + interface ModelMapper { + name: string; + batching: ModelBatchMapper; + + get(doc: { [key: string]: any }, docInfo?: { fields?: string[] }, executionOptions?: string | MappingExecutionOptions): Promise; + + find(doc: { [key: string]: any }, docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + + findAll(docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + + insert(doc: { [key: string]: any }, docInfo?: InsertDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + + update(doc: { [key: string]: any }, docInfo?: UpdateDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + + remove(doc: { [key: string]: any }, docInfo?: RemoveDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + + mapWithQuery( + query: string, + paramsHandler: (doc: any) => any[], + executionOptions?: string | MappingExecutionOptions + ): (doc: any, executionOptions?: string | MappingExecutionOptions) => Promise>; + } + + namespace q { + interface QueryOperator { + + } + + function in_(arr: any): QueryOperator; + + function gt(value: any): QueryOperator; + + function gte(value: any): QueryOperator; + + function lt(value: any): QueryOperator; + + function lte(value: any): QueryOperator; + + function notEq(value: any): QueryOperator; + + function and(condition1: any, condition2: any): QueryOperator; + + function 
incr(value: any): QueryOperator; + + function decr(value: any): QueryOperator; + + function append(value: any): QueryOperator; + + function prepend(value: any): QueryOperator; + + function remove(value: any): QueryOperator; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/index.js b/node_modules/cassandra-driver/lib/mapping/index.js new file mode 100644 index 0000000..e1de6c9 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/index.js @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Module containing classes and fields related to the Mapper. 
+ * @module mapping + */ + +exports.Mapper = require('./mapper'); +exports.ModelMapper = require('./model-mapper'); +exports.ModelBatchMapper = require('./model-batch-mapper'); +exports.ModelBatchItem = require('./model-batch-item').ModelBatchItem; +exports.Result = require('./result'); +const tableMappingsModule = require('./table-mappings'); +exports.TableMappings = tableMappingsModule.TableMappings; +exports.DefaultTableMappings = tableMappingsModule.DefaultTableMappings; +exports.UnderscoreCqlToCamelCaseMappings = tableMappingsModule.UnderscoreCqlToCamelCaseMappings; +exports.q = require('./q').q; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/mapper.js b/node_modules/cassandra-driver/lib/mapping/mapper.js new file mode 100644 index 0000000..329c87f --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/mapper.js @@ -0,0 +1,193 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const ModelMapper = require('./model-mapper'); +const MappingHandler = require('./mapping-handler'); +const DocInfoAdapter = require('./doc-info-adapter'); +const errors = require('../errors'); +const Result = require('./result'); +const ResultMapper = require('./result-mapper'); +const ModelMappingInfo = require('./model-mapping-info'); +const { ModelBatchItem } = require('./model-batch-item'); + +/** + * Represents an object mapper for Apache Cassandra and DataStax Enterprise. 
+ * @alias module:mapping~Mapper + * @example Creating a Mapper instance with some options for the model 'User' + * const mappingOptions = { + * models: { + * 'User': { + * tables: ['users'], + * mappings: new UnderscoreCqlToCamelCaseMappings(), + * columnNames: { + * 'userid': 'id' + * } + * } + * } + * }; + * const mapper = new Mapper(client, mappingOptions); + * @example Creating a Mapper instance with other possible options for a model + * const mappingOptions = { + * models: { + * 'Video': { + * tables: ['videos', 'user_videos', 'latest_videos', { name: 'my_videos_view', isView: true }], + * mappings: new UnderscoreCqlToCamelCaseMappings(), + * columnNames: { + * 'videoid': 'id' + * }, + * keyspace: 'ks1' + * } + * } + * }; + * const mapper = new Mapper(client, mappingOptions); + */ +class Mapper { + /** + * Creates a new instance of Mapper. + * @param {Client} client The Client instance to use to execute the queries and fetch the metadata. + * @param {MappingOptions} [options] The [MappingOptions]{@link module:mapping~MappingOptions} containing the + * information of the models and table mappings. + */ + constructor(client, options) { + if (!client) { + throw new Error('client must be defined'); + } + + /** + * The Client instance used to create this Mapper instance. + * @type {Client} + */ + this.client = client; + + this._modelMappingInfos = ModelMappingInfo.parse(options, client.keyspace); + this._modelMappers = new Map(); + } + + /** + * Gets a [ModelMapper]{@link module:mapping~ModelMapper} that is able to map documents of a certain model into + * CQL rows. + * @param {String} name The name to identify the model. Note that the name is case-sensitive. + * @returns {ModelMapper} A [ModelMapper]{@link module:mapping~ModelMapper} instance. 
+ */ + forModel(name) { + let modelMapper = this._modelMappers.get(name); + + if (modelMapper === undefined) { + let mappingInfo = this._modelMappingInfos.get(name); + + if (mappingInfo === undefined) { + if (!this.client.keyspace) { + throw new Error(`No mapping information found for model '${name}'. ` + + `Mapper is unable to create default mappings without setting the keyspace`); + } + + mappingInfo = ModelMappingInfo.createDefault(name, this.client.keyspace); + this.client.log('info', `Mapping information for model '${name}' not found, creating default mapping. ` + + `Keyspace: ${mappingInfo.keyspace}; Table: ${mappingInfo.tables[0].name}.`); + } else { + this.client.log('info', `Creating model mapper for '${name}' using mapping information. Keyspace: ${ + mappingInfo.keyspace}; Table${mappingInfo.tables.length > 1? 's' : ''}: ${ + mappingInfo.tables.map(t => t.name)}.`); + } + + modelMapper = new ModelMapper(name, new MappingHandler(this.client, mappingInfo)); + this._modelMappers.set(name, modelMapper); + } + + return modelMapper; + } + + /** + * Executes a batch of queries represented in the items. + * @param {Array} items + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * The mapper uses the generated queries to determine the default value. When an UPDATE is generated with a + * counter column or appending/prepending to a list column, the execution is marked as not idempotent. + *

+ *

+ * Additionally, the mapper uses the safest approach for queries with lightweight transactions (Compare and + * Set) by considering them as non-idempotent. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Boolean} [executionOptions.logged=true] Determines whether the batch should be written to the batchlog. + * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + * @returns {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result}. + */ + batch(items, executionOptions) { + if (!Array.isArray(items) || !(items.length > 0)) { + return Promise.reject( + new errors.ArgumentError('First parameter items should be an Array with 1 or more ModelBatchItem instances')); + } + + const queries = []; + let isIdempotent = true; + let isCounter; + + return Promise + .all(items + .map(item => { + if (!(item instanceof ModelBatchItem)) { + return Promise.reject(new Error( + 'Batch items must be instances of ModelBatchItem, use modelMapper.batching object to create each item')); + } + + return item.pushQueries(queries) + .then(options => { + // The batch is idempotent when all the queries contained are idempotent + isIdempotent = isIdempotent && options.isIdempotent; + + // Let it fail at server level when there is a mix of counter and normal mutations + isCounter = options.isCounter; + }); + })) + .then(() => + this.client.batch(queries, DocInfoAdapter.adaptBatchOptions(executionOptions, isIdempotent, isCounter))) + .then(rs => { + // Results should only be adapted when the batch contains LWT (single table) + const info = items[0].getMappingInfo(); + return new Result(rs, info, ResultMapper.getMutationAdapter(rs)); + }); + } +} + +/** + * Represents the mapping options. + * @typedef {Object} module:mapping~MappingOptions + * @property {Object} models An associative array containing the + * name of the model as key and the table and column information as value. + */ + +/** + * Represents a set of options that applies to a certain model. 
+ * @typedef {Object} module:mapping~ModelOptions + * @property {Array|Array<{name, isView}>} tables An Array containing the name of the tables or An Array + * containing the name and isView property to describe the table. + * @property {TableMappings} mappings The TableMappings implementation instance that is used to convert from column + * names to property names and the other way around. + * @property {Object.} [columnNames] An associative array containing the name of the columns and + * properties that doesn't follow the convention defined in the TableMappings. + * @property {String} [keyspace] The name of the keyspace. Only mandatory when the Client is not using a keyspace. + */ + +module.exports = Mapper; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/mapping-handler.js b/node_modules/cassandra-driver/lib/mapping/mapping-handler.js new file mode 100644 index 0000000..f5bd831 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/mapping-handler.js @@ -0,0 +1,412 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const utils = require('../utils'); +const QueryGenerator = require('./query-generator'); +const ResultMapper = require('./result-mapper'); +const Result = require('./result'); +const Cache = require('./cache'); +const Tree = require('./tree'); +const ObjectSelector = require('./object-selector'); +const DocInfoAdapter = require('./doc-info-adapter'); + +const cacheHighWaterMark = 100; + +/** + * @ignore + */ +class MappingHandler { + /** + * @param {Client} client + * @param {ModelMappingInfo} mappingInfo + */ + constructor(client, mappingInfo) { + this._client = client; + this._cache = { + select: new Tree().on('add', length => this._validateCacheLength(length)), + selectAll: new Tree().on('add', length => this._validateCacheLength(length)), + insert: new Tree().on('add', length => this._validateCacheLength(length)), + update: new Tree().on('add', length => this._validateCacheLength(length)), + remove: new Tree().on('add', length => this._validateCacheLength(length)), + customQueries: new Map() + }; + + /** + * Gets the mapping information of the document. + * @type {ModelMappingInfo} + */ + this.info = mappingInfo; + } + + /** + * Gets a function to be used to execute SELECT the query using the document. + * @param {Object} doc + * @param {{fields, orderBy, limit}} docInfo + * @param {Boolean} allPKsDefined Determines whether all primary keys must be defined in the doc for the query to + * be valid. 
+ * @return {Promise} + */ + getSelectExecutor(doc, docInfo, allPKsDefined) { + const docKeys = Object.keys(doc); + if (docKeys.length === 0) { + return Promise.reject(new Error('Expected object with keys')); + } + + const cacheKey = Cache.getSelectKey(docKeys, doc, docInfo); + // Cache the executor and the result mapper under the same key + // That way, those can get evicted together + const cacheItem = this._cache.select.getOrCreate(cacheKey, () => ({ executor: null, resultAdapter: null })); + + if (cacheItem.executor !== null) { + return Promise.resolve(cacheItem.executor); + } + + const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, null, doc, this.info); + const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray, docInfo, doc, this.info); + const orderByColumns = DocInfoAdapter.adaptOrderBy(docInfo, this.info); + const limit = docInfo && docInfo.limit; + + return this._client.connect() + .then(() => + ObjectSelector.getForSelect(this._client, this.info, allPKsDefined, propertiesInfo, fieldsInfo, orderByColumns)) + .then(tableName => { + // Part of the closure + const query = QueryGenerator.getSelect(tableName, this.info.keyspace, propertiesInfo, fieldsInfo, + orderByColumns, limit); + const paramsGetter = QueryGenerator.selectParamsGetter(propertiesInfo, limit); + const self = this; + + cacheItem.executor = function selectExecutor(doc, docInfo, executionOptions) { + return self._executeSelect(query, paramsGetter, doc, docInfo, executionOptions, cacheItem); + }; + + return cacheItem.executor; + }); + } + + getSelectAllExecutor(docInfo) { + const cacheKey = Cache.getSelectAllKey(docInfo); + const cacheItem = this._cache.selectAll.getOrCreate(cacheKey, () => ({ executor: null, resultAdapter: null })); + + if (cacheItem.executor !== null) { + return cacheItem.executor; + } + + const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray, docInfo, utils.emptyObject, this.info); + const orderByColumns = 
DocInfoAdapter.adaptOrderBy(docInfo, this.info); + const limit = docInfo && docInfo.limit; + + const tableName = ObjectSelector.getForSelectAll(this.info); + + // Part of the closure + const query = QueryGenerator.getSelect( + tableName, this.info.keyspace, utils.emptyArray, fieldsInfo, orderByColumns, limit); + const paramsGetter = QueryGenerator.selectParamsGetter(utils.emptyArray, limit); + const self = this; + + cacheItem.executor = function selectAllExecutor(docInfo, executionOptions) { + return self._executeSelect(query, paramsGetter, null, docInfo, executionOptions, cacheItem); + }; + + return cacheItem.executor; + } + + /** + * Executes a SELECT query and returns the adapted results. + * When a result adapter is not yet created, it gets a new one and caches it. + * @private + */ + _executeSelect(query, paramsGetter, doc, docInfo, executionOptions, cacheItem) { + const options = DocInfoAdapter.adaptAllOptions(executionOptions, true); + + return this._client.execute(query, paramsGetter(doc, docInfo, this.info), options) + .then(rs => { + if (cacheItem.resultAdapter === null) { + cacheItem.resultAdapter = ResultMapper.getSelectAdapter(this.info, rs); + } + return new Result(rs, this.info, cacheItem.resultAdapter); + }); + } + + /** + * Gets a function to be used to execute INSERT the query using the document. 
+ * @param {Object} doc + * @param {{ifNotExists, ttl, fields}} docInfo + * @return {Promise} + */ + getInsertExecutor(doc, docInfo) { + const docKeys = Object.keys(doc); + if (docKeys.length === 0) { + return Promise.reject(new Error('Expected object with keys')); + } + + const cacheKey = Cache.getInsertKey(docKeys, docInfo); + const cacheItem = this._cache.insert.getOrCreate(cacheKey, () => ({ executor: null })); + + if (cacheItem.executor !== null) { + return Promise.resolve(cacheItem.executor); + } + + return this.createInsertQueries(docKeys, doc, docInfo) + .then(queries => { + if (queries.length === 1) { + return this._setSingleExecutor(cacheItem, queries[0]); + } + + return this._setBatchExecutor(cacheItem, queries); + }); + } + + /** + * Creates an Array containing the query and the params getter function for each table affected by the INSERT. + * @param {Array} docKeys + * @param {Object} doc + * @param {{ifNotExists, ttl, fields}} docInfo + * @returns {Promise>} + */ + createInsertQueries(docKeys, doc, docInfo) { + const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); + const ifNotExists = docInfo && docInfo.ifNotExists; + + // Get all the tables affected + return this._client.connect() + .then(() => ObjectSelector.getForInsert(this._client, this.info, propertiesInfo)) + .then(tables => { + + if (tables.length > 1 && ifNotExists) { + throw new Error('Batch with ifNotExists conditions cannot span multiple tables'); + } + + // For each tables affected, Generate query and parameter getters + return tables.map(table => + QueryGenerator.getInsert(table, this.info.keyspace, propertiesInfo, docInfo,ifNotExists)); + }); + } + + /** + * Gets a function to be used to execute the UPDATE queries with the provided document. 
+ * @param {Object} doc + * @param {{ifExists, when, ttl, fields}} docInfo + * @return {Promise} + */ + getUpdateExecutor(doc, docInfo) { + const docKeys = Object.keys(doc); + if (docKeys.length === 0) { + return Promise.reject(new Error('Expected object with keys')); + } + + const cacheKey = Cache.getUpdateKey(docKeys, doc, docInfo); + const cacheItem = this._cache.update.getOrCreate(cacheKey, () => ({ executor: null })); + + if (cacheItem.executor !== null) { + return Promise.resolve(cacheItem.executor); + } + + return this.createUpdateQueries(docKeys, doc, docInfo) + .then(queries => { + if (queries.length === 1) { + return this._setSingleExecutor(cacheItem, queries[0]); + } + + return this._setBatchExecutor(cacheItem, queries); + }); + } + + /** + * Creates an Array containing the query and the params getter function for each table affected by the UPDATE. + * @param {Array} docKeys + * @param {Object} doc + * @param {Object} docInfo + * @returns {Promise>} + */ + createUpdateQueries(docKeys, doc, docInfo) { + const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); + const ifExists = docInfo && docInfo.ifExists; + const when = docInfo && docInfo.when + ? 
DocInfoAdapter.getPropertiesInfo(Object.keys(docInfo.when), null, docInfo.when, this.info) + : utils.emptyArray; + + if (when.length > 0 && ifExists) { + throw new Error('Both when and ifExists conditions can not be applied to the same statement'); + } + + // Get all the tables affected + return this._client.connect() + .then(() => ObjectSelector.getForUpdate(this._client, this.info, propertiesInfo, when)) + .then(tables => { + + if (tables.length > 1 && (when.length > 0 || ifExists)) { + throw new Error('Batch with when or ifExists conditions cannot span multiple tables'); + } + + // For each table affected, Generate query and parameter getters + return tables.map(table => + QueryGenerator.getUpdate(table, this.info.keyspace, propertiesInfo, docInfo, when, ifExists)); + }); + } + + /** + * Gets a function to be used to execute the DELETE queries with the provided document. + * @param {Object} doc + * @param {{when, ifExists, fields, deleteOnlyColumns}} docInfo + * @return {Promise} + */ + getDeleteExecutor(doc, docInfo) { + const docKeys = Object.keys(doc); + if (docKeys.length === 0) { + return Promise.reject(new Error('Expected object with keys')); + } + + const cacheKey = Cache.getRemoveKey(docKeys, doc, docInfo); + const cacheItem = this._cache.remove.getOrCreate(cacheKey, () => ({ executor: null })); + + if (cacheItem.executor !== null) { + return Promise.resolve(cacheItem.executor); + } + + return this.createDeleteQueries(docKeys, doc, docInfo) + .then(queries => { + if (queries.length === 1) { + return this._setSingleExecutor(cacheItem, queries[0]); + } + + return this._setBatchExecutor(cacheItem, queries); + }); + } + + /** + * Creates an Array containing the query and the params getter function for each table affected by the DELETE. 
+ * @param {Array} docKeys + * @param {Object} doc + * @param {{when, ifExists, fields, deleteOnlyColumns}} docInfo + * @returns {Promise>} + */ + createDeleteQueries(docKeys, doc, docInfo) { + const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); + const ifExists = docInfo && docInfo.ifExists; + const when = docInfo && docInfo.when + ? DocInfoAdapter.getPropertiesInfo(Object.keys(docInfo.when), null, docInfo.when, this.info) + : utils.emptyArray; + + if (when.length > 0 && ifExists) { + throw new Error('Both when and ifExists conditions can not be applied to the same statement'); + } + + // Get all the tables affected + return this._client.connect() + .then(() => ObjectSelector.getForDelete(this._client, this.info, propertiesInfo, when)) + .then(tables => { + + if (tables.length > 1 && (when.length > 0 || ifExists)) { + throw new Error('Batch with when or ifExists conditions cannot span multiple tables'); + } + + // For each tables affected, Generate query and parameter getters + return tables.map(table => + QueryGenerator.getDelete(table, this.info.keyspace, propertiesInfo, docInfo, when, ifExists)); + }); + } + + getExecutorFromQuery(query, paramsHandler, commonExecutionOptions) { + // Use the current instance in the closure + // as there is no guarantee of how the returned function will be invoked + const self = this; + const commonOptions = commonExecutionOptions ? DocInfoAdapter.adaptAllOptions(commonExecutionOptions) : null; + + return (function queryMappedExecutor(doc, executionOptions) { + // When the executionOptions were already specified, + // use it and skip the ones provided in each invocation + const options = commonOptions + ? 
commonOptions + : DocInfoAdapter.adaptAllOptions(executionOptions); + + return self._client.execute(query, paramsHandler(doc), options).then(rs => { + // Cache the resultAdapter based on the query + let resultAdapter = self._cache.customQueries.get(query); + + if (resultAdapter === undefined) { + const resultAdapterInfo = ResultMapper.getCustomQueryAdapter(self.info, rs); + resultAdapter = resultAdapterInfo.fn; + if (resultAdapterInfo.canCache) { + // Avoid caching conditional updates results as the amount of columns change + // depending on the parameter values. + self._cache.customQueries.set(query, resultAdapter); + + if (self._cache.customQueries.size === cacheHighWaterMark) { + self._client.log('warning', + `Custom queries cache reached ${cacheHighWaterMark} items, this could be caused by ` + + `hard-coding parameter values inside the query, which should be avoided`); + } + } + } + + return new Result(rs, self.info, resultAdapter); + }); + }); + } + + _setSingleExecutor(cacheItem, queryInfo) { + // Parameters and this instance are part of the closure + const self = this; + + // Set the function to execute the request in the cache + cacheItem.executor = function singleExecutor(doc, docInfo, executionOptions) { + const options = DocInfoAdapter.adaptOptions(executionOptions, queryInfo.isIdempotent); + + return self._client.execute(queryInfo.query, queryInfo.paramsGetter(doc, docInfo, self.info), options) + .then(rs => new Result(rs, self.info, ResultMapper.getMutationAdapter(rs))); + }; + + return cacheItem.executor; + } + + _setBatchExecutor(cacheItem, queries) { + // Parameters and the following fields are part of the closure + const self = this; + const isIdempotent = queries.reduce((acc, q) => acc && q.isIdempotent, true); + + // Set the function to execute the batch request in the cache + cacheItem.executor = function batchExecutor(doc, docInfo, executionOptions) { + // Use the params getter function to obtain the parameters each time + const queryAndParams 
= queries.map(q => ({ + query: q.query, + params: q.paramsGetter(doc, docInfo, self.info) + })); + + const options = DocInfoAdapter.adaptOptions(executionOptions, isIdempotent); + + // Execute using a Batch + return self._client.batch(queryAndParams, options) + .then(rs => new Result(rs, self.info, ResultMapper.getMutationAdapter(rs))); + }; + + return cacheItem.executor; + } + + _validateCacheLength(length) { + if (length !== cacheHighWaterMark) { + return; + } + + this._client.log('warning', `ModelMapper cache reached ${cacheHighWaterMark} items, this could be caused by ` + + `building the object to map in different ways (with different shapes) each time. Use the same or few object ` + + `structures for a model and represent unset values with undefined or types.unset`); + } +} + +module.exports = MappingHandler; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/model-batch-item.js b/node_modules/cassandra-driver/lib/mapping/model-batch-item.js new file mode 100644 index 0000000..f04528e --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/model-batch-item.js @@ -0,0 +1,191 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const Cache = require('./cache'); + +/** + * Represents a query or a set of queries used to perform a mutation in a batch. 
+ * @alias module:mapping~ModelBatchItem + */ +class ModelBatchItem { + /** + * @param {Object} doc + * @param {Object} docInfo + * @param {MappingHandler} handler + * @param {Tree} cache + */ + constructor(doc, docInfo, handler, cache) { + this.doc = doc; + this.docInfo = docInfo; + this.handler = handler; + this.cache = cache; + } + + /** + * @ignore + * @returns > + */ + getQueries() { + const docKeys = Object.keys(this.doc); + const cacheItem = this.cache.getOrCreate(this.getCacheKey(docKeys), () => ({ queries: null })); + + if (cacheItem.queries === null) { + cacheItem.queries = this.createQueries(docKeys); + } + + return cacheItem.queries; + } + + /** + * Gets the cache key for this item. + * @abstract + * @param {Array} docKeys + * @returns {Iterator} + */ + getCacheKey(docKeys) { + throw new Error('getCacheKey must be implemented'); + } + + /** + * Gets the Promise to create the queries. + * @abstract + * @param {Array} docKeys + * @returns {Promise} + */ + createQueries(docKeys) { + throw new Error('getCacheKey must be implemented'); + } + + /** + * Pushes the queries and parameters represented by this instance to the provided array. + * @internal + * @ignore + * @param {Array} arr + * @return {Promise<{isIdempotent, isCounter}>} + */ + pushQueries(arr) { + let isIdempotent = true; + let isCounter; + + return this.getQueries().then(queries => { + queries.forEach(q => { + // It's idempotent if all the queries contained are idempotent + isIdempotent = isIdempotent && q.isIdempotent; + + // Either all queries are counter mutation or we let it fail at server level + isCounter = q.isCounter; + + arr.push({ query: q.query, params: q.paramsGetter(this.doc, this.docInfo, this.getMappingInfo()) }); + }); + + return { isIdempotent, isCounter }; + }); + } + + /** + * Gets the mapping information for this batch item. 
+ * @internal + * @ignore + */ + getMappingInfo() { + return this.handler.info; + } +} + +/** + * Represents a single or a set of INSERT queries in a batch. + * @ignore + * @internal + */ +class InsertModelBatchItem extends ModelBatchItem { + /** + * @param {Object} doc + * @param {Object} docInfo + * @param {MappingHandler} handler + * @param {Tree} cache + */ + constructor(doc, docInfo, handler, cache) { + super(doc, docInfo, handler, cache); + } + + /** @override */ + getCacheKey(docKeys) { + return Cache.getInsertKey(docKeys, this.docInfo); + } + + /** @override */ + createQueries(docKeys) { + return this.handler.createInsertQueries(docKeys, this.doc, this.docInfo); + } +} + +/** + * Represents a single or a set of UPDATE queries in a batch. + * @ignore + * @internal + */ +class UpdateModelBatchItem extends ModelBatchItem { + /** + * @param {Object} doc + * @param {Object} docInfo + * @param {MappingHandler} handler + * @param {Tree} cache + */ + constructor(doc, docInfo, handler, cache) { + super(doc, docInfo, handler, cache); + } + + /** @override */ + getCacheKey(docKeys) { + return Cache.getUpdateKey(docKeys, this.doc, this.docInfo); + } + + /** @override */ + createQueries(docKeys) { + return this.handler.createUpdateQueries(docKeys, this.doc, this.docInfo); + } +} + +/** + * Represents a single or a set of DELETE queries in a batch. 
+ * @ignore + * @internal + */ +class RemoveModelBatchItem extends ModelBatchItem { + /** + * @param {Object} doc + * @param {Object} docInfo + * @param {MappingHandler} handler + * @param {Tree} cache + */ + constructor(doc, docInfo, handler, cache) { + super(doc, docInfo, handler, cache); + } + + /** @override */ + getCacheKey(docKeys) { + return Cache.getRemoveKey(docKeys, this.doc, this.docInfo); + } + + /** @override */ + createQueries(docKeys) { + return this.handler.createDeleteQueries(docKeys, this.doc, this.docInfo); + } +} + +module.exports = { ModelBatchItem, InsertModelBatchItem, UpdateModelBatchItem, RemoveModelBatchItem }; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/model-batch-mapper.js b/node_modules/cassandra-driver/lib/mapping/model-batch-mapper.js new file mode 100644 index 0000000..4928072 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/model-batch-mapper.js @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const Tree = require('./tree'); +const moduleBatchItemModule = require('./model-batch-item'); +const InsertModelBatchItem = moduleBatchItemModule.InsertModelBatchItem; +const UpdateModelBatchItem = moduleBatchItemModule.UpdateModelBatchItem; +const RemoveModelBatchItem = moduleBatchItemModule.RemoveModelBatchItem; + +/** + * Provides utility methods to group multiple mutations on a single batch. 
+ * @alias module:mapping~ModelBatchMapper + */ +class ModelBatchMapper { + /** + * Creates a new instance of model batch mapper. + *

+ * An instance of this class is exposed as a singleton in the batching field of the + * [ModelMapper]{@link module:mapping~ModelMapper}. Note that new instances should not be create with this + * constructor. + *

+ * @param {MappingHandler} handler + * @ignore + */ + constructor(handler) { + this._handler = handler; + this._cache = { + insert: new Tree(), + update: new Tree(), + remove: new Tree() + }; + } + + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the INSERT mutation to be + * used in a batch execution. + * @param {Object} doc An object containing the properties to insert. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * INSERT cql statements generated. If specified, it must include the columns to insert and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifNotExists] When set, it only inserts if the row does not exist prior to the insertion. + *

Please note that using IF NOT EXISTS will incur a non negligible performance cost so this should be used + * sparingly.

+ * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + insert(doc, docInfo) { + return new InsertModelBatchItem(doc, docInfo, this._handler, this._cache.insert); + } + + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the UPDATE mutation to be + * used in a batch execution. + * @param {Object} doc An object containing the properties to update. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * UPDATE cql statements generated. If specified, it must include the columns to update and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifExists] When set, it only updates if the row already exists on the server. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the UPDATE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + update(doc, docInfo) { + return new UpdateModelBatchItem(doc, docInfo, this._handler, this._cache.update); + } + + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the DELETE mutation to be + * used in a batch execution. + * @param {Object} doc A document containing the primary keys values of the document to delete. + * @param {Object} [docInfo] An object containing the additional doc information. + * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the DELETE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + * When the CQL query is generated, this would be used to generate the `IF` clause. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Boolean} [docInfo.ifExists] When set, it only issues the DELETE command if the row already exists on the + * server. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * DELETE cql statement generated. If specified, it must include the columns to delete and the primary keys. + * @param {Boolean} [docInfo.deleteOnlyColumns] Determines that, when more document properties are specified + * besides the primary keys, the generated DELETE statement should be used to delete some column values but leave + * the row. When this is enabled and more properties are specified, a DELETE statement will have the following form: + * "DELETE col1, col2 FROM table1 WHERE pk1 = ? AND pk2 = ?" + * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + remove(doc, docInfo) { + return new RemoveModelBatchItem(doc, docInfo, this._handler, this._cache.update); + } +} + +module.exports = ModelBatchMapper; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/model-mapper.js b/node_modules/cassandra-driver/lib/mapping/model-mapper.js new file mode 100644 index 0000000..8a4d041 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/model-mapper.js @@ -0,0 +1,306 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const ModelBatchMapper = require('./model-batch-mapper'); + +/** + * Represents an object mapper for a specific model. 
+ * @alias module:mapping~ModelMapper + */ +class ModelMapper { + constructor(name, handler) { + /** + * Gets the name identifier of the model. + * @type {String} + */ + this.name = name; + this._handler = handler; + /** + * Gets a [ModelBatchMapper]{@link module:mapping~ModelBatchMapper} instance containing utility methods to group + * multiple doc mutations in a single batch. + * @type {ModelBatchMapper} + */ + this.batching = new ModelBatchMapper(this._handler); + } + + /** + * Gets the first document matching the provided filter or null when not found. + *

+ * Note that all partition and clustering keys must be defined in order to use this method. + *

+ * @param {Object} doc The object containing the properties that map to the primary keys. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @return {Promise} + * @example Get a video by id + * videoMapper.get({ id }) + * @example Get a video by id, selecting specific columns + * videoMapper.get({ id }, fields: ['name', 'description']) + */ + get(doc, docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + return this._handler.getSelectExecutor(doc, docInfo, true) + .then(executor => executor(doc, docInfo, executionOptions)) + .then(result => result.first()); + } + + /** + * Executes a SELECT query based on the filter and returns the result as an iterable of documents. + * @param {Object} doc An object containing the properties that map to the primary keys to filter. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object} [docInfo.orderBy] An associative array containing the column names as key and + * the order string (asc or desc) as value used to set the order of the results server-side. + * @param {Number} [docInfo.limit] Restricts the result of the query to a maximum number of rows on the + * server. 
+ * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] The amount of rows to retrieve per page. + * @param {Number} [executionOptions.pageState] A Buffer instance or a string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Get user's videos + * const result = await videoMapper.find({ userId }); + * for (let video of result) { + * console.log(video.name); + * } + * @example Get user's videos from a certain date + * videoMapper.find({ userId, addedDate: q.gte(date)}); + * @example Get user's videos in reverse order + * videoMapper.find({ userId }, { orderBy: { addedDate: 'desc' }}); + */ + find(doc, docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + return this._handler.getSelectExecutor(doc, docInfo, false) + .then(executor => executor(doc, docInfo, executionOptions)); + } + + /** + * Executes a SELECT query without a filter and returns the result as an iterable of documents. + *

+ * This is only recommended to be used for tables with a limited amount of results. Otherwise, breaking up the + * token ranges on the client side should be used. + *

+ * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object} [docInfo.orderBy] An associative array containing the column names as key and + * the order string (asc or desc) as value used to set the order of the results server-side. + * @param {Number} [docInfo.limit] Restricts the result of the query to a maximum number of rows on the + * server. + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] The mount of rows to retrieve per page. + * @param {Number} [executionOptions.pageState] A Buffer instance or a string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + */ + findAll(docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + const executor = this._handler.getSelectAllExecutor(docInfo); + return executor(docInfo, executionOptions); + } + + /** + * Inserts a document. + *

+ * When the model is mapped to multiple tables, it will insert a row in each table when all the primary keys + * are specified. + *

+ * @param {Object} doc An object containing the properties to insert. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * INSERT cql statements generated. If specified, it must include the columns to insert and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifNotExists] When set, it only inserts if the row does not exist prior to the insertion. + *

Please note that using IF NOT EXISTS will incur a non negligible performance cost so this should be used + * sparingly.

+ * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * By default all generated INSERT statements are considered idempotent, except in the case of lightweight + * transactions. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client generated and the server side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Insert a video + * videoMapper.insert({ id, name }); + */ + insert(doc, docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + return this._handler.getInsertExecutor(doc, docInfo) + .then(executor => executor(doc, docInfo, executionOptions)); + } + + /** + * Updates a document. + *

+ * When the model is mapped to multiple tables, it will update a row in each table when all the primary keys + * are specified. + *

+ * @param {Object} doc An object containing the properties to update. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * UPDATE cql statements generated. If specified, it must include the columns to update and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifExists] When set, it only updates if the row already exists on the server. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the UPDATE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * The mapper uses the generated queries to determine the default value. When an UPDATE is generated with a + * counter column or appending/prepending to a list column, the execution is marked as not idempotent. + *

+ *

+ * Additionally, the mapper uses the safest approach for queries with lightweight transactions (Compare and + * Set) by considering them as non-idempotent. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client generated and the server side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Update the name of a video + * videoMapper.update({ id, name }); + */ + update(doc, docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + return this._handler.getUpdateExecutor(doc, docInfo) + .then(executor => executor(doc, docInfo, executionOptions)); + } + + /** + * Deletes a document. + * @param {Object} doc A document containing the primary keys values of the document to delete. + * @param {Object} [docInfo] An object containing the additional doc information. + * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the DELETE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + * When the CQL query is generated, this would be used to generate the `IF` clause. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Boolean} [docInfo.ifExists] When set, it only issues the DELETE command if the row already exists on the + * server. + *

+ * Please note that using IF conditions will incur a non negligible performance cost on the server-side so this + * should be used sparingly. + *

+ * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * DELETE cql statement generated. If specified, it must include the columns to delete and the primary keys. + * @param {Boolean} [docInfo.deleteOnlyColumns] Determines that, when more document properties are specified + * besides the primary keys, the generated DELETE statement should be used to delete some column values but leave + * the row. When this is enabled and more properties are specified, a DELETE statement will have the following form: + * "DELETE col1, col2 FROM table1 WHERE pk1 = ? AND pk2 = ?" + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * By default all generated DELETE statements are considered idempotent, except in the case of lightweight + * transactions. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client generated and the server side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Delete a video + * videoMapper.remove({ id }); + */ + remove(doc, docInfo, executionOptions) { + if (executionOptions === undefined && typeof docInfo === 'string') { + executionOptions = docInfo; + docInfo = null; + } + + return this._handler.getDeleteExecutor(doc, docInfo) + .then(executor => executor(doc, docInfo, executionOptions)); + } + + /** + * Uses the provided query and param getter function to execute a query and map the results. + * Gets a function that takes the document, executes the query and returns the mapped results. + * @param {String} query The query to execute. + * @param {Function} paramsHandler The function to execute to extract the parameters of a document. + * @param {Object|String} [executionOptions] When provided, the options for all executions generated with this + * method will use the provided options and it will not consider the executionOptions per call. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] Amount of rows to retrieve per page. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times + * without changing the result beyond the initial application. + * @param {Number} [executionOptions.pageState] Buffer or string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client generated and the server side assigned timestamp.

+ * @return {Function} Returns a function that takes the document and execution options as parameters and returns a + * Promise the resolves to a [Result]{@link module:mapping~Result} instance. + */ + mapWithQuery(query, paramsHandler, executionOptions) { + return this._handler.getExecutorFromQuery(query, paramsHandler, executionOptions); + } +} + +module.exports = ModelMapper; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/model-mapping-info.js b/node_modules/cassandra-driver/lib/mapping/model-mapping-info.js new file mode 100644 index 0000000..23be040 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/model-mapping-info.js @@ -0,0 +1,194 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const tableMappingsModule = require('./table-mappings'); +const TableMappings = tableMappingsModule.TableMappings; +const DefaultTableMappings = tableMappingsModule.DefaultTableMappings; + +/** + * Represents the parsed user information of the table mappings of a model. 
+ * @ignore + */ +class ModelMappingInfo { + /** + * @param {String} keyspace + * @param {Array<{name, isView}>} tables + * @param {TableMappings} mappings + * @param {Map} columns + */ + constructor(keyspace, tables, mappings, columns) { + this.keyspace = keyspace; + this.tables = tables; + this._mappings = mappings; + this._columns = columns; + + // Define a map of column information per property name + /** @type {Map} */ + this._documentProperties = new Map(); + for (const modelColumnInfo of columns.values()) { + this._documentProperties.set(modelColumnInfo.propertyName, modelColumnInfo); + } + } + + getColumnName(propName) { + const modelColumnInfo = this._documentProperties.get(propName); + if (modelColumnInfo !== undefined) { + // There is an specific name transformation between the column name and the property name + return modelColumnInfo.columnName; + } + // Rely on the TableMappings (i.e. maybe there is a convention defined for this property) + return this._mappings.getColumnName(propName); + } + + getPropertyName(columnName) { + const modelColumnInfo = this._columns.get(columnName); + if (modelColumnInfo !== undefined) { + // There is an specific name transformation between the column name and the property name + return modelColumnInfo.propertyName; + } + // Rely on the TableMappings (i.e. maybe there is a convention defined for this column) + return this._mappings.getPropertyName(columnName); + } + + getFromModelFn(propName) { + const modelColumnInfo = this._documentProperties.get(propName); + return modelColumnInfo !== undefined ? modelColumnInfo.fromModel : null; + } + + getToModelFn(columnName) { + const modelColumnInfo = this._columns.get(columnName); + return modelColumnInfo !== undefined ? modelColumnInfo.toModel : null; + } + + newInstance() { + return this._mappings.newObjectInstance(); + } + + /** + * Parses the user options into a map of model names and ModelMappingInfo. 
+ * @param {MappingOptions} options + * @param {String} currentKeyspace + * @returns {Map} + */ + static parse(options, currentKeyspace) { + const result = new Map(); + if (!options || !options.models) { + return result; + } + + Object.keys(options.models).forEach(modelName => { + const modelOptions = options.models[modelName]; + result.set(modelName, ModelMappingInfo._create(modelName, currentKeyspace, modelOptions)); + }); + + return result; + } + + static _create(modelName, currentKeyspace, modelOptions) { + if (!currentKeyspace && (!modelOptions || !modelOptions.keyspace)) { + throw new Error( + 'You should specify the keyspace of the model in the MappingOptions when the Client is not using a keyspace'); + } + + if (!modelOptions) { + return ModelMappingInfo.createDefault(modelName, currentKeyspace); + } + + let tables; + + if (modelOptions.tables && modelOptions.tables.length > 0) { + tables = modelOptions.tables.map(item => { + const table = { name: null, isView: false }; + if (typeof item === 'string') { + table.name = item; + } else if (item) { + table.name = item.name; + table.isView = !!item.isView; + } + + if (!table.name) { + throw new Error(`Table name not specified for model '${modelName}'`); + } + + return table; + }); + } else { + tables = [ { name: modelName, isView: false }]; + } + + if (modelOptions.mappings && !(modelOptions.mappings instanceof TableMappings)) { + throw new Error('mappings should be an instance of TableMappings'); + } + + const columns = new Map(); + if (modelOptions.columns !== null && typeof modelOptions.columns === 'object') { + Object.keys(modelOptions.columns).forEach(columnName => { + columns.set(columnName, ModelColumnInfo.parse(columnName, modelOptions.columns[columnName])); + }); + } + + return new ModelMappingInfo( + modelOptions.keyspace || currentKeyspace, + tables, + modelOptions.mappings || new DefaultTableMappings(), + columns + ); + } + + static createDefault(modelName, currentKeyspace) { + return new 
ModelMappingInfo( + currentKeyspace, + [ { name: modelName, isView: false }], + new DefaultTableMappings(), + new Map()); + } +} + +class ModelColumnInfo { + constructor(columnName, propertyName, toModel, fromModel) { + this.columnName = columnName; + this.propertyName = propertyName; + + if (toModel && typeof toModel !== 'function') { + throw new TypeError(`toModel type for property '${propertyName}' should be a function (obtained ${ + typeof toModel})`); + } + + if (fromModel && typeof fromModel !== 'function') { + throw new TypeError(`fromModel type for property '${propertyName}' should be a function (obtained ${ + typeof fromModel})`); + } + + this.toModel = toModel; + this.fromModel = fromModel; + } + + static parse(columnName, value) { + if (!value) { + return new ModelColumnInfo(columnName, columnName); + } + + if (typeof value === 'string') { + return new ModelColumnInfo(columnName, value); + } + + return new ModelColumnInfo(columnName, value.name || columnName, value.toModel, value.fromModel); + } +} + +module.exports = ModelMappingInfo; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/object-selector.js b/node_modules/cassandra-driver/lib/mapping/object-selector.js new file mode 100644 index 0000000..de25df6 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/object-selector.js @@ -0,0 +1,321 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const keyMatches = { + all: 1, + none: 0, + some: -1 +}; + +/** + * Provides utility methods to choose the correct tables and views that should be included in a statement. + * @ignore + */ +class ObjectSelector { + /** + * Gets the table/view that should be used to execute the SELECT query. + * @param {Client} client + * @param {ModelMappingInfo} info + * @param {Boolean} allPKsDefined + * @param {Array} propertiesInfo + * @param {Array} fieldsInfo + * @param {Array>} orderByColumns + * @return {Promise} A promise that resolves to a table names. + */ + static getForSelect(client, info, allPKsDefined, propertiesInfo, fieldsInfo, orderByColumns) { + return Promise.all( + info.tables.map(t => { + if (t.isView) { + return client.metadata.getMaterializedView(info.keyspace, t.name); + } + return client.metadata.getTable(info.keyspace, t.name); + })) + .then(tables => { + for (let i = 0; i < tables.length; i++) { + const table = tables[i]; + if (table === null) { + throw new Error(`Table "${info.tables[i].name}" could not be retrieved`); + } + + if (keysAreIncluded(table.partitionKeys, propertiesInfo) !== keyMatches.all) { + // Not all the partition keys are covered + continue; + } + + + if (allPKsDefined) { + if (keysAreIncluded(table.clusteringKeys, propertiesInfo) !== keyMatches.all) { + // All clustering keys should be included as allPKsDefined flag is set + continue; + } + } + + if (propertiesInfo.length > table.partitionKeys.length) { + // Check that the Where clause is composed by partition and clustering keys + const allPropertiesArePrimaryKeys = propertiesInfo + .reduce( + (acc, p) => acc && ( + contains(table.partitionKeys, c => c.name === p.columnName) || + contains(table.clusteringKeys, c => c.name === p.columnName) + ), + true); + + if (!allPropertiesArePrimaryKeys) { + continue; + } + } + + // All fields must be contained + const containsAllFields = fieldsInfo + .reduce((acc, p) => acc && table.columnsByName[p.columnName] !== 
undefined, true); + + if (!containsAllFields) { + continue; + } + + // CQL: + // - "ORDER BY" is currently only supported on the clustered columns of the PRIMARY KEY + // - "ORDER BY" currently only support the ordering of columns following their declared order in + // the PRIMARY KEY + // + // In the mapper, we validate that the ORDER BY columns appear in the same order as in the clustering keys + const containsAllOrderByColumns = orderByColumns + .reduce((acc, order, index) => { + if (!acc) { + return false; + } + + const ck = table.clusteringKeys[index]; + + return ck && ck.name === order[0]; + }, true); + + if (!containsAllOrderByColumns) { + continue; + } + + return table.name; + } + + let message = `No table matches the filter (${allPKsDefined ? 'all PKs have to be specified' : 'PKs'}): [${ + propertiesInfo.map(p => p.columnName)}]`; + + if (fieldsInfo.length > 0) { + message += `; fields: [${fieldsInfo.map(p => p.columnName)}]`; + } + if (orderByColumns.length > 0) { + message += `; orderBy: [${orderByColumns.map(item => item[0])}]`; + } + + throw new Error(message); + }); + } + + /** Returns the name of the first table */ + static getForSelectAll(info) { + return info.tables[0].name; + } + + /** + * Gets the tables that should be used to execute the INSERT query. + * @param {Client} client + * @param {ModelMappingInfo} info + * @param {Array} propertiesInfo + * @return {Promise>} A promise that resolves to an Array of tables. 
+ */ + static getForInsert(client, info, propertiesInfo) { + return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) + .then(tables => { + const filteredTables = tables + .filter((table, i) => { + if (table === null) { + throw new Error(`Table "${info.tables[i].name}" could not be retrieved`); + } + + if (keysAreIncluded(table.partitionKeys, propertiesInfo) !== keyMatches.all) { + // Not all the partition keys are covered + return false; + } + + const clusteringKeyMatches = keysAreIncluded(table.clusteringKeys, propertiesInfo); + + // All clustering keys should be included or it can be inserting a static column value + if (clusteringKeyMatches === keyMatches.all) { + return true; + } + + if (clusteringKeyMatches === keyMatches.some) { + return false; + } + + const staticColumns = staticColumnCount(table); + return propertiesInfo.length === table.partitionKeys.length + staticColumns && staticColumns > 0; + }); + + if (filteredTables.length === 0) { + throw new Error(`No table matches (all PKs have to be specified) fields: [${ + propertiesInfo.map(p => p.columnName)}]`); + } + + return filteredTables; + }); + } + + /** + * Gets the tables that should be used to execute the UPDATE query. + * @param {Client} client + * @param {ModelMappingInfo} info + * @param {Array} propertiesInfo + * @param {Array} when + * @return {Promise>} A promise that resolves to an Array of tables. 
+ */ + static getForUpdate(client, info, propertiesInfo, when) { + return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) + .then(tables => { + const filteredTables = tables + .filter((table, i) => { + if (table === null) { + throw new Error(`Table "${info.tables[i].name}" could not be retrieved`); + } + + if (keysAreIncluded(table.partitionKeys, propertiesInfo) !== keyMatches.all) { + // Not all the partition keys are covered + return false; + } + + const clusteringKeyMatches = keysAreIncluded(table.clusteringKeys, propertiesInfo); + + // All clustering keys should be included or it can be updating a static column value + if (clusteringKeyMatches === keyMatches.some) { + return false; + } + + if (clusteringKeyMatches === keyMatches.none && !hasStaticColumn(table)) { + return false; + } + + const applicableColumns = propertiesInfo + .reduce((acc, p) => acc + (table.columnsByName[p.columnName] !== undefined ? 1 : 0), 0); + + if (applicableColumns <= table.partitionKeys.length + table.clusteringKeys.length) { + if (!hasStaticColumn(table) || applicableColumns <= table.partitionKeys.length) { + // UPDATE statement does not contain columns to SET + return false; + } + } + + // "when" conditions should be contained in the table + return when.reduce((acc, p) => acc && table.columnsByName[p.columnName] !== undefined, true); + }); + + if (filteredTables.length === 0) { + let message = `No table matches (all PKs and columns to set have to be specified) fields: [${ + propertiesInfo.map(p => p.columnName)}]`; + + if (when.length > 0) { + message += `; condition: [${when.map(p => p.columnName)}]`; + } + + throw new Error(message); + } + + return filteredTables; + }); + } + + /** + * Gets the tables that should be used to execute the DELETE query. 
+ * @param {Client} client + * @param {ModelMappingInfo} info + * @param {Array} propertiesInfo + * @param {Array} when + * @return {Promise>} A promise that resolves to an Array of tables. + */ + static getForDelete(client, info, propertiesInfo, when) { + return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) + .then(tables => { + const filteredTables = tables + .filter((table, i) => { + if (table === null) { + throw new Error(`Table "${info.tables[i].name}" could not be retrieved`); + } + + // All partition and clustering keys from the table should be included in the document + const keyNames = table.partitionKeys.concat(table.clusteringKeys).map(k => k.name); + const columns = propertiesInfo.map(p => p.columnName); + + for (let i = 0; i < keyNames.length; i++) { + if (columns.indexOf(keyNames[i]) === -1) { + return false; + } + } + + // "when" conditions should be contained in the table + return when.reduce((acc, p) => acc && table.columnsByName[p.columnName] !== undefined, true); + }); + + if (filteredTables.length === 0) { + let message = `No table matches (all PKs have to be specified) fields: [${ + propertiesInfo.map(p => p.columnName)}]`; + + if (when.length > 0) { + message += `; condition: [${when.map(p => p.columnName)}]`; + } + + throw new Error(message); + } + + return filteredTables; + }); + } +} + +function contains(arr, fn) { + return arr.filter(fn).length > 0; +} + +/** + * Returns the amount of matches for a given key + * @private + * @param {Array} keys + * @param {Array} propertiesInfo + */ +function keysAreIncluded(keys, propertiesInfo) { + if (keys.length === 0) { + return keyMatches.all; + } + + // Filtering by name might look slow / ineffective to using hash maps + // but we expect `keys` and `propertiesInfo` to contain only few items + const matches = propertiesInfo.reduce((acc, p) => acc + (contains(keys, k => p.columnName === k.name) ? 
1 : 0), 0); + if (matches === 0) { + return keyMatches.none; + } + + return matches === keys.length ? keyMatches.all : keyMatches.some; +} + +function hasStaticColumn(table) { + return staticColumnCount(table) > 0; +} + +function staticColumnCount(table) { + return table.columns.reduce((acc, column) => acc + (column.isStatic ? 1 : 0), 0); +} + +module.exports = ObjectSelector; diff --git a/node_modules/cassandra-driver/lib/mapping/q.js b/node_modules/cassandra-driver/lib/mapping/q.js new file mode 100644 index 0000000..7e4c8ce --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/q.js @@ -0,0 +1,154 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const errors = require('../errors'); + +/** + * Represents a CQL query operator, like >=, IN, <, ... + * @ignore + */ +class QueryOperator { + /** + * Creates a new instance of QueryOperator. + * @param {String} key + * @param value + * @param [hasChildValues] + * @param [isInOperator] + */ + constructor(key, value, hasChildValues, isInOperator) { + /** + * The CQL key representing the operator + * @type {string} + */ + this.key = key; + + /** + * The value to be used as parameter. + */ + this.value = value; + + /** + * Determines whether a query operator can have child values or operators (AND, OR) + */ + this.hasChildValues = hasChildValues; + + /** + * Determines whether this instance represents CQL "IN" operator. 
+ */ + this.isInOperator = isInOperator; + } +} + +/** + * Represents a CQL assignment operation, like col = col + x. + * @ignore + */ +class QueryAssignment { + constructor(sign, value, inverted) { + /** + * Gets the sign of the assignment operation. + */ + this.sign = sign; + + /** + * Gets the value to be assigned. + */ + this.value = value; + + /** + * Determines whether the assignment should be inverted (prepends), e.g: col = x + col + * @type {boolean} + */ + this.inverted = !!inverted; + } +} + +/** + * Contains functions that represents operators in a query. + * @alias module:mapping~q + * @type {Object} + * @property {function} in_ Represents the CQL operator "IN". + * @property {function} gt Represents the CQL operator greater than ">". + * @property {function} gte Represents the CQL operator greater than or equals to ">=" . + * @property {function} lt Represents the CQL operator less than "<" . + * @property {function} lte Represents the CQL operator less than or equals to "<=" . + * @property {function} notEq Represents the CQL operator not equals to "!=" . 
+ * @property {function} and When applied to a property, it represents two CQL conditions on the same column separated + * by the logical AND operator, e.g: "col1 >= x col < y" + * @property {function} incr Represents the CQL increment assignment used for counters, e.g: "col = col + x" + * @property {function} decr Represents the CQL decrement assignment used for counters, e.g: "col = col - x" + * @property {function} append Represents the CQL append assignment used for collections, e.g: "col = col + x" + * @property {function} prepend Represents the CQL prepend assignment used for lists, e.g: "col = x + col" + * @property {function} remove Represents the CQL remove assignment used for collections, e.g: "col = col - x" + */ +const q = { + in_: function in_(arr) { + if (!Array.isArray(arr)) { + throw new errors.ArgumentError('IN operator supports only Array values'); + } + return new QueryOperator('IN', arr, false, true); + }, + + gt: function gt(value) { + return new QueryOperator('>', value); + }, + + gte: function gte(value) { + return new QueryOperator('>=', value); + }, + + lt: function lt(value) { + return new QueryOperator('<', value); + }, + + lte: function lte(value) { + return new QueryOperator('<=', value); + }, + + notEq: function notEq(value) { + return new QueryOperator('!=', value); + }, + + and: function (condition1, condition2) { + return new QueryOperator('AND', [ condition1, condition2 ], true); + }, + + incr: function incr(value) { + return new QueryAssignment('+', value); + }, + + decr: function decr(value) { + return new QueryAssignment('-', value); + }, + + append: function append(value) { + return new QueryAssignment('+', value); + }, + + prepend: function prepend(value) { + return new QueryAssignment('+', value, true); + }, + + remove: function remove(value) { + return new QueryAssignment('-', value); + } +}; + +exports.q = q; +exports.QueryAssignment = QueryAssignment; +exports.QueryOperator = QueryOperator; \ No newline at end of file diff 
--git a/node_modules/cassandra-driver/lib/mapping/query-generator.js b/node_modules/cassandra-driver/lib/mapping/query-generator.js new file mode 100644 index 0000000..daf5221 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/query-generator.js @@ -0,0 +1,446 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const vm = require('vm'); +const qModule = require('./q'); +const QueryOperator = qModule.QueryOperator; +const QueryAssignment = qModule.QueryAssignment; +const types = require('../types'); +const dataTypes = types.dataTypes; + +const vmFileName = 'gen-param-getter.js'; + +/** + * Provides methods to generate a query and parameter handlers. + * @ignore + */ +class QueryGenerator { + /** + * Gets the SELECT query given the doc. + * @param {String} tableName + * @param {String} keyspace + * @param {Array} propertiesInfo + * @param {Array} fieldsInfo + * @param {Array} orderByColumns + * @param {Number|undefined} limit + * @return {string} + */ + static getSelect(tableName, keyspace, propertiesInfo, fieldsInfo, orderByColumns, limit) { + let query = 'SELECT '; + query += fieldsInfo.length > 0 ? 
fieldsInfo.map(p => p.columnName).join(', ') : '*'; + query += ` FROM ${keyspace}.${tableName}`; + + if (propertiesInfo.length > 0) { + query += ' WHERE '; + query += QueryGenerator._getConditionWithOperators(propertiesInfo); + } + + if (orderByColumns.length > 0) { + query += ' ORDER BY '; + query += orderByColumns.map(order => order[0] + ' ' + order[1]).join(', '); + } + + if (typeof limit === 'number') { + query += ' LIMIT ?'; + } + + return query; + } + + static selectParamsGetter(propertiesInfo, limit) { + let scriptText = '(function getParametersSelect(doc, docInfo, mappingInfo) {\n'; + scriptText += ' return ['; + + scriptText += QueryGenerator._valueGetterExpression(propertiesInfo); + + if (typeof limit === 'number') { + if (propertiesInfo.length > 0) { + scriptText += ', '; + } + scriptText += `docInfo['limit']`; + } + + // Finish return statement + scriptText += '];\n})'; + + const script = new vm.Script(scriptText, { filename: vmFileName }); + return script.runInThisContext(); + } + + /** + * Gets the INSERT query and function to obtain the parameters, given the doc. + * @param {TableMetadata} table + * @param {String} keyspace + * @param {Array} propertiesInfo + * @param {Object} docInfo + * @param {Boolean|undefined} ifNotExists + * @return {{query: String, paramsGetter: Function, isIdempotent: Boolean}} + */ + static getInsert(table, keyspace, propertiesInfo, docInfo, ifNotExists) { + const ttl = docInfo && docInfo.ttl; + + // Not all columns are contained in the table + const filteredPropertiesInfo = propertiesInfo + .filter(pInfo => table.columnsByName[pInfo.columnName] !== undefined); + + return ({ + query: QueryGenerator._getInsertQuery(table.name, keyspace, filteredPropertiesInfo, ifNotExists, ttl), + paramsGetter: QueryGenerator._insertParamsGetter(filteredPropertiesInfo, docInfo), + isIdempotent: !ifNotExists + }); + } + + /** + * Gets the query for an insert statement. 
+ * @param {String} tableName + * @param {String} keyspace + * @param {Array} propertiesInfo + * @param {Boolean} ifNotExists + * @param {Number|undefined} ttl + * @return {String} + */ + static _getInsertQuery(tableName, keyspace, propertiesInfo, ifNotExists, ttl) { + let query = `INSERT INTO ${keyspace}.${tableName} (`; + query += propertiesInfo.map(pInfo => pInfo.columnName).join(', '); + query += ') VALUES ('; + query += propertiesInfo.map(() => '?').join(', '); + query += ')'; + + if (ifNotExists === true) { + query += ' IF NOT EXISTS'; + } + + if (typeof ttl === 'number') { + query += ' USING TTL ?'; + } + return query; + } + + static _insertParamsGetter(propertiesInfo, docInfo) { + let scriptText = '(function getParametersInsert(doc, docInfo, mappingInfo) {\n'; + scriptText += ' return ['; + + scriptText += QueryGenerator._valueGetterExpression(propertiesInfo); + + if (docInfo && typeof docInfo.ttl === 'number') { + scriptText += `, docInfo['ttl']`; + } + + // Finish return statement + scriptText += '];\n})'; + + const script = new vm.Script(scriptText, { filename: vmFileName }); + return script.runInThisContext(); + } + + /** + * Gets the UPDATE query and function to obtain the parameters, given the doc. 
+ * @param {TableMetadata} table + * @param {String} keyspace + * @param {Array} propertiesInfo + * @param {Object} docInfo + * @param {Array} when + * @param {Boolean|undefined} ifExists + * @return {{query: String, paramsGetter: Function, isIdempotent: Boolean, isCounter}} + */ + static getUpdate(table, keyspace, propertiesInfo, docInfo, when, ifExists) { + const ttl = docInfo && docInfo.ttl; + const primaryKeys = new Set(table.partitionKeys.concat(table.clusteringKeys).map(c => c.name)); + let isIdempotent = true; + let isCounter = false; + + // Not all columns are contained in the table + const filteredPropertiesInfo = propertiesInfo.filter(pInfo => { + const column = table.columnsByName[pInfo.columnName]; + if (column === undefined) { + return false; + } + + if (column.type.code === dataTypes.list && pInfo.value instanceof QueryAssignment) { + // Its not idempotent when list append/prepend + isIdempotent = false; + } else if (column.type.code === dataTypes.counter) { + // Any update on a counter table is not idempotent + isIdempotent = false; + isCounter = true; + } + + return true; + }); + + return { + query: QueryGenerator._getUpdateQuery( + table.name, keyspace, primaryKeys, filteredPropertiesInfo, when, ifExists, ttl), + isIdempotent: isIdempotent && when.length === 0 && !ifExists, + paramsGetter: QueryGenerator._updateParamsGetter(primaryKeys, filteredPropertiesInfo, when, ttl), + isCounter + }; + } + + /** + * Gets the query for an UPDATE statement. + * @param {String} tableName + * @param {String} keyspace + * @param {Set} primaryKeys + * @param {Array} propertiesInfo + * @param {Object} when + * @param {Boolean} ifExists + * @param {Number|undefined} ttl + */ + static _getUpdateQuery(tableName, keyspace, primaryKeys, propertiesInfo, when, ifExists, ttl) { + let query = `UPDATE ${keyspace}.${tableName} `; + + if (typeof ttl === 'number') { + query += 'USING TTL ? 
'; + } + + query += 'SET '; + + query += propertiesInfo + .filter(p => !primaryKeys.has(p.columnName)) + .map(p => { + if (p.value instanceof QueryAssignment) { + if (p.value.inverted) { + // e.g: prepend "col1 = ? + col1" + return `${p.columnName} = ? ${p.value.sign} ${p.columnName}`; + } + // e.g: increment "col1 = col1 + ?" + return `${p.columnName} = ${p.columnName} ${p.value.sign} ?`; + } + + return p.columnName + ' = ?'; + }) + .join(', '); + + query += ' WHERE '; + query += propertiesInfo.filter(p => primaryKeys.has(p.columnName)).map(p => p.columnName + ' = ?').join(' AND '); + + if (ifExists === true) { + query += ' IF EXISTS'; + } + else if (when.length > 0) { + query += ' IF ' + QueryGenerator._getConditionWithOperators(when); + } + + return query; + } + + /** + * Returns a function to obtain the parameter values from a doc for an UPDATE statement. + * @param {Set} primaryKeys + * @param {Array} propertiesInfo + * @param {Array} when + * @param {Number|undefined} ttl + * @returns {Function} + */ + static _updateParamsGetter(primaryKeys, propertiesInfo, when, ttl) { + let scriptText = '(function getParametersUpdate(doc, docInfo, mappingInfo) {\n'; + scriptText += ' return ['; + + if (typeof ttl === 'number') { + scriptText += `docInfo['ttl'], `; + } + + // Assignment clause + scriptText += QueryGenerator._assignmentGetterExpression(propertiesInfo.filter(p => !primaryKeys.has(p.columnName))); + scriptText += ', '; + + // Where clause + scriptText += QueryGenerator._valueGetterExpression(propertiesInfo.filter(p => primaryKeys.has(p.columnName))); + + // Condition clause + if (when.length > 0) { + scriptText += ', ' + QueryGenerator._valueGetterExpression(when, 'docInfo.when'); + } + + // Finish return statement + scriptText += '];\n})'; + + const script = new vm.Script(scriptText, { filename: vmFileName }); + return script.runInThisContext(); + } + + /** + * Gets the DELETE query and function to obtain the parameters, given the doc. 
+ * @param {TableMetadata} table + * @param {String} keyspace + * @param {Array} propertiesInfo + * @param {Object} docInfo + * @param {Array} when + * @param {Boolean|undefined} ifExists + * @return {{query: String, paramsGetter: Function, isIdempotent}} + */ + static getDelete(table, keyspace, propertiesInfo, docInfo, when, ifExists) { + const deleteOnlyColumns = docInfo && docInfo.deleteOnlyColumns; + const primaryKeys = new Set(table.partitionKeys.concat(table.clusteringKeys).map(c => c.name)); + + const filteredPropertiesInfo = propertiesInfo + .filter(pInfo => table.columnsByName[pInfo.columnName] !== undefined); + + + return ({ + query: QueryGenerator._getDeleteQuery( + table.name, keyspace, primaryKeys, filteredPropertiesInfo, when, ifExists, deleteOnlyColumns), + paramsGetter: QueryGenerator._deleteParamsGetter(primaryKeys, filteredPropertiesInfo, when), + isIdempotent: when.length === 0 && !ifExists + }); + } + + /** + * Gets the query for an UPDATE statement. + * @param {String} tableName + * @param {String} keyspace + * @param {Set} primaryKeys + * @param {Array} propertiesInfo + * @param {Array} when + * @param {Boolean} ifExists + * @param {Boolean} deleteOnlyColumns + * @private + * @return {String} + */ + static _getDeleteQuery(tableName, keyspace, primaryKeys, propertiesInfo, when, ifExists, deleteOnlyColumns) { + let query = 'DELETE'; + + if (deleteOnlyColumns) { + const columnsToDelete = propertiesInfo.filter(p => !primaryKeys.has(p.columnName)) + .map(p => p.columnName) + .join(', '); + + if (columnsToDelete !== '') { + query += ' ' + columnsToDelete; + } + } + + query += ` FROM ${keyspace}.${tableName} WHERE `; + query += propertiesInfo.filter(p => primaryKeys.has(p.columnName)).map(p => p.columnName + ' = ?').join(' AND '); + + if (ifExists === true) { + query += ' IF EXISTS'; + } + else if (when.length > 0) { + query += ' IF ' + QueryGenerator._getConditionWithOperators(when); + } + + return query; + } + /** + * Returns a function to obtain 
the parameter values from a doc for an UPDATE statement. + * @param {Set} primaryKeys + * @param {Array} propertiesInfo + * @param {Array} when + * @returns {Function} + */ + static _deleteParamsGetter(primaryKeys, propertiesInfo, when) { + let scriptText = '(function getParametersDelete(doc, docInfo, mappingInfo) {\n'; + scriptText += ' return ['; + + // Where clause + scriptText += QueryGenerator._valueGetterExpression(propertiesInfo.filter(p => primaryKeys.has(p.columnName))); + + // Condition clause + if (when.length > 0) { + scriptText += ', ' + QueryGenerator._valueGetterExpression(when, 'docInfo.when'); + } + + // Finish return statement + scriptText += '];\n})'; + + const script = new vm.Script(scriptText, { filename: vmFileName }); + return script.runInThisContext(); + } + + /** + * Gets a string containing the doc properties to get. + * @param {Array} propertiesInfo + * @param {String} [objectName='doc'] + * @return {string} + * @private + */ + static _valueGetterExpression(propertiesInfo, objectName) { + objectName = objectName || 'doc'; + + return propertiesInfo + .map(p => + QueryGenerator._valueGetterSingle(`${objectName}['${p.propertyName}']`, p.propertyName, p.value, p.fromModel)) + .join(', '); + } + + static _valueGetterSingle(prefix, propName, value, fromModelFn) { + let valueGetter = prefix; + + if (value instanceof QueryOperator) { + if (value.hasChildValues) { + return `${QueryGenerator._valueGetterSingle(`${prefix}.value[0]`, propName, value.value[0], fromModelFn)}` + + `, ${QueryGenerator._valueGetterSingle(`${prefix}.value[1]`, propName, value.value[1], fromModelFn)}`; + } + + valueGetter = `${prefix}.value`; + + if (value.isInOperator && fromModelFn) { + // Transform each individual value + return `${valueGetter}.map(v => ${QueryGenerator._getMappingFunctionCall(propName, 'v')})`; + } + } + + return !fromModelFn ? 
valueGetter : QueryGenerator._getMappingFunctionCall(propName, valueGetter); + } + + /** + * Gets a string containing the doc properties to SET, considering QueryAssignment instances. + * @param {Array} propertiesInfo + * @param {String} [prefix='doc'] + * @return {string} + * @private + */ + static _assignmentGetterExpression(propertiesInfo, prefix) { + prefix = prefix || 'doc'; + + return propertiesInfo + .map(p => { + const valueGetter = `${prefix}['${p.propertyName}']${p.value instanceof QueryAssignment ? '.value' : ''}`; + if (p.fromModel) { + return QueryGenerator._getMappingFunctionCall(p.propertyName, valueGetter); + } + return valueGetter; + }) + .join(', '); + } + + static _getConditionWithOperators(propertiesInfo) { + return propertiesInfo + .map(p => QueryGenerator._getSingleCondition(p.columnName, p.value)) + .join(' AND '); + } + + static _getMappingFunctionCall(propName, valueGetter) { + return `mappingInfo.getFromModelFn('${propName}')(${valueGetter})`; + } + + static _getSingleCondition(columnName, value) { + if (value instanceof QueryOperator) { + if (value.hasChildValues) { + return `${QueryGenerator._getSingleCondition(columnName, value.value[0])}` + + ` ${value.key} ${QueryGenerator._getSingleCondition(columnName, value.value[1])}`; + } + return `${columnName} ${value.key} ?`; + } + return `${columnName} = ?`; + } +} + +module.exports = QueryGenerator; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/result-mapper.js b/node_modules/cassandra-driver/lib/mapping/result-mapper.js new file mode 100644 index 0000000..c364e5a --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/result-mapper.js @@ -0,0 +1,112 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const vm = require('vm'); +const utils = require('../utils'); +const types = require('../types'); + +/** + * @ignore + */ +class ResultMapper { + /** + * Gets a generated function to adapt the row to a document. + * @param {ModelMappingInfo} info + * @param {ResultSet} rs + * @returns {Function} + */ + static getSelectAdapter(info, rs) { + const columns = rs.columns; + if (!columns) { + throw new Error('Expected ROWS result obtained VOID'); + } + + let scriptText = '(function rowAdapter(row, info) {\n' + + ' const item = info.newInstance();\n'; + + for (const c of columns) { + scriptText += ` item['${info.getPropertyName(c.name)}'] = `; + + if (!info.getToModelFn(c.name)) { + scriptText += `row['${c.name}'];\n`; + } else { + scriptText += `info.getToModelFn('${c.name}')(row['${c.name}']);\n`; + } + } + + scriptText += ' return item;\n})'; + + const script = new vm.Script(scriptText, { filename: 'gen-result-mapper.js'}); + return script.runInThisContext(); + } + + /** + * Gets a function used to adapt VOID results or conditional updates. 
+ * @param {ResultSet} rs + * @returns {Function} + */ + static getMutationAdapter(rs) { + if (rs.columns === null) { + // VOID result + return utils.noop; + } + + if ( + rs.columns.length === 1 && rs.columns[0].name === '[applied]' && + rs.columns[0].type.code === types.dataTypes.boolean) { + return utils.noop; + } + + return ResultMapper._getConditionalRowAdapter(rs); + } + + static _getConditionalRowAdapter(rs) { + return (function conditionalRowAdapter(row, info) { + const item = info.newInstance(); + + // Skip the first column ("[applied]") + for (let i = 1; i < rs.columns.length; i++) { + const c = rs.columns[i]; + item[info.getPropertyName(c.name)] = row[c.name]; + } + + return item; + }); + } + + /** + * @param {ModelMappingInfo} info + * @param {ResultSet} rs + * @returns {{canCache: Boolean, fn: Function}} + */ + static getCustomQueryAdapter(info, rs) { + if (rs.columns === null || rs.columns.length === 0) { + // VOID result + return { canCache: true, fn: utils.noop }; + } + + if (rs.columns[0].name === '[applied]' && rs.columns[0].type.code === types.dataTypes.boolean) { + // Conditional update results adapter functions should not be cached + return { canCache: false, fn: ResultMapper._getConditionalRowAdapter(rs) }; + } + + return { canCache: true, fn: ResultMapper.getSelectAdapter(info, rs) }; + } +} + +module.exports = ResultMapper; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/result.js b/node_modules/cassandra-driver/lib/mapping/result.js new file mode 100644 index 0000000..6f1ead4 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/result.js @@ -0,0 +1,136 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const util = require('util'); +const utils = require('../utils'); +const inspectMethod = util.inspect.custom || 'inspect'; + +/** + * Represents the result of an execution as an iterable of objects in the Mapper. + * @alias module:mapping~Result + */ +class Result { + /** + * Creates a new instance of Result. + * @param {ResultSet} rs + * @param {ModelMappingInfo} info + * @param {Function} rowAdapter + */ + constructor(rs, info, rowAdapter) { + this._rs = rs; + this._info = info; + this._rowAdapter = rowAdapter; + + /** + * When there is a single cell containing the result of the a LWT operation, hide the result from the user. + * @private + */ + this._isEmptyLwt = (rs.columns !== null + && rs.columns.length === 1 && this._rs.rowLength === 1 && rs.columns[0].name === '[applied]'); + + /** + * Gets the amount of the documents contained in this Result instance. + *

+ * When the results are paged, it returns the length of the current paged results not the total amount of + * rows in the table matching the query. + *

+ * @type {Number} + */ + this.length = this._isEmptyLwt ? 0 : (rs.rowLength || 0); + + /** + * A string token representing the current page state of query. + *

+ * When provided, it can be used in the following executions to continue paging and retrieve the remained of the + * result for the query. + *

+ * @type {String} + * @default null + */ + this.pageState = rs.pageState; + } + + /** + * When this instance is the result of a conditional update query, it returns whether it was successful. + * Otherwise, it returns true. + *

+ * For consistency, this method always returns true for non-conditional queries (although there is + * no reason to call the method in that case). This is also the case for conditional DDL statements + * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return + * information whether it was applied or not. + *

+ */ + wasApplied() { + return this._rs.wasApplied(); + } + + /** + * Gets the first document in this result or null when the result is empty. + */ + first() { + if (!this._rs.rowLength || this._isEmptyLwt) { + return null; + } + return this._rowAdapter(this._rs.rows[0], this._info); + } + + /** + * Returns a new Iterator object that contains the document values. + */ + *[Symbol.iterator]() { + if (this._isEmptyLwt) { + // Empty iterator + return; + } + + for (let i = 0; i < this._rs.rows.length; i++) { + yield this._rowAdapter(this._rs.rows[i], this._info); + } + } + + /** + * Converts the current instance to an Array of documents. + * @return {Array} + */ + toArray() { + if (this._isEmptyLwt || !this._rs.rows) { + return utils.emptyArray; + } + + return this._rs.rows.map(row => this._rowAdapter(row, this._info)); + } + + /** + * Executes a provided function once per result element. + * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. + * @param {Object} [thisArg] Value to use as this when executing callback. + */ + forEach(callback, thisArg) { + let index = 0; + thisArg = thisArg || this; + for (const doc of this) { + callback.call(thisArg, doc, index++); + } + } + + [inspectMethod]() { + return this.toArray(); + } +} + +module.exports = Result; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/table-mappings.js b/node_modules/cassandra-driver/lib/mapping/table-mappings.js new file mode 100644 index 0000000..06010a1 --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/table-mappings.js @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Contains a set of methods to represent a row into a document and a document into a row. + * @alias module:mapping~TableMappings + * @interface + */ +class TableMappings { + /** + * Method that is called by the mapper to create the instance of the document. + * @return {Object} + */ + newObjectInstance() { + return {}; + } + + /** + * Gets the name of the column based on the document property name. + * @param {String} propName The name of the property. + * @returns {String} + */ + getColumnName(propName) { + return propName; + } + + /** + * Gets the name of the document property based on the column name. + * @param {String} columnName The name of the column. + * @returns {String} + */ + getPropertyName(columnName) { + return columnName; + } +} + +/** + * A [TableMappings]{@link module:mapping~TableMappings} implementation that converts CQL column names in all-lowercase + * identifiers with underscores (snake case) to camel case (initial lowercase letter) property names. + *

+ * The conversion is performed without any checks for the source format, you should make sure that the source + * format is snake case for CQL identifiers and camel case for properties. + *

+ * @alias module:mapping~UnderscoreCqlToCamelCaseMappings + * @implements {module:mapping~TableMappings} + */ +class UnderscoreCqlToCamelCaseMappings extends TableMappings { + /** + * Creates a new instance of {@link UnderscoreCqlToCamelCaseMappings} + */ + constructor() { + super(); + } + + /** + * Converts a property name in camel case to snake case. + * @param {String} propName Name of the property to convert to snake case. + * @return {String} + */ + getColumnName(propName) { + return propName.replace(/[a-z][A-Z]/g, (match, offset) => match.charAt(0) + '_' + match.charAt(1)).toLowerCase(); + } + + /** + * Converts a column name in snake case to camel case. + * @param {String} columnName The column name to convert to camel case. + * @return {String} + */ + getPropertyName(columnName) { + return columnName.replace(/_[a-z]/g, (match, offset) => ((offset === 0) ? match : match.substr(1).toUpperCase())); + } +} + +/** + * Default implementation of [TableMappings]{@link module:mapping~TableMappings} that doesn't perform any conversion. + * @alias module:mapping~DefaultTableMappings + * @implements {module:mapping~TableMappings} + */ +class DefaultTableMappings extends TableMappings { + /** + * Creates a new instance of {@link DefaultTableMappings}. + */ + constructor() { + super(); + } + + /** @override */ + getColumnName(propName) { + return super.getColumnName(propName); + } + + /** @override */ + getPropertyName(columnName) { + return super.getPropertyName(columnName); + } + + /** + * Creates a new object instance, using object initializer. 
+ */ + newObjectInstance() { + return super.newObjectInstance(); + } +} + +exports.TableMappings = TableMappings; +exports.UnderscoreCqlToCamelCaseMappings = UnderscoreCqlToCamelCaseMappings; +exports.DefaultTableMappings = DefaultTableMappings; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/mapping/tree.js b/node_modules/cassandra-driver/lib/mapping/tree.js new file mode 100644 index 0000000..e6adece --- /dev/null +++ b/node_modules/cassandra-driver/lib/mapping/tree.js @@ -0,0 +1,151 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const EventEmitter = require('events'); + +/** + * Represents a tree node where the key is composed by 1 or more strings. + * @ignore + */ +class Node extends EventEmitter { + /** + * Creates a new instance of {@link Node}. + * @param {Array} key + * @param {Object} value + * @param {Array} [edges] + */ + constructor(key, value, edges) { + super(); + this.key = key; + this.value = value; + this.edges = edges || []; + } +} + +/** + * A radix tree where each node contains a key, a value and edges. 
+ * @ignore + */ +class Tree extends Node { + constructor() { + super([], null); + this.length = 0; + } + + /** + * Gets the existing item in the tree or creates a new one with the value provided by valueHandler + * @param {Iterator} keyIterator + * @param {Function} valueHandler + * @return {Object} + */ + getOrCreate(keyIterator, valueHandler) { + if (typeof keyIterator.next !== 'function') { + keyIterator = keyIterator[Symbol.iterator](); + } + let node = this; + let isMatch = false; + let item = keyIterator.next(); + while (true) { + let newBranch; + // Check node keys at position 1 and above + for (let i = 1; i < node.key.length; i++) { + if (item.done || node.key[i] !== item.value) { + // We should branch out + newBranch = this._createBranch(node, i, item.done, valueHandler); + break; + } + item = keyIterator.next(); + } + + if (item.done) { + isMatch = true; + break; + } + + if (newBranch !== undefined) { + break; + } + + const edges = node.edges; + let nextNode; + for (let i = 0; i < edges.length; i++) { + const e = edges[i]; + if (e.key[0] === item.value) { + // its a match + nextNode = e; + item = keyIterator.next(); + break; + } + } + + if (nextNode === undefined) { + // Current node is the root for a new leaf + break; + } + else { + node = nextNode; + } + } + + if (!isMatch) { + // Create using "node" as the root + const value = valueHandler(); + node.edges.push(new Node(iteratorToArray(item.value, keyIterator), value)); + this._onItemAdded(); + return value; + } + if (node.value === null && node.edges.length > 0) { + node.value = valueHandler(); + } + return node.value; + } + + _createBranch(node, index, useNewValue, valueHandler) { + const newBranch = new Node(node.key.slice(index), node.value, node.edges); + node.key = node.key.slice(0, index); + node.edges = [ newBranch ]; + if (useNewValue) { + // The previous node value has moved to a leaf + // The node containing the new leaf should use the new value + node.value = valueHandler(); + 
this._onItemAdded(); + } + else { + // Clear the value as it was copied in the branch + node.value = null; + } + return newBranch; + } + + _onItemAdded() { + this.length++; + this.emit('add', this.length); + } +} + +function iteratorToArray(value, iterator) { + const values = [ value ]; + let item = iterator.next(); + while (!item.done) { + values.push(item.value); + item = iterator.next(); + } + return values; +} + +module.exports = Tree; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/aggregate.js b/node_modules/cassandra-driver/lib/metadata/aggregate.js new file mode 100644 index 0000000..6677f5f --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/aggregate.js @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * Creates a new Aggregate. + * @classdesc Describes a CQL aggregate. + * @alias module:metadata~Aggregate + * @constructor + */ +function Aggregate() { + /** + * Name of the aggregate. + * @type {String} + */ + this.name = null; + /** + * Name of the keyspace where the aggregate is declared. + */ + this.keyspaceName = null; + /** + * Signature of the aggregate. + * @type {Array.} + */ + this.signature = null; + /** + * List of the CQL aggregate argument types. + * @type {Array.<{code, info}>} + */ + this.argumentTypes = null; + /** + * State Function. + * @type {String} + */ + this.stateFunction = null; + /** + * State Type. 
+ * @type {{code, info}} + */ + this.stateType = null; + /** + * Final Function. + * @type {String} + */ + this.finalFunction = null; + this.initConditionRaw = null; + /** + * Initial state value of this aggregate. + * @type {String} + */ + this.initCondition = null; + /** + * Type of the return value. + * @type {{code: number, info: (Object|Array|null)}} + */ + this.returnType = null; + /** + * Indicates whether or not this aggregate is deterministic. This means that + * given a particular input, the aggregate will always produce the same output. + * @type {Boolean} + */ + this.deterministic = null; +} + +module.exports = Aggregate; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/client-state.js b/node_modules/cassandra-driver/lib/metadata/client-state.js new file mode 100644 index 0000000..6bf7c37 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/client-state.js @@ -0,0 +1,114 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const util = require('util'); +const errors = require('../errors'); + +/** + * Represents the state of a {@link Client}. + *

+ * Exposes information on the connections maintained by a Client at a specific time. + *

+ * @alias module:metadata~ClientState + * @constructor + */ +class ClientState { + + /** + * Creates a new instance of ClientState. + * @param {Array} hosts + * @param {Object.} openConnections + * @param {Object.} inFlightQueries + */ + constructor(hosts, openConnections, inFlightQueries) { + this._hosts = hosts; + this._openConnections = openConnections; + this._inFlightQueries = inFlightQueries; + } + + /** + * Get an array of hosts to which the client is connected to. + * @return {Array} + */ + getConnectedHosts() { + return this._hosts; + } + + /** + * Gets the amount of open connections to a given host. + * @param {Host} host + * @return {Number} + */ + getOpenConnections(host) { + if (!host) { + throw new errors.ArgumentError('Host is not defined'); + } + + return this._openConnections[host.address] || 0; + } + + /** + * Gets the amount of queries that are currently being executed through a given host. + *

+ * This corresponds to the number of queries that have been sent by the Client to server Host on one of its connections + * but haven't yet obtained a response. + *

+ * @param {Host} host + * @return {Number} + */ + getInFlightQueries(host) { + if (!host) { + throw new errors.ArgumentError('Host is not defined'); + } + + return this._inFlightQueries[host.address] || 0; + } + + /** + * Returns the string representation of the instance. + */ + toString() { + return util.format('{"hosts": %j, "openConnections": %j, "inFlightQueries": %j}', + this._hosts.map(function (h) { return h.address; }), this._openConnections, this._inFlightQueries); + } + + /** + * Creates a new instance from the provided client. + * @param {Client} client + * @internal + * @ignore + */ + static from(client) { + const openConnections = {}; + const inFlightQueries = {}; + const hostArray = []; + + client.hosts.forEach(host => { + if (host.pool.connections.length === 0) { + return; + } + + hostArray.push(host); + openConnections[host.address] = host.pool.connections.length; + inFlightQueries[host.address] = host.getInFlight(); + }); + + return new ClientState(hostArray, openConnections, inFlightQueries); + } +} + +module.exports = ClientState; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/data-collection.js b/node_modules/cassandra-driver/lib/metadata/data-collection.js new file mode 100644 index 0000000..1bab680 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/data-collection.js @@ -0,0 +1,173 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const util = require('util'); +const events = require('events'); +/** + * Creates a new instance of DataCollection + * @param {String} name Name of the data object. + * @classdesc Describes a table or a view + * @alias module:metadata~DataCollection + * @constructor + * @abstract + */ +function DataCollection(name) { + events.EventEmitter.call(this); + this.setMaxListeners(0); + //private + Object.defineProperty(this, 'loading', { value: false, enumerable: false, writable: true }); + Object.defineProperty(this, 'loaded', { value: false, enumerable: false, writable: true }); + /** + * Name of the object + * @type {String} + */ + this.name = name; + /** + * False-positive probability for SSTable Bloom filters. + * @type {number} + */ + this.bloomFilterFalsePositiveChance = 0; + /** + * Level of caching: all, keys_only, rows_only, none + * @type {String} + */ + this.caching = null; + /** + * A human readable comment describing the table. + * @type {String} + */ + this.comment = null; + /** + * Specifies the time to wait before garbage collecting tombstones (deletion markers) + * @type {number} + */ + this.gcGraceSeconds = 0; + /** + * Compaction strategy class used for the table. + * @type {String} + */ + this.compactionClass = null; + /** + * Associative-array containing the compaction options keys and values. + * @type {Object} + */ + this.compactionOptions = null; + /** + * Associative-array containing the compression options. + * @type {Object} + */ + this.compression = null; + /** + * Specifies the probability of read repairs being invoked over all replicas in the current data center. + * @type {number} + */ + this.localReadRepairChance = 0; + /** + * Specifies the probability with which read repairs should be invoked on non-quorum reads. The value must be + * between 0 and 1. + * @type {number} + */ + this.readRepairChance = 0; + /** + * An associative Array containing extra metadata for the table. + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Object} + */ + this.extensions = null; + /** + * When compression is enabled, this option defines the probability + * with which checksums for compressed blocks are checked during reads. + * The default value for this options is 1.0 (always check). + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Number|null} + */ + this.crcCheckChance = null; + /** + * Whether the populate I/O cache on flush is set on this table. + * @type {Boolean} + */ + this.populateCacheOnFlush = false; + /** + * Returns the default TTL for this table. + * @type {Number} + */ + this.defaultTtl = 0; + /** + * * Returns the speculative retry option for this table. + * @type {String} + */ + this.speculativeRetry = 'NONE'; + /** + * Returns the minimum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + this.minIndexInterval = 128; + /** + * Returns the maximum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + this.maxIndexInterval = 2048; + /** + * Array describing the table columns. + * @type {Array} + */ + this.columns = null; + /** + * An associative Array of columns by name. + * @type {Object} + */ + this.columnsByName = null; + /** + * Array describing the columns that are part of the partition key. + * @type {Array} + */ + this.partitionKeys = []; + /** + * Array describing the columns that form the clustering key. + * @type {Array} + */ + this.clusteringKeys = []; + /** + * Array describing the clustering order of the columns in the same order as the clusteringKeys. + * @type {Array} + */ + this.clusteringOrder = []; + /** + * An associative Array containing nodesync options for this table. + *

+ * For DSE versions prior to 6.0.0, this method always returns {@code null}. If nodesync + * was not explicitly configured for this table this method will also return {@code null}. + *

+ * @type {Object} + */ + this.nodesync = null; +} + +util.inherits(DataCollection, events.EventEmitter); + +module.exports = DataCollection; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/event-debouncer.js b/node_modules/cassandra-driver/lib/metadata/event-debouncer.js new file mode 100644 index 0000000..4ea8ddc --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/event-debouncer.js @@ -0,0 +1,164 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const util = require('util'); +const utils = require('../utils'); +const promiseUtils = require('../promise-utils'); + +const _queueOverflowThreshold = 1000; + +/** + * Debounce protocol events by acting on those events with a sliding delay. + * @ignore + * @constructor + */ +class EventDebouncer { + + /** + * Creates a new instance of the event debouncer. + * @param {Number} delay + * @param {Function} logger + */ + constructor(delay, logger) { + this._delay = delay; + this._logger = logger; + this._queue = null; + this._timeout = null; + } + + /** + * Adds a new event to the queue and moves the delay. 
+ * @param {{ handler: Function, all: boolean|undefined, keyspace: String|undefined, + * cqlObject: String|null|undefined }} event + * @param {Boolean} processNow + * @returns {Promise} + */ + eventReceived(event, processNow) { + return new Promise((resolve, reject) => { + event.callback = promiseUtils.getCallback(resolve, reject); + this._queue = this._queue || { callbacks: [], keyspaces: {} }; + const delay = !processNow ? this._delay : 0; + if (event.all) { + // when an event marked with all is received, it supersedes all the rest of events + // a full update (hosts + keyspaces + tokens) is going to be made + this._queue.mainEvent = event; + } + if (this._queue.callbacks.length === _queueOverflowThreshold) { + // warn once + this._logger('warn', util.format('Event debouncer queue exceeded %d events', _queueOverflowThreshold)); + } + this._queue.callbacks.push(event.callback); + if (this._queue.mainEvent) { + // a full refresh is scheduled and the callback was added, nothing else to do. 
+ return this._slideDelay(delay); + } + // Insert at keyspace level + let keyspaceEvents = this._queue.keyspaces[event.keyspace]; + if (!keyspaceEvents) { + keyspaceEvents = this._queue.keyspaces[event.keyspace] = { events: [] }; + } + if (event.cqlObject === undefined) { + // a full refresh of the keyspace, supersedes all child keyspace events + keyspaceEvents.mainEvent = event; + } + keyspaceEvents.events.push(event); + this._slideDelay(delay); + }); + } + + /** + * @param {Number} delay + * @private + * */ + _slideDelay(delay) { + const self = this; + function process() { + const q = self._queue; + self._queue = null; + self._timeout = null; + processQueue(q); + } + if (delay === 0) { + // no delay, process immediately + if (this._timeout) { + clearTimeout(this._timeout); + } + return process(); + } + const previousTimeout = this._timeout; + // Add the new timeout before removing the previous one performs better + this._timeout = setTimeout(process, delay); + if (previousTimeout) { + clearTimeout(previousTimeout); + } + } + + /** + * Clears the timeout and invokes all pending callback. 
+ */ + shutdown() { + if (!this._queue) { + return; + } + this._queue.callbacks.forEach(function (cb) { + cb(); + }); + this._queue = null; + clearTimeout(this._timeout); + this._timeout = null; + } +} + +/** + * @param {{callbacks: Array, keyspaces: Object, mainEvent: Object}} q + * @private + */ +function processQueue (q) { + if (q.mainEvent) { + // refresh all by invoking 1 handler and invoke all pending callbacks + return promiseUtils.toCallback(q.mainEvent.handler(), (err) => { + for (let i = 0; i < q.callbacks.length; i++) { + q.callbacks[i](err); + } + }); + } + + utils.each(Object.keys(q.keyspaces), function eachKeyspace(name, next) { + const keyspaceEvents = q.keyspaces[name]; + if (keyspaceEvents.mainEvent) { + // refresh a keyspace + return promiseUtils.toCallback(keyspaceEvents.mainEvent.handler(), function mainEventCallback(err) { + for (let i = 0; i < keyspaceEvents.events.length; i++) { + keyspaceEvents.events[i].callback(err); + } + + next(); + }); + } + + // deal with individual handlers and callbacks + keyspaceEvents.events.forEach(event => { + // sync handlers + event.handler(); + event.callback(); + }); + + next(); + }); +} + +module.exports = EventDebouncer; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/index.d.ts b/node_modules/cassandra-driver/lib/metadata/index.d.ts new file mode 100644 index 0000000..80c0d96 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/index.d.ts @@ -0,0 +1,211 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { types } from '../types'; +import { EmptyCallback, Host, token, ValueCallback } from '../../'; +import dataTypes = types.dataTypes; +import Uuid = types.Uuid; +import InetAddress = types.InetAddress; + +export namespace metadata { + + interface Aggregate { + argumentTypes: Array<{ code: dataTypes, info: any }>; + finalFunction: string; + initCondition: string; + keyspaceName: string; + returnType: string; + signature: string[]; + stateFunction: string; + stateType: string; + } + + interface ClientState { + getConnectedHosts(): Host[]; + + getInFlightQueries(host: Host): number; + + getOpenConnections(host: Host): number; + + toString(): string; + } + + interface DataTypeInfo { + code: dataTypes; + info: string | DataTypeInfo | DataTypeInfo[]; + options: { + frozen: boolean; + reversed: boolean; + }; + } + + interface ColumnInfo { + name: string; + type: DataTypeInfo; + } + + enum IndexKind { + custom = 0, + keys, + composites + } + + interface Index { + kind: IndexKind; + name: string; + options: object; + target: string; + + isCompositesKind(): boolean; + + isCustomKind(): boolean; + + isKeysKind(): boolean; + } + + interface DataCollection { + bloomFilterFalsePositiveChance: number; + caching: string; + clusteringKeys: ColumnInfo[]; + clusteringOrder: string[]; + columns: ColumnInfo[]; + columnsByName: { [key: string]: ColumnInfo }; + comment: string; + compactionClass: string; + compactionOptions: { [option: string]: any; }; + compression: { + class?: string; + [option: string]: any; + }; + crcCheckChange?: number; + defaultTtl: number; + extensions: { [option: string]: any; }; + gcGraceSeconds: number; + localReadRepairChance: number; + maxIndexInterval?: number; + minIndexInterval?: number; + name: string; + partitionKeys: ColumnInfo[]; + populateCacheOnFlush: boolean; + readRepairChance: number; + speculativeRetry: string; + } + + 
interface MaterializedView extends DataCollection { + tableName: string; + whereClause: string; + includeAllColumns: boolean; + } + + interface TableMetadata extends DataCollection { + indexes: Index[]; + indexInterval?: number; + isCompact: boolean; + memtableFlushPeriod: number; + replicateOnWrite: boolean; + cdc?: boolean; + virtual: boolean; + } + + interface QueryTrace { + requestType: string; + coordinator: InetAddress; + parameters: { [key: string]: any }; + startedAt: number | types.Long; + duration: number; + clientAddress: string; + events: Array<{ id: Uuid; activity: any; source: any; elapsed: any; thread: any }>; + } + + interface SchemaFunction { + argumentNames: string[]; + argumentTypes: Array<{ code: dataTypes, info: any }>; + body: string; + calledOnNullInput: boolean; + keyspaceName: string; + language: string; + name: string; + returnType: string; + signature: string[]; + } + + interface Udt { + name: string; + fields: ColumnInfo[] + } + + interface Metadata { + keyspaces: { [name: string]: { name: string, strategy: string }}; + + clearPrepared(): void; + + getAggregate(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>, callback: ValueCallback): void; + + getAggregate(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>): Promise; + + getAggregates(keyspaceName: string, name: string, callback: ValueCallback): void; + + getAggregates(keyspaceName: string, name: string): Promise; + + getFunction(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>, callback: ValueCallback): void; + + getFunction(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>): Promise; + + getFunctions(keyspaceName: string, name: string, callback: ValueCallback): void; + + getFunctions(keyspaceName: string, name: string): Promise; + + getMaterializedView(keyspaceName: string, name: string, callback: ValueCallback): 
void; + + getMaterializedView(keyspaceName: string, name: string, callback: EmptyCallback): Promise; + + getReplicas(keyspaceName: string, token: Buffer | token.Token | token.TokenRange): Host[]; + + getTable(keyspaceName: string, name: string, callback: ValueCallback): void; + + getTable(keyspaceName: string, name: string): Promise; + + getTokenRanges(): Set; + + getTokenRangesForHost(keyspaceName: string, host: Host): Set | null; + + getTrace(traceId: Uuid, consistency: types.consistencies, callback: ValueCallback): void; + + getTrace(traceId: Uuid, consistency: types.consistencies): Promise; + + getTrace(traceId: Uuid, callback: ValueCallback): void; + + getTrace(traceId: Uuid): Promise; + + getUdt(keyspaceName: string, name: string, callback: ValueCallback): void; + + getUdt(keyspaceName: string, name: string): Promise; + + newToken(components: Buffer[] | Buffer | string): token.Token; + + newTokenRange(start: token.Token, end: token.Token): token.TokenRange; + + refreshKeyspace(name: string, callback: EmptyCallback): void; + + refreshKeyspace(name: string): Promise; + + refreshKeyspaces(waitReconnect: boolean, callback: EmptyCallback): void; + + refreshKeyspaces(waitReconnect?: boolean): Promise; + + refreshKeyspaces(callback: EmptyCallback): void; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/index.js b/node_modules/cassandra-driver/lib/metadata/index.js new file mode 100644 index 0000000..ec587fc --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/index.js @@ -0,0 +1,1024 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const events = require('events'); +const util = require('util'); + +/** + * Module containing classes and fields related to metadata. + * @module metadata + */ + +const t = require('../tokenizer'); +const utils = require('../utils'); +const errors = require('../errors'); +const types = require('../types'); +const requests = require('../requests'); +const schemaParserFactory = require('./schema-parser'); +const promiseUtils = require('../promise-utils'); +const { TokenRange } = require('../token'); +const { ExecutionOptions } = require('../execution-options'); + +/** + * @const + * @private + */ +const _selectTraceSession = "SELECT * FROM system_traces.sessions WHERE session_id=%s"; +/** + * @const + * @private + */ +const _selectTraceEvents = "SELECT * FROM system_traces.events WHERE session_id=%s"; +/** + * @const + * @private + */ +const _selectSchemaVersionPeers = "SELECT schema_version FROM system.peers"; +/** + * @const + * @private + */ +const _selectSchemaVersionLocal = "SELECT schema_version FROM system.local"; +/** + * @const + * @private + */ +const _traceMaxAttemps = 5; +/** + * @const + * @private + */ +const _traceAttemptDelay = 400; + +/** + * Represents cluster and schema information. + * The metadata class acts as a internal state of the driver. + */ +class Metadata { + + /** + * Creates a new instance of {@link Metadata}. + * @param {ClientOptions} options + * @param {ControlConnection} controlConnection Control connection used to retrieve information. 
+ */ + constructor(options, controlConnection) { + if (!options) { + throw new errors.ArgumentError('Options are not defined'); + } + + Object.defineProperty(this, 'options', { value: options, enumerable: false, writable: false }); + Object.defineProperty(this, 'controlConnection', { value: controlConnection, enumerable: false, writable: false }); + this.keyspaces = {}; + this.initialized = false; + this._isDbaas = false; + this._schemaParser = schemaParserFactory.getByVersion(options, controlConnection, this.getUdt.bind(this)); + this.log = utils.log; + this._preparedQueries = new PreparedQueries(options.maxPrepared, (...args) => this.log(...args)); + } + + /** + * Sets the cassandra version + * @internal + * @ignore + * @param {Array.} version + */ + setCassandraVersion(version) { + this._schemaParser = schemaParserFactory.getByVersion( + this.options, this.controlConnection, this.getUdt.bind(this), version, this._schemaParser); + } + + /** + * Determines whether the cluster is provided as a service. + * @returns {boolean} true when the cluster is provided as a service (DataStax Astra), false when it's a + * different deployment (on-prem). + */ + isDbaas() { + return this._isDbaas; + } + + /** + * Sets the product type as DBaaS. + * @internal + * @ignore + */ + setProductTypeAsDbaas() { + this._isDbaas = true; + } + + /** + * @ignore + * @param {String} partitionerName + */ + setPartitioner(partitionerName) { + if (/RandomPartitioner$/.test(partitionerName)) { + return this.tokenizer = new t.RandomTokenizer(); + } + if (/ByteOrderedPartitioner$/.test(partitionerName)) { + return this.tokenizer = new t.ByteOrderedTokenizer(); + } + return this.tokenizer = new t.Murmur3Tokenizer(); + } + + /** + * Populates the information regarding primary replica per token, datacenters (+ racks) and sorted token ring. 
+ * @ignore + * @param {HostMap} hosts + */ + buildTokens(hosts) { + if (!this.tokenizer) { + return this.log('error', 'Tokenizer could not be determined'); + } + //Get a sorted array of tokens + const allSorted = []; + //Get a map of + const primaryReplicas = {}; + //Depending on the amount of tokens, this could be an expensive operation + const hostArray = hosts.values(); + const stringify = this.tokenizer.stringify; + const datacenters = {}; + hostArray.forEach((h) => { + if (!h.tokens) { + return; + } + h.tokens.forEach((tokenString) => { + const token = this.tokenizer.parse(tokenString); + utils.insertSorted(allSorted, token, (t1, t2) => t1.compare(t2)); + primaryReplicas[stringify(token)] = h; + }); + let dc = datacenters[h.datacenter]; + if (!dc) { + dc = datacenters[h.datacenter] = { + hostLength: 0, + racks: new utils.HashSet() + }; + } + dc.hostLength++; + dc.racks.add(h.rack); + }); + //Primary replica for given token + this.primaryReplicas = primaryReplicas; + //All the tokens in ring order + this.ring = allSorted; + // Build TokenRanges. + const tokenRanges = new Set(); + if (this.ring.length === 1) { + // If there is only one token, return the range ]minToken, minToken] + const min = this.tokenizer.minToken(); + tokenRanges.add(new TokenRange(min, min, this.tokenizer)); + } + else { + for (let i = 0; i < this.ring.length; i++) { + const start = this.ring[i]; + const end = this.ring[(i + 1) % this.ring.length]; + tokenRanges.add(new TokenRange(start, end, this.tokenizer)); + } + } + this.tokenRanges = tokenRanges; + //Compute string versions as it's potentially expensive and frequently reused later + this.ringTokensAsStrings = new Array(allSorted.length); + for (let i = 0; i < allSorted.length; i++) { + this.ringTokensAsStrings[i] = stringify(allSorted[i]); + } + //Datacenter metadata (host length and racks) + this.datacenters = datacenters; + } + + /** + * Gets the keyspace metadata information and updates the internal state of the driver. + *

+ * If a callback is provided, the callback is invoked when the keyspaces metadata refresh completes. + * Otherwise, it returns a Promise. + *

+ * @param {String} name Name of the keyspace. + * @param {Function} [callback] Optional callback. + */ + refreshKeyspace(name, callback) { + return promiseUtils.optionalCallback(this._refreshKeyspace(name), callback); + } + + /** + * @param {String} name + * @private + */ + async _refreshKeyspace(name) { + if (!this.initialized) { + throw this._uninitializedError(); + } + this.log('info', util.format('Retrieving keyspace %s metadata', name)); + try { + const ksInfo = await this._schemaParser.getKeyspace(name); + if (!ksInfo) { + // the keyspace was dropped + delete this.keyspaces[name]; + return null; + } + // Tokens are lazily init on the keyspace, once a replica from that keyspace is retrieved. + this.keyspaces[ksInfo.name] = ksInfo; + return ksInfo; + } + catch (err) { + this.log('error', 'There was an error while trying to retrieve keyspace information', err); + throw err; + } + } + + /** + * Gets the metadata information of all the keyspaces and updates the internal state of the driver. + *

+ * If a callback is provided, the callback is invoked when the keyspace metadata refresh completes. + * Otherwise, it returns a Promise. + *

+ * @param {Boolean|Function} [waitReconnect] Determines if it should wait for reconnection in case the control connection is not + * connected at the moment. Default: true. + * @param {Function} [callback] Optional callback. + */ + refreshKeyspaces(waitReconnect, callback) { + if (typeof waitReconnect === 'function' || typeof waitReconnect === 'undefined') { + callback = waitReconnect; + waitReconnect = true; + } + if (!this.initialized) { + const err = this._uninitializedError(); + if (callback) { + return callback(err); + } + return Promise.reject(err); + } + return promiseUtils.optionalCallback(this.refreshKeyspacesInternal(waitReconnect), callback); + } + + /** + * @param {Boolean} waitReconnect + * @returns {Promise>} + * @ignore + * @internal + */ + async refreshKeyspacesInternal(waitReconnect) { + this.log('info', 'Retrieving keyspaces metadata'); + try { + this.keyspaces = await this._schemaParser.getKeyspaces(waitReconnect); + return this.keyspaces; + } + catch (err) { + this.log('error', 'There was an error while trying to retrieve keyspaces information', err); + throw err; + } + } + + _getKeyspaceReplicas(keyspace) { + if (!keyspace.replicas) { + //Calculate replicas the first time for the keyspace + keyspace.replicas = + keyspace.tokenToReplica(this.tokenizer, this.ringTokensAsStrings, this.primaryReplicas, this.datacenters); + } + return keyspace.replicas; + } + + /** + * Gets the host list representing the replicas that contain the given partition key, token or token range. + *

+ * It uses the pre-loaded keyspace metadata to retrieve the replicas for a token for a given keyspace. + * When the keyspace metadata has not been loaded, it returns null. + *

+ * @param {String} keyspaceName + * @param {Buffer|Token|TokenRange} token Can be Buffer (serialized partition key), Token or TokenRange + * @returns {Array} + */ + getReplicas(keyspaceName, token) { + if (!this.ring) { + return null; + } + if (Buffer.isBuffer(token)) { + token = this.tokenizer.hash(token); + } + if (token instanceof TokenRange) { + token = token.end; + } + let keyspace; + if (keyspaceName) { + keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + // the keyspace was not found, the metadata should be loaded beforehand + return null; + } + } + let i = utils.binarySearch(this.ring, token, (t1, t2) => t1.compare(t2)); + if (i < 0) { + i = ~i; + } + if (i >= this.ring.length) { + //it circled back + i = i % this.ring.length; + } + const closestToken = this.ringTokensAsStrings[i]; + if (!keyspaceName) { + return [this.primaryReplicas[closestToken]]; + } + const replicas = this._getKeyspaceReplicas(keyspace); + return replicas[closestToken]; + } + + /** + * Gets the token ranges that define data distribution in the ring. + * + * @returns {Set} The ranges of the ring or empty set if schema metadata is not enabled. + */ + getTokenRanges() { + return this.tokenRanges; + } + + /** + * Gets the token ranges that are replicated on the given host, for + * the given keyspace. + * + * @param {String} keyspaceName The name of the keyspace to get ranges for. + * @param {Host} host The host. + * @returns {Set|null} Ranges for the keyspace on this host or null if keyspace isn't found or hasn't been loaded. + */ + getTokenRangesForHost(keyspaceName, host) { + if (!this.ring) { + return null; + } + let keyspace; + if (keyspaceName) { + keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + // the keyspace was not found, the metadata should be loaded beforehand + return null; + } + } + // If the ring has only 1 token, just return the ranges as we should only have a single node cluster. 
+ if (this.ring.length === 1) { + return this.getTokenRanges(); + } + const replicas = this._getKeyspaceReplicas(keyspace); + const ranges = new Set(); + // for each range, find replicas for end token, if replicas include host, add range. + this.tokenRanges.forEach((tokenRange) => { + const replicasForToken = replicas[this.tokenizer.stringify(tokenRange.end)]; + if (replicasForToken.indexOf(host) !== -1) { + ranges.add(tokenRange); + } + }); + return ranges; + } + + /** + * Constructs a Token from the input buffer(s) or string input. If a string is passed in + * it is assumed this matches the token representation reported by cassandra. + * @param {Array|Buffer|String} components + * @returns {Token} constructed token from the input buffer. + */ + newToken(components) { + if (!this.tokenizer) { + throw new Error('Partitioner not established. This should only happen if metadata was disabled or you have not connected yet.'); + } + if (Array.isArray(components)) { + return this.tokenizer.hash(Buffer.concat(components)); + } + else if (util.isString(components)) { + return this.tokenizer.parse(components); + } + return this.tokenizer.hash(components); + } + + /** + * Constructs a TokenRange from the given start and end tokens. + * @param {Token} start + * @param {Token} end + * @returns TokenRange build range spanning from start (exclusive) to end (inclusive). + */ + newTokenRange(start, end) { + if (!this.tokenizer) { + throw new Error('Partitioner not established. This should only happen if metadata was disabled or you have not connected yet.'); + } + return new TokenRange(start, end, this.tokenizer); + } + + /** + * Gets the metadata information already stored associated to a prepared statement + * @param {String} keyspaceName + * @param {String} query + * @internal + * @ignore + */ + getPreparedInfo(keyspaceName, query) { + return this._preparedQueries.getOrAdd(keyspaceName, query); + } + + /** + * Clears the internal state related to the prepared statements. 
+ * Following calls to the Client using the prepare flag will re-prepare the statements. + */ + clearPrepared() { + this._preparedQueries.clear(); + } + + /** @ignore */ + getPreparedById(id) { + return this._preparedQueries.getById(id); + } + + /** @ignore */ + setPreparedById(info) { + return this._preparedQueries.setById(info); + } + + /** @ignore */ + getAllPrepared() { + return this._preparedQueries.getAll(); + } + + /** @ignore */ + _uninitializedError() { + return new Error('Metadata has not been initialized. This could only happen if you have not connected yet.'); + } + + /** + * Gets the definition of an user-defined type. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same UDT definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the UDT. + * @param {Function} [callback] The callback to invoke when retrieval completes. + */ + getUdt(keyspaceName, name, callback) { + return promiseUtils.optionalCallback(this._getUdt(keyspaceName, name), callback); + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @returns {Promise} + * @private + */ + async _getUdt(keyspaceName, name) { + if (!this.initialized) { + throw this._uninitializedError(); + } + let cache; + if (this.options.isMetadataSyncEnabled) { + const keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + return null; + } + cache = keyspace.udts; + } + return await this._schemaParser.getUdt(keyspaceName, name, cache); + } + + /** + * Gets the definition of a table. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same table definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the Table. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link TableMetadata} as + * second parameter. + */ + getTable(keyspaceName, name, callback) { + return promiseUtils.optionalCallback(this._getTable(keyspaceName, name), callback); + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + async _getTable(keyspaceName, name) { + if (!this.initialized) { + throw this._uninitializedError(); + } + let cache; + let virtual; + if (this.options.isMetadataSyncEnabled) { + const keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + return null; + } + cache = keyspace.tables; + virtual = keyspace.virtual; + } + return await this._schemaParser.getTable(keyspaceName, name, cache, virtual); + } + + /** + * Gets the definition of CQL functions for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same function definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the Function. + * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link SchemaFunction} + * as second parameter. + */ + getFunctions(keyspaceName, name, callback) { + return promiseUtils.optionalCallback(this._getFunctionsWrapper(keyspaceName, name), callback); + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + async _getFunctionsWrapper(keyspaceName, name) { + if (!keyspaceName || !name) { + throw new errors.ArgumentError('You must provide the keyspace name and cql function name to retrieve the metadata'); + } + const functionsMap = await this._getFunctions(keyspaceName, name, false); + return Array.from(functionsMap.values()); + } + + /** + * Gets a definition of CQL function for a given name and signature. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same function definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the Function + * @param {Array.|Array.<{code, info}>} signature Array of types of the parameters. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link SchemaFunction} as second + * parameter. + */ + getFunction(keyspaceName, name, signature, callback) { + return promiseUtils.optionalCallback(this._getSingleFunction(keyspaceName, name, signature, false), callback); + } + + /** + * Gets the definition of CQL aggregate for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same aggregates definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the Function + * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link Aggregate} as + * second parameter. + */ + getAggregates(keyspaceName, name, callback) { + return promiseUtils.optionalCallback(this._getAggregates(keyspaceName, name), callback); + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + async _getAggregates(keyspaceName, name) { + if (!keyspaceName || !name) { + throw new errors.ArgumentError('You must provide the keyspace name and cql aggregate name to retrieve the metadata'); + } + const functionsMap = await this._getFunctions(keyspaceName, name, true); + return Array.from(functionsMap.values()); + } + + /** + * Gets a definition of CQL aggregate for a given name and signature. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same aggregate definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the aggregate + * @param {Array.|Array.<{code, info}>} signature Array of types of the parameters. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link Aggregate} as second parameter. + */ + getAggregate(keyspaceName, name, signature, callback) { + return promiseUtils.optionalCallback(this._getSingleFunction(keyspaceName, name, signature, true), callback); + } + + /** + * Gets the definition of a CQL materialized view for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * Note that, unlike the rest of the {@link Metadata} methods, this method does not cache the result for following + * calls, as the current version of the Cassandra native protocol does not support schema change events for + * materialized views. Each call to this method will produce one or more queries to the cluster. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the materialized view + * @param {Function} [callback] The callback with the err as a first parameter and the {@link MaterializedView} as + * second parameter. + */ + getMaterializedView(keyspaceName, name, callback) { + return promiseUtils.optionalCallback(this._getMaterializedView(keyspaceName, name), callback); + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @returns {Promise} + * @private + */ + async _getMaterializedView(keyspaceName, name) { + if (!this.initialized) { + throw this._uninitializedError(); + } + let cache; + if (this.options.isMetadataSyncEnabled) { + const keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + return null; + } + cache = keyspace.views; + } + return await this._schemaParser.getMaterializedView(keyspaceName, name, cache); + } + + /** + * Gets a map of cql function definitions or aggregates based on signature. + * @param {String} keyspaceName + * @param {String} name Name of the function or aggregate + * @param {Boolean} aggregate + * @returns {Promise} + * @private + */ + async _getFunctions(keyspaceName, name, aggregate) { + if (!this.initialized) { + throw this._uninitializedError(); + } + let cache; + if (this.options.isMetadataSyncEnabled) { + const keyspace = this.keyspaces[keyspaceName]; + if (!keyspace) { + return new Map(); + } + cache = aggregate ? 
keyspace.aggregates : keyspace.functions; + } + return await this._schemaParser.getFunctions(keyspaceName, name, aggregate, cache); + } + + /** + * Gets a single cql function or aggregate definition + * @param {String} keyspaceName + * @param {String} name + * @param {Array} signature + * @param {Boolean} aggregate + * @returns {Promise} + * @private + */ + async _getSingleFunction(keyspaceName, name, signature, aggregate) { + if (!keyspaceName || !name) { + throw new errors.ArgumentError('You must provide the keyspace name and cql function name to retrieve the metadata'); + } + if (!Array.isArray(signature)) { + throw new errors.ArgumentError('Signature must be an array of types'); + } + signature = signature.map(item => { + if (typeof item === 'string') { + return item; + } + return types.getDataTypeNameByCode(item); + }); + const functionsMap = await this._getFunctions(keyspaceName, name, aggregate); + return functionsMap.get(signature.join(',')) || null; + } + + /** + * Gets the trace session generated by Cassandra when query tracing is enabled for the + * query. The trace itself is stored in Cassandra in the sessions and + * events table in the system_traces keyspace and can be + * retrieve manually using the trace identifier. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ * @param {Uuid} traceId Identifier of the trace session. + * @param {Number} [consistency] The consistency level to obtain the trace. + * @param {Function} [callback] The callback with the err as first parameter and the query trace as second parameter. + */ + getTrace(traceId, consistency, callback) { + if (!callback && typeof consistency === 'function') { + // Both callback and consistency are optional parameters + // In this case, the second parameter is the callback + callback = consistency; + consistency = null; + } + + return promiseUtils.optionalCallback(this._getTrace(traceId, consistency), callback); + } + + /** + * @param {Uuid} traceId + * @param {Number} consistency + * @returns {Promise} + * @private + */ + async _getTrace(traceId, consistency) { + if (!this.initialized) { + throw this._uninitializedError(); + } + + let trace; + let attempts = 0; + const info = ExecutionOptions.empty(); + info.getConsistency = () => consistency; + + const sessionRequest = new requests.QueryRequest(util.format(_selectTraceSession, traceId), null, info); + const eventsRequest = new requests.QueryRequest(util.format(_selectTraceEvents, traceId), null, info); + + while (!trace && (attempts++ < _traceMaxAttemps)) { + const sessionResponse = await this.controlConnection.query(sessionRequest); + const sessionRow = sessionResponse.rows[0]; + + if (!sessionRow || typeof sessionRow['duration'] !== 'number') { + await promiseUtils.delay(_traceAttemptDelay); + continue; + } + + trace = { + requestType: sessionRow['request'], + coordinator: sessionRow['coordinator'], + parameters: sessionRow['parameters'], + startedAt: sessionRow['started_at'], + duration: sessionRow['duration'], + clientAddress: sessionRow['client'], + events: null + }; + + const eventsResponse = await this.controlConnection.query(eventsRequest); + trace.events = eventsResponse.rows.map(row => ({ + id: row['event_id'], + activity: row['activity'], + source: row['source'], + elapsed: row['source_elapsed'], + 
thread: row['thread'] + })); + } + + if (!trace) { + throw new Error(`Trace ${traceId.toString()} could not fully retrieved after ${_traceMaxAttemps} attempts`); + } + + return trace; + } + + /** + * Checks whether hosts that are currently up agree on the schema definition. + *

+ * This method performs a one-time check only, without any form of retry; therefore + * protocolOptions.maxSchemaAgreementWaitSeconds setting does not apply in this case. + *

+ * @param {Function} [callback] A function that is invoked with a value + * true when all hosts agree on the schema and false when there is no agreement or when + * the check could not be performed (for example, if the control connection is down). + * @returns {Promise} Returns a Promise when a callback is not provided. The promise resolves to + * true when all hosts agree on the schema and false when there is no agreement or when + * the check could not be performed (for example, if the control connection is down). + */ + checkSchemaAgreement(callback) { + return promiseUtils.optionalCallback(this._checkSchemaAgreement(), callback); + } + + /** + * Async-only version of check schema agreement. + * @private + */ + async _checkSchemaAgreement() { + const connection = this.controlConnection.connection; + if (!connection) { + return false; + } + try { + return await this.compareSchemaVersions(connection); + } + catch (err) { + return false; + } + } + + /** + * Uses the metadata to fill the user provided parameter hints + * @param {String} keyspace + * @param {Array} hints + * @internal + * @ignore + */ + async adaptUserHints(keyspace, hints) { + if (!Array.isArray(hints)) { + return; + } + const udts = []; + // Check for udts and get the metadata + for (let i = 0; i < hints.length; i++) { + const hint = hints[i]; + if (typeof hint !== 'string') { + continue; + } + + const type = types.dataTypes.getByName(hint); + this._checkUdtTypes(udts, type, keyspace); + hints[i] = type; + } + + for (const type of udts) { + const udtInfo = await this.getUdt(type.info.keyspace, type.info.name); + if (!udtInfo) { + throw new TypeError('User defined type not found: ' + type.info.keyspace + '.' 
+ type.info.name); + } + type.info = udtInfo; + } + } + + /** + * @param {Array} udts + * @param {{code, info}} type + * @param {string} keyspace + * @private + */ + _checkUdtTypes(udts, type, keyspace) { + if (type.code === types.dataTypes.udt) { + const udtName = type.info.split('.'); + type.info = { + keyspace: udtName[0], + name: udtName[1] + }; + if (!type.info.name) { + if (!keyspace) { + throw new TypeError('No keyspace specified for udt: ' + udtName.join('.')); + } + //use the provided keyspace + type.info.name = type.info.keyspace; + type.info.keyspace = keyspace; + } + udts.push(type); + return; + } + + if (!type.info) { + return; + } + if (type.code === types.dataTypes.list || type.code === types.dataTypes.set) { + return this._checkUdtTypes(udts, type.info, keyspace); + } + if (type.code === types.dataTypes.map) { + this._checkUdtTypes(udts, type.info[0], keyspace); + this._checkUdtTypes(udts, type.info[1], keyspace); + } + } + + /** + * Uses the provided connection to query the schema versions and compare them. + * @param {Connection} connection + * @internal + * @ignore + */ + async compareSchemaVersions(connection) { + const versions = new Set(); + const response1 = await connection.send(new requests.QueryRequest(_selectSchemaVersionLocal), null); + if (response1 && response1.rows && response1.rows.length === 1) { + versions.add(response1.rows[0]['schema_version'].toString()); + } + const response2 = await connection.send(new requests.QueryRequest(_selectSchemaVersionPeers), null); + if (response2 && response2.rows) { + for (const row of response2.rows) { + const value = row['schema_version']; + if (!value) { + continue; + } + versions.add(value.toString()); + } + } + return versions.size === 1; + } +} + +/** + * Allows to store prepared queries and retrieval by query or query id. 
+ * @ignore + */ +class PreparedQueries { + + /** + * @param {Number} maxPrepared + * @param {Function} logger + */ + constructor(maxPrepared, logger) { + this.length = 0; + this._maxPrepared = maxPrepared; + this._mapByKey = new Map(); + this._mapById = new Map(); + this._logger = logger; + } + + _getKey(keyspace, query) { + return (keyspace || '') + query; + } + + getOrAdd(keyspace, query) { + const key = this._getKey(keyspace, query); + let info = this._mapByKey.get(key); + if (info) { + return info; + } + + this._validateOverflow(); + + info = new events.EventEmitter(); + info.setMaxListeners(0); + info.query = query; + // The keyspace in which it was prepared + info.keyspace = keyspace; + this._mapByKey.set(key, info); + this.length++; + return info; + } + + _validateOverflow() { + if (this.length < this._maxPrepared) { + return; + } + + const toRemove = []; + this._logger('warning', + 'Prepared statements exceeded maximum. This could be caused by preparing queries that contain parameters'); + + const toRemoveLength = this.length - this._maxPrepared + 1; + + for (const [key, info] of this._mapByKey) { + if (!info.queryId) { + // Only remove queries that contain queryId + continue; + } + + const length = toRemove.push([key, info]); + if (length >= toRemoveLength) { + break; + } + } + + for (const [key, info] of toRemove) { + this._mapByKey.delete(key); + this._mapById.delete(info.queryId.toString('hex')); + this.length--; + } + } + + setById(info) { + this._mapById.set(info.queryId.toString('hex'), info); + } + + getById(id) { + return this._mapById.get(id.toString('hex')); + } + + clear() { + this._mapByKey = new Map(); + this._mapById = new Map(); + this.length = 0; + } + + getAll() { + return Array.from(this._mapByKey.values()).filter(info => !!info.queryId); + } +} + +module.exports = Metadata; diff --git a/node_modules/cassandra-driver/lib/metadata/materialized-view.js b/node_modules/cassandra-driver/lib/metadata/materialized-view.js new file mode 100644 
index 0000000..455a66a --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/materialized-view.js @@ -0,0 +1,48 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const DataCollection = require('./data-collection'); +/** + * Creates a new MaterializedView. + * @param {String} name Name of the View. + * @classdesc Describes a CQL materialized view. + * @alias module:metadata~MaterializedView + * @augments {module:metadata~DataCollection} + * @constructor + */ +function MaterializedView(name) { + DataCollection.call(this, name); + /** + * Name of the table. + * @type {String} + */ + this.tableName = null; + /** + * View where clause. + * @type {String} + */ + this.whereClause = null; + /** + * Determines if all the table columns where are included in the view. + * @type {boolean} + */ + this.includeAllColumns = false; +} + +util.inherits(MaterializedView, DataCollection); + +module.exports = MaterializedView; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/schema-function.js b/node_modules/cassandra-driver/lib/metadata/schema-function.js new file mode 100644 index 0000000..40105c0 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/schema-function.js @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * Creates a new SchemaFunction. + * @classdesc Describes a CQL function. + * @alias module:metadata~SchemaFunction + * @constructor + */ +function SchemaFunction() { + /** + * Name of the cql function. + * @type {String} + */ + this.name = null; + /** + * Name of the keyspace where the cql function is declared. + */ + this.keyspaceName = null; + /** + * Signature of the function. + * @type {Array.} + */ + this.signature = null; + /** + * List of the function argument names. + * @type {Array.} + */ + this.argumentNames = null; + /** + * List of the function argument types. + * @type {Array.<{code, info}>} + */ + this.argumentTypes = null; + /** + * Body of the function. + * @type {String} + */ + this.body = null; + /** + * Determines if the function is called when the input is null. + * @type {Boolean} + */ + this.calledOnNullInput = null; + /** + * Name of the programming language, for example: java, javascript, ... + * @type {String} + */ + this.language = null; + /** + * Type of the return value. + * @type {{code: number, info: (Object|Array|null)}} + */ + this.returnType = null; + /** + * Indicates whether or not this function is deterministic. This means that + * given a particular input, the function will always produce the same output. 
+ * @type {Boolean} + */ + this.deterministic = null; + /** + * Indicates whether or not this function is monotonic on all of its + * arguments. This means that it is either entirely non-increasing or + * non-decreasing. Even if the function is not monotonic on all of its + * arguments, it's possible to specify that it is monotonic on one of + * its arguments, meaning that partial applications of the function over + * that argument will be monotonic. + * + * Monotonicity is required to use the function in a GROUP BY clause. + * @type {Boolean} + */ + this.monotonic = null; + /** + * The argument names that the function is monotonic on. + * + * If {@link monotonic} is true, this will return all argument names. + * Otherwise, this will return either one argument or an empty array. + * @type {Array.} + */ + this.monotonicOn = null; +} + +module.exports = SchemaFunction; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/schema-index.js b/node_modules/cassandra-driver/lib/metadata/schema-index.js new file mode 100644 index 0000000..8787bad --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/schema-index.js @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const util = require('util'); +const utils = require('../utils'); +const types = require('../types'); + +/** @private */ +const kind = { + custom: 0, + keys: 1, + composites: 2 +}; +/** + * Creates a new Index instance. + * @classdesc Describes a CQL index. + * @param {String} name + * @param {String} target + * @param {Number|String} kind + * @param {Object} options + * @alias module:metadata~Index + * @constructor + */ +function Index(name, target, kind, options) { + /** + * Name of the index. + * @type {String} + */ + this.name = name; + /** + * Target of the index. + * @type {String} + */ + this.target = target; + /** + * A numeric value representing index kind (0: custom, 1: keys, 2: composite); + * @type {Number} + */ + this.kind = typeof kind === 'string' ? getKindByName(kind) : kind; + /** + * An associative array containing the index options + * @type {Object} + */ + this.options = options; +} + +/** + * Determines if the index is of composites kind + * @returns {Boolean} + */ +Index.prototype.isCompositesKind = function () { + return this.kind === kind.composites; +}; + +/** + * Determines if the index is of keys kind + * @returns {Boolean} + */ +Index.prototype.isKeysKind = function () { + return this.kind === kind.keys; +}; + +/** + * Determines if the index is of custom kind + * @returns {Boolean} + */ +Index.prototype.isCustomKind = function () { + return this.kind === kind.custom; +}; + +/** + * Parses Index information from rows in the 'system_schema.indexes' table + * @deprecated It will be removed in the next major version. 
+ * @param {Array.} indexRows + * @returns {Array.} + */ +Index.fromRows = function (indexRows) { + if (!indexRows || indexRows.length === 0) { + return utils.emptyArray; + } + return indexRows.map(function (row) { + const options = row['options']; + return new Index(row['index_name'], options['target'], getKindByName(row['kind']), options); + }); +}; + +/** + * Parses Index information from rows in the legacy 'system.schema_columns' table. + * @deprecated It will be removed in the next major version. + * @param {Array.} columnRows + * @param {Object.} columnsByName + * @returns {Array.} + */ +Index.fromColumnRows = function (columnRows, columnsByName) { + const result = []; + for (let i = 0; i < columnRows.length; i++) { + const row = columnRows[i]; + const indexName = row['index_name']; + if (!indexName) { + continue; + } + const c = columnsByName[row['column_name']]; + let target; + const options = JSON.parse(row['index_options']); + if (options !== null && options['index_keys'] !== undefined) { + target = util.format("keys(%s)", c.name); + } + else if (options !== null && options['index_keys_and_values'] !== undefined) { + target = util.format("entries(%s)", c.name); + } + else if (c.type.options.frozen && (c.type.code === types.dataTypes.map || c.type.code === types.dataTypes.list || + c.type.code === types.dataTypes.set)) { + target = util.format("full(%s)", c.name); + } + else { + target = c.name; + } + result.push(new Index(indexName, target, getKindByName(row['index_type']), options)); + } + return result; +}; + +/** + * Gets the number representing the kind based on the name + * @param {String} name + * @returns {Number} + * @private + */ +function getKindByName(name) { + if (!name) { + return kind.custom; + } + return kind[name.toLowerCase()]; +} + +module.exports = Index; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/schema-parser.js b/node_modules/cassandra-driver/lib/metadata/schema-parser.js new file mode 100644 
index 0000000..af05a3b --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/schema-parser.js @@ -0,0 +1,1177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; +const util = require('util'); +const events = require('events'); +const types = require('../types'); +const utils = require('../utils'); +const errors = require('../errors'); +const promiseUtils = require('../promise-utils'); +const TableMetadata = require('./table-metadata'); +const Aggregate = require('./aggregate'); +const SchemaFunction = require('./schema-function'); +const Index = require('./schema-index'); +const MaterializedView = require('./materialized-view'); +const { format } = util; + +/** + * @module metadata/schemaParser + * @ignore + */ + +const _selectAllKeyspacesV1 = "SELECT * FROM system.schema_keyspaces"; +const _selectSingleKeyspaceV1 = "SELECT * FROM system.schema_keyspaces where keyspace_name = '%s'"; +const _selectAllKeyspacesV2 = "SELECT * FROM system_schema.keyspaces"; +const _selectSingleKeyspaceV2 = "SELECT * FROM system_schema.keyspaces where keyspace_name = '%s'"; +const _selectTableV1 = "SELECT * FROM system.schema_columnfamilies WHERE keyspace_name='%s' AND columnfamily_name='%s'"; +const _selectTableV2 = "SELECT * FROM system_schema.tables WHERE keyspace_name='%s' AND table_name='%s'"; +const _selectColumnsV1 = "SELECT * FROM system.schema_columns WHERE keyspace_name='%s' AND columnfamily_name='%s'"; 
+const _selectColumnsV2 = "SELECT * FROM system_schema.columns WHERE keyspace_name='%s' AND table_name='%s'"; +const _selectIndexesV2 = "SELECT * FROM system_schema.indexes WHERE keyspace_name='%s' AND table_name='%s'"; +const _selectUdtV1 = "SELECT * FROM system.schema_usertypes WHERE keyspace_name='%s' AND type_name='%s'"; +const _selectUdtV2 = "SELECT * FROM system_schema.types WHERE keyspace_name='%s' AND type_name='%s'"; +const _selectFunctionsV1 = "SELECT * FROM system.schema_functions WHERE keyspace_name = '%s' AND function_name = '%s'"; +const _selectFunctionsV2 = "SELECT * FROM system_schema.functions WHERE keyspace_name = '%s' AND function_name = '%s'"; +const _selectAggregatesV1 = "SELECT * FROM system.schema_aggregates WHERE keyspace_name = '%s' AND aggregate_name = '%s'"; +const _selectAggregatesV2 = "SELECT * FROM system_schema.aggregates WHERE keyspace_name = '%s' AND aggregate_name = '%s'"; +const _selectMaterializedViewV2 = "SELECT * FROM system_schema.views WHERE keyspace_name = '%s' AND view_name = '%s'"; + +const _selectAllVirtualKeyspaces = "SELECT * FROM system_virtual_schema.keyspaces"; +const _selectSingleVirtualKeyspace = "SELECT * FROM system_virtual_schema.keyspaces where keyspace_name = '%s'"; +const _selectVirtualTable = "SELECT * FROM system_virtual_schema.tables where keyspace_name = '%s' and table_name='%s'"; +const _selectVirtualColumns = "SELECT * FROM system_virtual_schema.columns where keyspace_name = '%s' and table_name='%s'"; + + +/** + * @abstract + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc + * @constructor + * @ignore + */ +class SchemaParser { + constructor(options, cc) { + this.cc = cc; + this.encodingOptions = options.encoding; + this.selectTable = null; + this.selectColumns = null; + this.selectIndexes = null; + this.selectUdt = null; + this.selectAggregates = null; + this.selectFunctions = null; + this.supportsVirtual = false; + } + + /** + * @param name + * @param 
durableWrites + * @param strategy + * @param strategyOptions + * @param virtual + * @returns {{name, durableWrites, strategy, strategyOptions, tokenToReplica, udts, tables, functions, aggregates}} + * @protected + */ + _createKeyspace(name, durableWrites, strategy, strategyOptions, virtual) { + return { + name, + durableWrites, + strategy, + strategyOptions, + virtual: virtual === true, + udts: {}, + tables: {}, + functions: {}, + aggregates: {}, + views: {}, + tokenToReplica: getTokenToReplicaMapper(strategy, strategyOptions), + graphEngine: undefined + }; + } + + /** + * @abstract + * @param {String} name + * @returns {Promise} + */ + getKeyspace(name) { + } + + /** + * @abstract + * @param {Boolean} waitReconnect + * @returns {Promise>} + */ + getKeyspaces(waitReconnect) { + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @param {Object} cache + * @param {Boolean} virtual + * @returns {Promise} + */ + async getTable(keyspaceName, name, cache, virtual) { + let tableInfo = cache && cache[name]; + if (!tableInfo) { + tableInfo = new TableMetadata(name); + if (cache) { + cache[name] = tableInfo; + } + } + if (tableInfo.loaded) { + return tableInfo; + } + if (tableInfo.loading) { + // Wait for it to emit + return promiseUtils.fromEvent(tableInfo, 'load'); + } + try { + // its not cached and not being retrieved + tableInfo.loading = true; + let indexRows; + let virtualTable = virtual; + const selectTable = virtualTable ? _selectVirtualTable : this.selectTable; + const query = util.format(selectTable, keyspaceName, name); + let tableRow = await this._getFirstRow(query); + // if we weren't sure if table was virtual or not, query virtual schema. 
+ if (!tableRow && this.supportsVirtual && virtualTable === undefined) { + const query = util.format(_selectVirtualTable, keyspaceName, name); + try { + tableRow = await this._getFirstRow(query); + } + catch (err) { + // we can't error here as we can't be sure if the node + // supports virtual tables, in this case it is adequate + // to act as if there was no matching table. + } + if (tableRow) { + // We are fetching a virtual table + virtualTable = true; + } + } + if (!tableRow) { + tableInfo.loading = false; + tableInfo.emit('load', null, null); + return null; + } + const selectColumns = virtualTable ? _selectVirtualColumns : this.selectColumns; + const columnRows = await this._getRows(util.format(selectColumns, keyspaceName, name)); + if (this.selectIndexes && !virtualTable) { + indexRows = await this._getRows(util.format(this.selectIndexes, keyspaceName, name)); + } + await this._parseTableOrView(tableInfo, tableRow, columnRows, indexRows, virtualTable); + tableInfo.loaded = true; + tableInfo.emit('load', null, tableInfo); + return tableInfo; + } + catch (err) { + tableInfo.emit('load', err, null); + throw err; + } + finally { + tableInfo.loading = false; + } + } + + async _getFirstRow(query) { + const rows = await this._getRows(query); + return rows[0]; + } + + async _getRows(query) { + const response = await this.cc.query(query); + return response.rows; + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @param {Object} cache + * @returns {Promise} + */ + async getUdt(keyspaceName, name, cache) { + let udtInfo = cache && cache[name]; + if (!udtInfo) { + udtInfo = new events.EventEmitter(); + if (cache) { + cache[name] = udtInfo; + } + udtInfo.setMaxListeners(0); + udtInfo.loading = false; + udtInfo.name = name; + udtInfo.keyspace = keyspaceName; + udtInfo.fields = null; + } + if (udtInfo.fields) { + return udtInfo; + } + if (udtInfo.loading) { + return promiseUtils.fromEvent(udtInfo, 'load'); + } + udtInfo.loading = true; + const query = 
format(this.selectUdt, keyspaceName, name); + try { + const row = await this._getFirstRow(query); + if (!row) { + udtInfo.loading = false; + udtInfo.emit('load', null, null); + return null; + } + await this._parseUdt(udtInfo, row); + udtInfo.emit('load', null, udtInfo); + return udtInfo; + } + catch (err) { + udtInfo.emit('load', err); + throw err; + } + finally { + udtInfo.loading = false; + } + } + + /** + * Parses the udt information from the row + * @param udtInfo + * @param {Row} row + * @returns {Promise} + * @abstract + */ + _parseUdt(udtInfo, row) { + } + + /** + * Builds the metadata based on the table and column rows + * @abstract + * @param {module:metadata~TableMetadata} tableInfo + * @param {Row} tableRow + * @param {Array.} columnRows + * @param {Array.} indexRows + * @param {Boolean} virtual + * @returns {Promise} + * @throws {Error} + */ + async _parseTableOrView(tableInfo, tableRow, columnRows, indexRows, virtual) { + } + + /** + * @abstract + * @param {String} keyspaceName + * @param {String} name + * @param {Object} cache + * @returns {Promise} + */ + getMaterializedView(keyspaceName, name, cache) { + } + + /** + * @param {String} keyspaceName + * @param {String} name + * @param {Boolean} aggregate + * @param {Object} cache + * @returns {Promise} + */ + async getFunctions(keyspaceName, name, aggregate, cache) { + /** @type {String} */ + let query = this.selectFunctions; + let parser = row => this._parseFunction(row); + if (aggregate) { + query = this.selectAggregates; + parser = row => this._parseAggregate(row); + } + // if it's not already loaded, get all functions with that name + // cache it by name and, within name, by signature + let functionsInfo = cache && cache[name]; + if (!functionsInfo) { + functionsInfo = new events.EventEmitter(); + if (cache) { + cache[name] = functionsInfo; + } + functionsInfo.setMaxListeners(0); + } + if (functionsInfo.values) { + return functionsInfo.values; + } + if (functionsInfo.loading) { + return 
promiseUtils.fromEvent(functionsInfo, 'load'); + } + functionsInfo.loading = true; + try { + const rows = await this._getRows(format(query, keyspaceName, name)); + const funcs = await Promise.all(rows.map(parser)); + const result = new Map(); + if (rows.length > 0) { + // Cache positive hits + functionsInfo.values = result; + } + + funcs.forEach(f => functionsInfo.values.set(f.signature.join(','), f)); + functionsInfo.emit('load', null, result); + return result; + } + catch (err) { + functionsInfo.emit('load', err); + throw err; + } + finally { + functionsInfo.loading = false; + } + } + + /** + * @abstract + * @param {Row} row + * @returns {Promise} + */ + _parseAggregate(row) { + } + + /** + * @abstract + * @param {Row} row + * @returns {Promise} + */ + _parseFunction(row) { + } + + /** @returns {Map} */ + _asMap(obj) { + if (!obj) { + return new Map(); + } + if (this.encodingOptions.map && obj instanceof this.encodingOptions.map) { + // Its already a Map or a polyfill of a Map + return obj; + } + return new Map(Object.keys(obj).map(k => [k, obj[k]])); + } + + _mapAsObject(map) { + if (!map) { + return map; + } + if (this.encodingOptions.map && map instanceof this.encodingOptions.map) { + const result = {}; + map.forEach((value, key) => result[key] = value); + return result; + } + return map; + } +} + +/** + * Used to parse schema information for Cassandra versions 1.2.x, and 2.x + * @ignore + */ +class SchemaParserV1 extends SchemaParser { + + /** + * @param {ClientOptions} options + * @param {ControlConnection} cc + */ + constructor(options, cc) { + super(options, cc); + this.selectTable = _selectTableV1; + this.selectColumns = _selectColumnsV1; + this.selectUdt = _selectUdtV1; + this.selectAggregates = _selectAggregatesV1; + this.selectFunctions = _selectFunctionsV1; + } + + async getKeyspaces(waitReconnect) { + const keyspaces = {}; + const result = await this.cc.query(_selectAllKeyspacesV1, waitReconnect); + for (let i = 0; i < result.rows.length; i++) { + 
const row = result.rows[i]; + const ksInfo = this._createKeyspace(row['keyspace_name'], row['durable_writes'], row['strategy_class'], JSON.parse(row['strategy_options'] || null)); + keyspaces[ksInfo.name] = ksInfo; + } + return keyspaces; + } + + async getKeyspace(name) { + const row = await this._getFirstRow(format(_selectSingleKeyspaceV1, name)); + if (!row) { + return null; + } + return this._createKeyspace(row['keyspace_name'], row['durable_writes'], row['strategy_class'], JSON.parse(row['strategy_options'])); + } + + // eslint-disable-next-line require-await + async _parseTableOrView(tableInfo, tableRow, columnRows, indexRows, virtual) { + // All the tableInfo parsing in V1 is sync, it uses a async function because the super class defines one + // to support other versions. + let c, name, types; + const encoder = this.cc.getEncoder(); + const columnsKeyed = {}; + let partitionKeys = []; + let clusteringKeys = []; + tableInfo.bloomFilterFalsePositiveChance = tableRow['bloom_filter_fp_chance']; + tableInfo.caching = tableRow['caching']; + tableInfo.comment = tableRow['comment']; + tableInfo.compactionClass = tableRow['compaction_strategy_class']; + tableInfo.compactionOptions = JSON.parse(tableRow['compaction_strategy_options']); + tableInfo.compression = JSON.parse(tableRow['compression_parameters']); + tableInfo.gcGraceSeconds = tableRow['gc_grace_seconds']; + tableInfo.localReadRepairChance = tableRow['local_read_repair_chance']; + tableInfo.readRepairChance = tableRow['read_repair_chance']; + tableInfo.populateCacheOnFlush = tableRow['populate_io_cache_on_flush'] || tableInfo.populateCacheOnFlush; + tableInfo.memtableFlushPeriod = tableRow['memtable_flush_period_in_ms'] || tableInfo.memtableFlushPeriod; + tableInfo.defaultTtl = tableRow['default_time_to_live'] || tableInfo.defaultTtl; + tableInfo.speculativeRetry = tableRow['speculative_retry'] || tableInfo.speculativeRetry; + tableInfo.indexInterval = tableRow['index_interval'] || tableInfo.indexInterval; + 
if (typeof tableRow['min_index_interval'] !== 'undefined') { + //Cassandra 2.1+ + tableInfo.minIndexInterval = tableRow['min_index_interval'] || tableInfo.minIndexInterval; + tableInfo.maxIndexInterval = tableRow['max_index_interval'] || tableInfo.maxIndexInterval; + } + else { + //set to null + tableInfo.minIndexInterval = null; + tableInfo.maxIndexInterval = null; + } + if (typeof tableRow['replicate_on_write'] !== 'undefined') { + //leave the default otherwise + tableInfo.replicateOnWrite = tableRow['replicate_on_write']; + } + tableInfo.columns = []; + for (let i = 0; i < columnRows.length; i++) { + const row = columnRows[i]; + const type = encoder.parseFqTypeName(row['validator']); + c = { + name: row['column_name'], + type: type, + isStatic: false + }; + tableInfo.columns.push(c); + columnsKeyed[c.name] = c; + switch (row['type']) { + case 'partition_key': + partitionKeys.push({ c: c, index: (row['component_index'] || 0) }); + break; + case 'clustering_key': + clusteringKeys.push({ + c: c, + index: (row['component_index'] || 0), + order: c.type.options.reversed ? 
'DESC' : 'ASC' + }); + break; + case 'static': + // C* 2.0.6+ supports static columns + c.isStatic = true; + break; + } + } + if (partitionKeys.length > 0) { + tableInfo.partitionKeys = partitionKeys.sort(utils.propCompare('index')).map(item => item.c); + clusteringKeys.sort(utils.propCompare('index')); + tableInfo.clusteringKeys = clusteringKeys.map(item => item.c); + tableInfo.clusteringOrder = clusteringKeys.map(item => item.order); + } + // In C* 1.2, keys are not stored on the schema_columns table + const keysStoredInTableRow = (tableInfo.partitionKeys.length === 0); + if (keysStoredInTableRow && tableRow['key_aliases']) { + //In C* 1.2, keys are not stored on the schema_columns table + partitionKeys = JSON.parse(tableRow['key_aliases']); + types = encoder.parseKeyTypes(tableRow['key_validator']).types; + for (let i = 0; i < partitionKeys.length; i++) { + name = partitionKeys[i]; + c = columnsKeyed[name]; + if (!c) { + c = { + name: name, + type: types[i] + }; + tableInfo.columns.push(c); + } + tableInfo.partitionKeys.push(c); + } + } + const comparator = encoder.parseKeyTypes(tableRow['comparator']); + if (keysStoredInTableRow && tableRow['column_aliases']) { + clusteringKeys = JSON.parse(tableRow['column_aliases']); + for (let i = 0; i < clusteringKeys.length; i++) { + name = clusteringKeys[i]; + c = columnsKeyed[name]; + if (!c) { + c = { + name: name, + type: comparator.types[i] + }; + tableInfo.columns.push(c); + } + tableInfo.clusteringKeys.push(c); + tableInfo.clusteringOrder.push(c.type.options.reversed ? 
'DESC' : 'ASC'); + } + } + tableInfo.isCompact = !!tableRow['is_dense']; + if (!tableInfo.isCompact) { + //is_dense column does not exist in previous versions of Cassandra + //also, compact pk, ck and val appear as is_dense false + // clusteringKeys != comparator types - 1 + // or not composite (comparator) + tableInfo.isCompact = ( + //clustering keys are not marked as composite + !comparator.isComposite || + //only 1 column not part of the partition or clustering keys + (!comparator.hasCollections && tableInfo.clusteringKeys.length !== comparator.types.length - 1)); + } + name = tableRow['value_alias']; + if (tableInfo.isCompact && name && !columnsKeyed[name]) { + //additional column in C* 1.2 as value_alias + c = { + name: name, + type: encoder.parseFqTypeName(tableRow['default_validator']) + }; + tableInfo.columns.push(c); + columnsKeyed[name] = c; + } + tableInfo.columnsByName = columnsKeyed; + tableInfo.indexes = Index.fromColumnRows(columnRows, tableInfo.columnsByName); + } + + getMaterializedView(keyspaceName, name, cache) { + return Promise.reject(new errors.NotSupportedError('Materialized views are not supported on Cassandra versions below 3.0')); + } + + // eslint-disable-next-line require-await + async _parseAggregate(row) { + const encoder = this.cc.getEncoder(); + const aggregate = new Aggregate(); + aggregate.name = row['aggregate_name']; + aggregate.keyspaceName = row['keyspace_name']; + aggregate.signature = row['signature'] || utils.emptyArray; + aggregate.stateFunction = row['state_func']; + aggregate.finalFunction = row['final_func']; + aggregate.initConditionRaw = row['initcond']; + aggregate.argumentTypes = (row['argument_types'] || utils.emptyArray).map(name => encoder.parseFqTypeName(name)); + aggregate.stateType = encoder.parseFqTypeName(row['state_type']); + const initConditionValue = encoder.decode(aggregate.initConditionRaw, aggregate.stateType); + if (initConditionValue !== null && typeof initConditionValue !== 'undefined') { + 
aggregate.initCondition = initConditionValue.toString(); + } + aggregate.returnType = encoder.parseFqTypeName(row['return_type']); + return aggregate; + } + + // eslint-disable-next-line require-await + async _parseFunction(row) { + const encoder = this.cc.getEncoder(); + const func = new SchemaFunction(); + func.name = row['function_name']; + func.keyspaceName = row['keyspace_name']; + func.signature = row['signature'] || utils.emptyArray; + func.argumentNames = row['argument_names'] || utils.emptyArray; + func.body = row['body']; + func.calledOnNullInput = row['called_on_null_input']; + func.language = row['language']; + func.argumentTypes = (row['argument_types'] || utils.emptyArray).map(name => encoder.parseFqTypeName(name)); + func.returnType = encoder.parseFqTypeName(row['return_type']); + return func; + } + + // eslint-disable-next-line require-await + async _parseUdt(udtInfo, row) { + const encoder = this.cc.getEncoder(); + const fieldNames = row['field_names']; + const fieldTypes = row['field_types']; + const fields = new Array(fieldNames.length); + for (let i = 0; i < fieldNames.length; i++) { + fields[i] = { + name: fieldNames[i], + type: encoder.parseFqTypeName(fieldTypes[i]) + }; + } + udtInfo.fields = fields; + return udtInfo; + } +} + + +/** + * Used to parse schema information for Cassandra versions 3.x and above + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc The control connection to be used + * @param {Function} udtResolver The function to be used to retrieve the udts. + * @ignore + */ +class SchemaParserV2 extends SchemaParser { + + /** + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc The control connection to be used + * @param {Function} udtResolver The function to be used to retrieve the udts. 
+ */ + constructor(options, cc, udtResolver) { + super(options, cc); + this.udtResolver = udtResolver; + this.selectTable = _selectTableV2; + this.selectColumns = _selectColumnsV2; + this.selectUdt = _selectUdtV2; + this.selectAggregates = _selectAggregatesV2; + this.selectFunctions = _selectFunctionsV2; + this.selectIndexes = _selectIndexesV2; + } + + async getKeyspaces(waitReconnect) { + const keyspaces = {}; + const result = await this.cc.query(_selectAllKeyspacesV2, waitReconnect); + for (let i = 0; i < result.rows.length; i++) { + const ksInfo = this._parseKeyspace(result.rows[i]); + keyspaces[ksInfo.name] = ksInfo; + } + return keyspaces; + } + + async getKeyspace(name) { + const row = await this._getFirstRow(format(_selectSingleKeyspaceV2, name)); + if (!row) { + return null; + } + return this._parseKeyspace(row); + } + + async getMaterializedView(keyspaceName, name, cache) { + let viewInfo = cache && cache[name]; + if (!viewInfo) { + viewInfo = new MaterializedView(name); + if (cache) { + cache[name] = viewInfo; + } + } + if (viewInfo.loaded) { + return viewInfo; + } + if (viewInfo.loading) { + return promiseUtils.fromEvent(viewInfo, 'load'); + } + viewInfo.loading = true; + try { + const tableRow = await this._getFirstRow(format(_selectMaterializedViewV2, keyspaceName, name)); + if (!tableRow) { + viewInfo.emit('load', null, null); + viewInfo.loading = false; + return null; + } + const columnRows = await this._getRows(format(this.selectColumns, keyspaceName, name)); + await this._parseTableOrView(viewInfo, tableRow, columnRows, null, false); + viewInfo.loaded = true; + viewInfo.emit('load', null, viewInfo); + return viewInfo; + } + catch (err) { + viewInfo.emit('load', err); + throw err; + } + finally { + viewInfo.loading = false; + } + } + + _parseKeyspace(row, virtual) { + const replication = row['replication']; + let strategy; + let strategyOptions; + if (replication) { + strategy = replication['class']; + strategyOptions = {}; + for (const key in 
replication) { + if (!replication.hasOwnProperty(key) || key === 'class') { + continue; + } + strategyOptions[key] = replication[key]; + } + } + + const ks = this._createKeyspace(row['keyspace_name'], row['durable_writes'], strategy, strategyOptions, virtual); + ks.graphEngine = row['graph_engine']; + return ks; + } + + async _parseTableOrView(tableInfo, tableRow, columnRows, indexRows, virtual) { + const encoder = this.cc.getEncoder(); + const columnsKeyed = {}; + const partitionKeys = []; + const clusteringKeys = []; + tableInfo.columns = await Promise.all(columnRows.map(async (row) => { + const type = await encoder.parseTypeName(tableRow['keyspace_name'], row['type'], 0, null, this.udtResolver); + const c = { + name: row['column_name'], + type: type, + isStatic: false + }; + columnsKeyed[c.name] = c; + switch (row['kind']) { + case 'partition_key': + partitionKeys.push({ c, index: (row['position'] || 0) }); + break; + case 'clustering': + clusteringKeys.push({ + c, index: (row['position'] || 0), order: row['clustering_order'] === 'desc' ? 
'DESC' : 'ASC' + }); + break; + case 'static': + c.isStatic = true; + break; + } + return c; + })); + tableInfo.columnsByName = columnsKeyed; + tableInfo.partitionKeys = partitionKeys.sort(utils.propCompare('index')).map(item => item.c); + clusteringKeys.sort(utils.propCompare('index')); + tableInfo.clusteringKeys = clusteringKeys.map(item => item.c); + tableInfo.clusteringOrder = clusteringKeys.map(item => item.order); + if (virtual) { + // When table is virtual, the only relevant information to parse are the columns + // as the table itself has no configuration + tableInfo.virtual = true; + return; + } + const isView = tableInfo instanceof MaterializedView; + tableInfo.bloomFilterFalsePositiveChance = tableRow['bloom_filter_fp_chance']; + tableInfo.caching = JSON.stringify(tableRow['caching']); + tableInfo.comment = tableRow['comment']; + // Regardless of the encoding options, use always an Object to represent an associative Array + const compaction = this._asMap(tableRow['compaction']); + if (compaction) { + // compactionOptions as an Object + tableInfo.compactionOptions = {}; + tableInfo.compactionClass = compaction.get('class'); + compaction.forEach((value, key) => { + if (key === 'class') { + return; + } + tableInfo.compactionOptions[key] = compaction.get(key); + }); + } + // Convert compression to an Object + tableInfo.compression = this._mapAsObject(tableRow['compression']); + tableInfo.gcGraceSeconds = tableRow['gc_grace_seconds']; + tableInfo.localReadRepairChance = tableRow['dclocal_read_repair_chance']; + tableInfo.readRepairChance = tableRow['read_repair_chance']; + tableInfo.extensions = this._mapAsObject(tableRow['extensions']); + tableInfo.crcCheckChance = tableRow['crc_check_chance']; + tableInfo.memtableFlushPeriod = tableRow['memtable_flush_period_in_ms'] || tableInfo.memtableFlushPeriod; + tableInfo.defaultTtl = tableRow['default_time_to_live'] || tableInfo.defaultTtl; + tableInfo.speculativeRetry = tableRow['speculative_retry'] || 
tableInfo.speculativeRetry; + tableInfo.minIndexInterval = tableRow['min_index_interval'] || tableInfo.minIndexInterval; + tableInfo.maxIndexInterval = tableRow['max_index_interval'] || tableInfo.maxIndexInterval; + tableInfo.nodesync = tableRow['nodesync'] || tableInfo.nodesync; + if (!isView) { + const cdc = tableRow['cdc']; + if (cdc !== undefined) { + tableInfo.cdc = cdc; + } + } + if (isView) { + tableInfo.tableName = tableRow['base_table_name']; + tableInfo.whereClause = tableRow['where_clause']; + tableInfo.includeAllColumns = tableRow['include_all_columns']; + return; + } + tableInfo.indexes = this._getIndexes(indexRows); + // flags can be an instance of Array or Set (real or polyfill) + let flags = tableRow['flags']; + if (Array.isArray(flags)) { + flags = new Set(flags); + } + const isDense = flags.has('dense'); + const isSuper = flags.has('super'); + const isCompound = flags.has('compound'); + tableInfo.isCompact = isSuper || isDense || !isCompound; + // Remove the columns related to Thrift + const isStaticCompact = !isSuper && !isDense && !isCompound; + if (isStaticCompact) { + pruneStaticCompactTableColumns(tableInfo); + } + else if (isDense) { + pruneDenseTableColumns(tableInfo); + } + } + + _getIndexes(indexRows) { + if (!indexRows || indexRows.length === 0) { + return utils.emptyArray; + } + return indexRows.map((row) => { + const options = this._mapAsObject(row['options']); + return new Index(row['index_name'], options['target'], row['kind'], options); + }); + } + + async _parseAggregate(row) { + const encoder = this.cc.getEncoder(); + const aggregate = new Aggregate(); + aggregate.name = row['aggregate_name']; + aggregate.keyspaceName = row['keyspace_name']; + aggregate.signature = row['argument_types'] || utils.emptyArray; + aggregate.stateFunction = row['state_func']; + aggregate.finalFunction = row['final_func']; + aggregate.initConditionRaw = row['initcond']; + aggregate.initCondition = aggregate.initConditionRaw; + aggregate.deterministic = 
row['deterministic'] || false; + aggregate.argumentTypes = await Promise.all(aggregate.signature.map(name => encoder.parseTypeName(row['keyspace_name'], name, 0, null, this.udtResolver))); + aggregate.stateType = await encoder.parseTypeName(row['keyspace_name'], row['state_type'], 0, null, this.udtResolver); + aggregate.returnType = await encoder.parseTypeName(row['keyspace_name'], row['return_type'], 0, null, this.udtResolver); + return aggregate; + } + + async _parseFunction(row) { + const encoder = this.cc.getEncoder(); + const func = new SchemaFunction(); + func.name = row['function_name']; + func.keyspaceName = row['keyspace_name']; + func.signature = row['argument_types'] || utils.emptyArray; + func.argumentNames = row['argument_names'] || utils.emptyArray; + func.body = row['body']; + func.calledOnNullInput = row['called_on_null_input']; + func.language = row['language']; + func.deterministic = row['deterministic'] || false; + func.monotonic = row['monotonic'] || false; + func.monotonicOn = row['monotonic_on'] || utils.emptyArray; + func.argumentTypes = await Promise.all(func.signature.map(name => encoder.parseTypeName(row['keyspace_name'], name, 0, null, this.udtResolver))); + func.returnType = await encoder.parseTypeName(row['keyspace_name'], row['return_type'], 0, null, this.udtResolver); + return func; + } + + async _parseUdt(udtInfo, row) { + const encoder = this.cc.getEncoder(); + const fieldTypes = row['field_types']; + const keyspace = row['keyspace_name']; + udtInfo.fields = await Promise.all(row['field_names'].map(async (name, i) => { + const type = await encoder.parseTypeName(keyspace, fieldTypes[i], 0, null, this.udtResolver); + return { name, type }; + })); + return udtInfo; + } +} + +/** + * Used to parse schema information for Cassandra versions 4.x and above. + * + * This parser similar to [SchemaParserV2] expect it also parses virtual + * keyspaces. 
+ * @ignore + */ +class SchemaParserV3 extends SchemaParserV2 { + /** + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc The control connection to be used + * @param {Function} udtResolver The function to be used to retrieve the udts. + */ + constructor(options, cc, udtResolver) { + super(options, cc, udtResolver); + this.supportsVirtual = true; + } + + async getKeyspaces(waitReconnect) { + const keyspaces = {}; + const queries = [ + { query: _selectAllKeyspacesV2, virtual: false }, + { query: _selectAllVirtualKeyspaces, virtual: true } + ]; + + await Promise.all(queries.map(async (q) => { + let result = null; + try { + result = await this.cc.query(q.query, waitReconnect); + } + catch (err) { + if (q.virtual) { + // Only throw error for non-virtual query as + // server reporting C* 4.0 may not actually implement + // virtual tables. + return; + } + throw err; + } + for (let i = 0; i < result.rows.length; i++) { + const ksInfo = this._parseKeyspace(result.rows[i], q.virtual); + keyspaces[ksInfo.name] = ksInfo; + } + })); + return keyspaces; + } + + async getKeyspace(name) { + const ks = await this._getKeyspace(_selectSingleKeyspaceV2, name, false); + if (!ks) { + // if not found, attempt to retrieve as virtual keyspace. + return this._getKeyspace(_selectSingleVirtualKeyspace, name, true); + } + return ks; + } + + async _getKeyspace(query, name, virtual) { + try { + const row = await this._getFirstRow(format(query, name)); + + if (!row) { + return null; + } + + return this._parseKeyspace(row, virtual); + } + catch (err) { + if (virtual) { + // only throw error for non-virtual query as + // server reporting C* 4.0 may not actually implement + // virtual tables. + return null; + } + throw err; + } + } +} + +/** + * Upon migration from thrift to CQL, we internally create a pair of surrogate clustering/regular columns + * for compact static tables. These columns shouldn't be exposed to the user but are currently returned by C*. 
+ * We also need to remove the static keyword for all other columns in the table. + * @param {module:metadata~TableMetadata} tableInfo +*/ +function pruneStaticCompactTableColumns(tableInfo) { + let i; + let c; + //remove "column1 text" clustering column + for (i = 0; i < tableInfo.clusteringKeys.length; i++) { + c = tableInfo.clusteringKeys[i]; + const index = tableInfo.columns.indexOf(c); + tableInfo.columns.splice(index, 1); + delete tableInfo.columnsByName[c.name]; + } + tableInfo.clusteringKeys = utils.emptyArray; + tableInfo.clusteringOrder = utils.emptyArray; + //remove regular columns and set the static columns to non-static + i = tableInfo.columns.length; + while (i--) { + c = tableInfo.columns[i]; + if (!c.isStatic && tableInfo.partitionKeys.indexOf(c) === -1) { + // remove "value blob" regular column + tableInfo.columns.splice(i, 1); + delete tableInfo.columnsByName[c.name]; + continue; + } + c.isStatic = false; + } +} + +/** + * Upon migration from thrift to CQL, we internally create a surrogate column "value" of type custom. + * This column shouldn't be exposed to the user but is currently returned by C*. 
+ * @param {module:metadata~TableMetadata} tableInfo + */ +function pruneDenseTableColumns(tableInfo) { + let i = tableInfo.columns.length; + while (i--) { + const c = tableInfo.columns[i]; + if (!c.isStatic && c.type.code === types.dataTypes.custom && c.type.info === 'empty') { + // remove "value blob" regular column + tableInfo.columns.splice(i, 1); + delete tableInfo.columnsByName[c.name]; + continue; + } + c.isStatic = false; + } +} + +function getTokenToReplicaMapper(strategy, strategyOptions) { + if (/SimpleStrategy$/.test(strategy)) { + const rf = parseInt(strategyOptions['replication_factor'], 10); + if (rf > 1) { + return getTokenToReplicaSimpleMapper(rf); + } + } + if (/NetworkTopologyStrategy$/.test(strategy)) { + return getTokenToReplicaNetworkMapper(strategyOptions); + } + //default, wrap in an Array + return (function noStrategy(tokenizer, ring, primaryReplicas) { + const replicas = {}; + for (const key in primaryReplicas) { + if (!primaryReplicas.hasOwnProperty(key)) { + continue; + } + replicas[key] = [primaryReplicas[key]]; + } + return replicas; + }); +} + +/** + * @param {Number} replicationFactor + * @returns {function} + */ +function getTokenToReplicaSimpleMapper(replicationFactor) { + return (function tokenSimpleStrategy(tokenizer, ringTokensAsStrings, primaryReplicas) { + const ringLength = ringTokensAsStrings.length; + const rf = Math.min(replicationFactor, ringLength); + const replicas = {}; + for (let i = 0; i < ringLength; i++) { + const key = ringTokensAsStrings[i]; + const tokenReplicas = [primaryReplicas[key]]; + for (let j = 1; j < ringLength && tokenReplicas.length < rf; j++) { + let nextReplicaIndex = i + j; + if (nextReplicaIndex >= ringLength) { + //circle back + nextReplicaIndex = nextReplicaIndex % ringLength; + } + const nextReplica = primaryReplicas[ringTokensAsStrings[nextReplicaIndex]]; + // In the case of vnodes, consecutive sections of the ring can be assigned to the same host. 
+ if (tokenReplicas.indexOf(nextReplica) === -1) { + tokenReplicas.push(nextReplica); + } + } + replicas[key] = tokenReplicas; + } + return replicas; + }); +} + +/** + * @param {Object} replicationFactors + * @returns {Function} + * @private + */ +function getTokenToReplicaNetworkMapper(replicationFactors) { + // A(DC1) + // + // H B(DC2) + // | + // G --+-- C(DC1) + // | + // F D(DC2) + // + // E(DC1) + return (function tokenNetworkStrategy(tokenizer, ringTokensAsStrings, primaryReplicas, datacenters) { + const replicas = {}; + const ringLength = ringTokensAsStrings.length; + + for (let i = 0; i < ringLength; i++) { + const key = ringTokensAsStrings[i]; + const tokenReplicas = []; + const replicasByDc = {}; + const racksPlaced = {}; + const skippedHosts = []; + for (let j = 0; j < ringLength; j++) { + let nextReplicaIndex = i + j; + if (nextReplicaIndex >= ringLength) { + //circle back + nextReplicaIndex = nextReplicaIndex % ringLength; + } + const h = primaryReplicas[ringTokensAsStrings[nextReplicaIndex]]; + // In the case of vnodes, consecutive sections of the ring can be assigned to the same host. 
+ if (tokenReplicas.indexOf(h) !== -1) { + continue; + } + const dc = h.datacenter; + //Check if the next replica belongs to one of the targeted dcs + let dcRf = parseInt(replicationFactors[dc], 10); + if (!dcRf) { + continue; + } + dcRf = Math.min(dcRf, datacenters[dc].hostLength); + let dcReplicas = replicasByDc[dc] || 0; + //Amount of replicas per dc is greater than rf or the amount of host in the datacenter + if (dcReplicas >= dcRf) { + continue; + } + let racksPlacedInDc = racksPlaced[dc]; + if (!racksPlacedInDc) { + racksPlacedInDc = racksPlaced[dc] = new utils.HashSet(); + } + if (h.rack && + racksPlacedInDc.contains(h.rack) && + racksPlacedInDc.length < datacenters[dc].racks.length) { + // We already selected a replica for this rack + // Skip until replicas in other racks are added + if (skippedHosts.length < dcRf - dcReplicas) { + skippedHosts.push(h); + } + continue; + } + replicasByDc[h.datacenter] = ++dcReplicas; + tokenReplicas.push(h); + if (h.rack && racksPlacedInDc.add(h.rack) && racksPlacedInDc.length === datacenters[dc].racks.length) { + // We finished placing all replicas for all racks in this dc + // Add the skipped hosts + replicasByDc[dc] += addSkippedHosts(dcRf, dcReplicas, tokenReplicas, skippedHosts); + } + if (isDoneForToken(replicationFactors, datacenters, replicasByDc)) { + break; + } + } + replicas[key] = tokenReplicas; + } + return replicas; + }); +} + +/** + * @returns {Number} The number of skipped hosts added. 
+ */ +function addSkippedHosts(dcRf, dcReplicas, tokenReplicas, skippedHosts) { + let i; + for (i = 0; i < dcRf - dcReplicas && i < skippedHosts.length; i++) { + tokenReplicas.push(skippedHosts[i]); + } + return i; +} + +function isDoneForToken(replicationFactors, datacenters, replicasByDc) { + const keys = Object.keys(replicationFactors); + for (let i = 0; i < keys.length; i++) { + const dcName = keys[i]; + const dc = datacenters[dcName]; + if (!dc) { + // A DC is included in the RF but the DC does not exist in the topology + continue; + } + const rf = Math.min(parseInt(replicationFactors[dcName], 10), dc.hostLength); + if (rf > 0 && (!replicasByDc[dcName] || replicasByDc[dcName] < rf)) { + return false; + } + } + return true; +} + +/** + * Creates a new instance if the currentInstance is not valid for the + * provided Cassandra version + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc The control connection to be used + * @param {Function} udtResolver The function to be used to retrieve the udts. 
+ * @param {Array.} [version] The cassandra version + * @param {SchemaParser} [currentInstance] The current instance + * @returns {SchemaParser} + */ +function getByVersion(options, cc, udtResolver, version, currentInstance) { + let parserConstructor = SchemaParserV1; + if (version && version[0] === 3) { + parserConstructor = SchemaParserV2; + } else if (version && version[0] >= 4) { + parserConstructor = SchemaParserV3; + } + if (!currentInstance || !(currentInstance instanceof parserConstructor)){ + return new parserConstructor(options, cc, udtResolver); + } + return currentInstance; +} + +exports.getByVersion = getByVersion; +exports.isDoneForToken = isDoneForToken; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metadata/table-metadata.js b/node_modules/cassandra-driver/lib/metadata/table-metadata.js new file mode 100644 index 0000000..87e171e --- /dev/null +++ b/node_modules/cassandra-driver/lib/metadata/table-metadata.js @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const util = require('util'); +const DataCollection = require('./data-collection'); +/** + * Creates a new instance of TableMetadata + * @classdesc Describes a table + * @param {String} name Name of the Table + * @augments {module:metadata~DataCollection} + * @alias module:metadata~TableMetadata + * @constructor + */ +function TableMetadata(name) { + DataCollection.call(this, name); + /** + * Applies only to counter tables. + * When set to true, replicates writes to all affected replicas regardless of the consistency level specified by + * the client for a write request. For counter tables, this should always be set to true. + * @type {Boolean} + */ + this.replicateOnWrite = true; + /** + * Returns the memtable flush period (in milliseconds) option for this table. + * @type {Number} + */ + this.memtableFlushPeriod = 0; + /** + * Returns the index interval option for this table. + *

+ * Note: this option is only available in Apache Cassandra 2.0. It is deprecated in Apache Cassandra 2.1 and + * above, and will therefore return null for 2.1 nodes. + *

+ * @type {Number|null} + */ + this.indexInterval = null; + /** + * Determines whether the table uses the COMPACT STORAGE option. + * @type {Boolean} + */ + this.isCompact = false; + /** + * + * @type {Array.} + */ + this.indexes = null; + + /** + * Determines whether the Change Data Capture (CDC) flag is set for the table. + * @type {Boolean|null} + */ + this.cdc = null; + + /** + * Determines whether the table is a virtual table or not. + * @type {Boolean} + */ + this.virtual = false; +} + +util.inherits(TableMetadata, DataCollection); + +module.exports = TableMetadata; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metrics/client-metrics.js b/node_modules/cassandra-driver/lib/metrics/client-metrics.js new file mode 100644 index 0000000..88fe073 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metrics/client-metrics.js @@ -0,0 +1,129 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Represents a base class that is used to measure events from the server and the client as seen by the driver. + * @alias module:metrics~ClientMetrics + * @interface + */ +class ClientMetrics { + /** + * Method invoked when an authentication error is obtained from the server. + * @param {AuthenticationError|Error} e The error encountered. 
+ */ + onAuthenticationError(e) {} + + /** + * Method invoked when an error (different than a server or client timeout, authentication or connection error) is + * encountered when executing a request. + * @param {OperationTimedOutError} e The timeout error. + */ + onClientTimeoutError(e) {} + + /** + * Method invoked when there is a connection error. + * @param {Error} e The error encountered. + */ + onConnectionError(e) {} + + /** + * Method invoked when an error (different than a server or client timeout, authentication or connection error) is + * encountered when executing a request. + * @param {Error} e The error encountered. + */ + onOtherError(e) {} + + /** + * Method invoked when a read timeout error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onReadTimeoutError(e) {} + + /** + * Method invoked when a write timeout error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onWriteTimeoutError(e) {} + + /** + * Method invoked when an unavailable error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onUnavailableError(e) {} + + /** + * Method invoked when an execution is retried as a result of a client-level timeout. + * @param {Error} e The error that caused the retry. + */ + onClientTimeoutRetry(e) {} + + /** + * Method invoked when an error (other than a server or client timeout) is retried. + * @param {Error} e The error that caused the retry. + */ + onOtherErrorRetry(e) {} + + /** + * Method invoked when an execution is retried as a result of a read timeout from the server (coordinator to replica). + * @param {Error} e The error that caused the retry. + */ + onReadTimeoutRetry(e) {} + + /** + * Method invoked when an execution is retried as a result of an unavailable error from the server. + * @param {Error} e The error that caused the retry. 
+ */ + onUnavailableRetry(e) {} + + /** + * Method invoked when an execution is retried as a result of a write timeout from the server (coordinator to + * replica). + * @param {Error} e The error that caused the retry. + */ + onWriteTimeoutRetry(e) {} + + /** + * Method invoked when an error is marked as ignored by the retry policy. + * @param {Error} e The error that was ignored by the retry policy. + */ + onIgnoreError(e) {} + + /** + * Method invoked when a speculative execution is started. + */ + onSpeculativeExecution() {} + + /** + * Method invoked when a response is obtained successfully. + * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple + * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. + */ + onSuccessfulResponse(latency) {} + + /** + * Method invoked when any response is obtained, the response can be the result of a successful execution or a + * server-side error. + * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple + * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. + */ + onResponse(latency) { + + } +} + +module.exports = ClientMetrics; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metrics/default-metrics.js b/node_modules/cassandra-driver/lib/metrics/default-metrics.js new file mode 100644 index 0000000..1df5dfa --- /dev/null +++ b/node_modules/cassandra-driver/lib/metrics/default-metrics.js @@ -0,0 +1,198 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const ClientMetrics = require('./client-metrics'); +const EventEmitter = require('events'); + +/** + * A default implementation of [ClientMetrics]{@link module:metrics~ClientMetrics} that exposes the driver events as + * Node.js events. + *

+ * An instance of [DefaultMetrics]{@link module:metrics~DefaultMetrics} is configured by default in the client, + * you can access this instance using [Client#metrics]{@link Client#metrics} property. + *

+ * @implements {module:metrics~ClientMetrics} + * @alias module:metrics~DefaultMetrics + * @example Listening to events emitted + * defaultMetrics.errors.on('increment', err => totalErrors++); + * defaultMetrics.errors.clientTimeout.on('increment', () => clientTimeoutErrors++); + * defaultMetrics.speculativeRetries.on('increment', () => specExecsCount++); + * defaultMetrics.responses.on('increment', latency => myHistogram.record(latency)); + */ +class DefaultMetrics extends ClientMetrics { + /** + * Creates a new instance of [DefaultMetrics]{@link module:metrics~DefaultMetrics}. + */ + constructor() { + super(); + + /** + * Emits all the error events. + *

Use each of the properties to measure events of specific errors.

+ * @type {EventEmitter} + * @property {EventEmitter} authentication Emits the authentication timeout error events. + * @property {EventEmitter} clientTimeout Emits the client timeout error events. + * @property {EventEmitter} connection Emits the connection error events. + * @property {EventEmitter} readTimeout Emits the read timeout error events obtained from the server. + * @property {EventEmitter} other Emits the error events, that are not part of the other categories. + * @property {EventEmitter} unavailable Emits the unavailable error events obtained from the server. + * @property {EventEmitter} writeTimeout Emits the write timeout error events obtained from the server + */ + this.errors = new EventEmitter(); + this.errors.authentication = new EventEmitter(); + this.errors.clientTimeout = new EventEmitter(); + this.errors.connection = new EventEmitter(); + this.errors.other = new EventEmitter(); + this.errors.readTimeout = new EventEmitter(); + this.errors.unavailable = new EventEmitter(); + this.errors.writeTimeout = new EventEmitter(); + + /** + * Emits all the retry events. + *

Use each of the properties to measure events of specific retries.

+ * @type {EventEmitter} + * @property {EventEmitter} clientTimeout Emits when an execution is retried as a result of an client timeout. + * @property {EventEmitter} other Emits the error events, that are not part of the other categories. + * @property {EventEmitter} readTimeout Emits an execution is retried as a result of an read timeout error from the + * server (coordinator to replica). + * @property {EventEmitter} unavailable Emits an execution is retried as a result of an unavailable error from the + * server. + * @property {EventEmitter} writeTimeout Emits an execution is retried as a result of a write timeout error from the + * server (coordinator to replica). + */ + this.retries = new EventEmitter(); + this.retries.clientTimeout = new EventEmitter(); + this.retries.other = new EventEmitter(); + this.retries.readTimeout = new EventEmitter(); + this.retries.unavailable = new EventEmitter(); + this.retries.writeTimeout = new EventEmitter(); + + /** + * Emits events when a speculative execution is started. + * @type {EventEmitter} + */ + this.speculativeExecutions = new EventEmitter(); + + /** + * Emits events when an error is ignored by the retry policy. + * @type {EventEmitter} + */ + this.ignoredErrors = new EventEmitter(); + + /** + * Emits events when a response message is obtained. + * @type {EventEmitter} + * @property {EventEmitter} success Emits when a response was obtained as the result of a successful execution. 
+ */ + this.responses = new EventEmitter(); + this.responses.success = new EventEmitter(); + } + + /** @override */ + onAuthenticationError(e) { + this.errors.authentication.emit('increment', e); + this.errors.emit('increment', e);} + + /** @override */ + onConnectionError(e) { + this.errors.connection.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onReadTimeoutError(e) { + this.errors.readTimeout.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onWriteTimeoutError(e) { + this.errors.writeTimeout.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onUnavailableError(e) { + this.errors.unavailable.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onClientTimeoutError(e) { + this.errors.clientTimeout.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onOtherError(e) { + this.errors.other.emit('increment', e); + this.errors.emit('increment', e); + } + + /** @override */ + onClientTimeoutRetry(e) { + this.retries.clientTimeout.emit('increment', e); + this.retries.emit('increment', e); + } + + /** @override */ + onOtherErrorRetry(e) { + this.retries.other.emit('increment', e); + this.retries.emit('increment', e); + } + + /** @override */ + onReadTimeoutRetry(e) { + this.retries.readTimeout.emit('increment', e); + this.retries.emit('increment', e); + } + + /** @override */ + onUnavailableRetry(e) { + this.retries.unavailable.emit('increment', e); + this.retries.emit('increment', e); + } + + /** @override */ + onWriteTimeoutRetry(e) { + this.retries.writeTimeout.emit('increment', e); + this.retries.emit('increment', e); + } + + /** @override */ + onIgnoreError(e) { + this.ignoredErrors.emit('increment', e); + } + + /** @override */ + onSpeculativeExecution() { + this.speculativeExecutions.emit('increment'); + } + + /** @override */ + onSuccessfulResponse(latency) { + 
this.responses.success.emit('increment', latency); + } + + /** @override */ + onResponse(latency) { + this.responses.emit('increment', latency); + } +} + +module.exports = DefaultMetrics; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metrics/index.d.ts b/node_modules/cassandra-driver/lib/metrics/index.d.ts new file mode 100644 index 0000000..4ad8005 --- /dev/null +++ b/node_modules/cassandra-driver/lib/metrics/index.d.ts @@ -0,0 +1,89 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { errors } from '../../'; + +export namespace metrics { + interface ClientMetrics { + onAuthenticationError(e: Error | errors.AuthenticationError): void; + + onClientTimeoutError(e: errors.OperationTimedOutError): void; + + onClientTimeoutRetry(e: Error): void; + + onConnectionError(e: Error): void; + + onIgnoreError(e: Error): void; + + onOtherError(e: Error): void; + + onOtherErrorRetry(e: Error): void; + + onReadTimeoutError(e: errors.ResponseError): void; + + onReadTimeoutRetry(e: Error): void; + + onResponse(latency: number[]): void; + + onSpeculativeExecution(): void; + + onSuccessfulResponse(latency: number[]): void; + + onUnavailableError(e: errors.ResponseError): void; + + onUnavailableRetry(e: Error): void; + + onWriteTimeoutError(e: errors.ResponseError): void; + + onWriteTimeoutRetry(e: Error): void; + } + + class DefaultMetrics implements ClientMetrics { + constructor(); + + onAuthenticationError(e: Error | errors.AuthenticationError): void; + + onClientTimeoutError(e: errors.OperationTimedOutError): void; + + onClientTimeoutRetry(e: Error): void; + + onConnectionError(e: Error): void; + + onIgnoreError(e: Error): void; + + onOtherError(e: Error): void; + + onOtherErrorRetry(e: Error): void; + + onReadTimeoutError(e: errors.ResponseError): void; + + onReadTimeoutRetry(e: Error): void; + + onResponse(latency: number[]): void; + + onSpeculativeExecution(): void; + + onSuccessfulResponse(latency: number[]): void; + + onUnavailableError(e: errors.ResponseError): void; + + onUnavailableRetry(e: Error): void; + + onWriteTimeoutError(e: errors.ResponseError): void; + + onWriteTimeoutRetry(e: Error): void; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/metrics/index.js b/node_modules/cassandra-driver/lib/metrics/index.js new file mode 100644 index 0000000..9afb03a --- /dev/null +++ b/node_modules/cassandra-driver/lib/metrics/index.js @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const ClientMetrics = require('./client-metrics'); +const DefaultMetrics = require('./default-metrics'); + +/** + * The metrics module contains interfaces and implementations used by the driver to expose + * measurements of its internal behavior and of the server as seen from the driver side. + * @module metrics + */ + +module.exports = { ClientMetrics, DefaultMetrics }; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/operation-state.js b/node_modules/cassandra-driver/lib/operation-state.js new file mode 100644 index 0000000..8fe623d --- /dev/null +++ b/node_modules/cassandra-driver/lib/operation-state.js @@ -0,0 +1,164 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const util = require('util'); +const utils = require('./utils'); +const errors = require('./errors'); +const requests = require('./requests'); +const ExecuteRequest = requests.ExecuteRequest; +const QueryRequest = requests.QueryRequest; + +const state = { + init: 0, + completed: 1, + timedOut: 2, + cancelled: 3 +}; + +/** + * Maintains the state information of a request inside a Connection. + */ +class OperationState { + /** + * Creates a new instance of OperationState. + * @param {Request} request + * @param {Function} rowCallback + * @param {Function} callback + */ + constructor(request, rowCallback, callback) { + this.request = request; + this._rowCallback = rowCallback; + this._callback = callback; + this._timeout = null; + this._state = state.init; + this._rowIndex = 0; + /** + * Stream id that is set right before being written. + * @type {number} + */ + this.streamId = -1; + } + + /** + * Marks the operation as cancelled, clearing all callbacks and timeouts. + */ + cancel() { + if (this._state !== state.init) { + return; + } + if (this._timeout !== null) { + clearTimeout(this._timeout); + } + this._state = state.cancelled; + this._callback = utils.noop; + } + + /** + * Determines if the operation can be written to the wire (when it hasn't been cancelled or it hasn't timed out). + */ + canBeWritten() { + return this._state === state.init; + } + + /** + * Determines if the response is going to be yielded by row. + * @return {boolean} + */ + isByRow() { + return this._rowCallback && (this.request instanceof ExecuteRequest || this.request instanceof QueryRequest); + } + + /** + * Creates the timeout for the request. + * @param {ExecutionOptions} execOptions + * @param {Number} defaultReadTimeout + * @param {String} address + * @param {Function} onTimeout The callback to be invoked when it times out. + * @param {Function} onResponse The callback to be invoked if a response is obtained after it timed out. 
+ */ + setRequestTimeout(execOptions, defaultReadTimeout, address, onTimeout, onResponse) { + if (this._state !== state.init) { + // No need to set the timeout + return; + } + const millis = execOptions.getReadTimeout() !== undefined ? execOptions.getReadTimeout() : defaultReadTimeout; + if (!(millis > 0)) { + // Read timeout disabled + return; + } + const self = this; + this._timeout = setTimeout(function requestTimedOut() { + onTimeout(); + const message = util.format('The host %s did not reply before timeout %d ms', address, millis); + self._markAsTimedOut(new errors.OperationTimedOutError(message, address), onResponse); + }, millis); + } + + setResultRow(row, meta, rowLength, flags, header) { + this._markAsCompleted(); + if (!this._rowCallback) { + return this.setResult(new errors.DriverInternalError('RowCallback not found for streaming frame handler')); + } + this._rowCallback(this._rowIndex++, row, rowLength); + if (this._rowIndex === rowLength) { + this._swapCallbackAndInvoke(null, { rowLength: rowLength, meta: meta, flags: flags }, header.bodyLength); + } + } + + /** + * Marks the current operation as timed out. + * @param {Error} err + * @param {Function} onResponse + * @private + */ + _markAsTimedOut(err, onResponse) { + if (this._state !== state.init) { + return; + } + this._state = state.timedOut; + this._swapCallbackAndInvoke(err, null, null, onResponse); + } + + _markAsCompleted() { + if (this._state !== state.init) { + return; + } + if (this._timeout !== null) { + clearTimeout(this._timeout); + } + this._state = state.completed; + } + + /** + * Sets the result of this operation, declaring that no further input will be processed for this operation. 
+ * @param {Error} err + * @param {Object} [result] + * @param {Number} [length] + */ + setResult(err, result, length) { + this._markAsCompleted(); + this._swapCallbackAndInvoke(err, result, length); + } + + _swapCallbackAndInvoke(err, result, length, newCallback) { + const callback = this._callback; + this._callback = newCallback || utils.noop; + callback(err, result, length); + } +} + +module.exports = OperationState; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/policies/address-resolution.js b/node_modules/cassandra-driver/lib/policies/address-resolution.js new file mode 100644 index 0000000..e0a0fc0 --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/address-resolution.js @@ -0,0 +1,139 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const dns = require('dns'); +const util = require('util'); +const utils = require('../utils'); +/** @module policies/addressResolution */ +/** + * @class + * @classdesc + * Translates IP addresses received from Cassandra nodes into locally queryable + * addresses. + *

+ * The driver auto-detects new Cassandra nodes added to the cluster through server + * side pushed notifications and through checking the system tables. For each + * node, the address received will correspond to the address set as + * rpc_address in the node yaml file. In most case, this is the correct + * address to use by the driver and that is what is used by default. However, + * sometimes the addresses received through this mechanism will either not be + * reachable directly by the driver or should not be the preferred address to use + * to reach the node (for instance, the rpc_address set on Cassandra nodes + * might be a private IP, but some clients may have to use a public IP, or + * pass by a router to reach that node). This interface allows to deal with + * such cases, by allowing to translate an address as sent by a Cassandra node + * to another address to be used by the driver for connection. + *

+ * Please note that the contact points addresses provided while creating the + * {@link Client} instance are not "translated", only IP address retrieve from or sent + * by Cassandra nodes to the driver are. + * @constructor + */ +function AddressTranslator() { + +} + +/** + * Translates a Cassandra rpc_address to another address if necessary. + * @param {String} address the address of a node as returned by Cassandra. + *

+ * Note that if the rpc_address of a node has been configured to 0.0.0.0 + * server side, then the provided address will be the node listen_address, + * *not* 0.0.0.0. + *

+ * @param {Number} port The port number, as specified in the [protocolOptions]{@link ClientOptions} at Client instance creation (9042 by default). + * @param {Function} callback Callback to invoke with endpoint as first parameter. + * The endpoint is an string composed of the IP address and the port number in the format ipAddress:port. + */ +AddressTranslator.prototype.translate = function (address, port, callback) { + callback(address + ':' + port); +}; + +/** + * @class + * @classdesc + * {@link AddressTranslator} implementation for multi-region EC2 deployments where clients are also deployed in EC2. + *

+ * Its distinctive feature is that it translates addresses according to the location of the Cassandra host: + *

+ *
    + *
  • addresses in different EC2 regions (than the client) are unchanged
  • + *
  • addresses in the same EC2 region are translated to private IPs
  • + *
+ *

+ * This optimizes network costs, because Amazon charges more for communication over public IPs. + *

+ * @constructor + */ +function EC2MultiRegionTranslator() { + +} + +util.inherits(EC2MultiRegionTranslator, AddressTranslator); + +/** + * Addresses in the same EC2 region are translated to private IPs and addresses in + * different EC2 regions (than the client) are unchanged + */ +EC2MultiRegionTranslator.prototype.translate = function (address, port, callback) { + let newAddress = address; + const self = this; + let name; + utils.series([ + function resolve(next) { + dns.reverse(address, function (err, hostNames) { + if (err) { + return next(err); + } + if (!hostNames) { + return next(); + } + name = hostNames[0]; + next(); + }); + }, + function lookup(next) { + if (!name) { + return next(); + } + dns.lookup(name, function (err, lookupAddress) { + if (err) { + return next(err); + } + newAddress = lookupAddress; + next(); + }); + }], function (err) { + if (err) { + //there was an issue while doing dns resolution + self.logError(address, err); + } + callback(newAddress + ':' + port); + }); +}; + +/** + * Log method called to log errors that occurred while performing dns resolution. + * You can assign your own method to the class instance to do proper logging. + * @param {String} address + * @param {Error} err + */ +EC2MultiRegionTranslator.prototype.logError = function (address, err) { + //Do nothing by default +}; + +exports.AddressTranslator = AddressTranslator; +exports.EC2MultiRegionTranslator = EC2MultiRegionTranslator; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/policies/index.d.ts b/node_modules/cassandra-driver/lib/policies/index.d.ts new file mode 100644 index 0000000..ffae185 --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/index.d.ts @@ -0,0 +1,210 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Client, EmptyCallback, ExecutionOptions, Host, HostMap } from '../../'; +import { types } from '../types'; + + +export namespace policies { + function defaultAddressTranslator(): addressResolution.AddressTranslator; + + function defaultLoadBalancingPolicy(localDc?: string): loadBalancing.LoadBalancingPolicy; + + function defaultReconnectionPolicy(): reconnection.ReconnectionPolicy; + + function defaultRetryPolicy(): retry.RetryPolicy; + + function defaultSpeculativeExecutionPolicy(): speculativeExecution.SpeculativeExecutionPolicy; + + function defaultTimestampGenerator(): timestampGeneration.TimestampGenerator; + + namespace addressResolution { + interface AddressTranslator { + translate(address: string, port: number, callback: Function): void; + } + + class EC2MultiRegionTranslator implements AddressTranslator { + translate(address: string, port: number, callback: Function): void; + } + } + + namespace loadBalancing { + abstract class LoadBalancingPolicy { + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + + getDistance(host: Host): types.distance; + + newQueryPlan( + keyspace: string, + executionOptions: ExecutionOptions, + callback: (error: Error, iterator: Iterator) => void): void; + + getOptions(): Map; + } + + class DCAwareRoundRobinPolicy extends LoadBalancingPolicy { + constructor(localDc: string); + } + + class TokenAwarePolicy extends LoadBalancingPolicy { + constructor(childPolicy: LoadBalancingPolicy); + } + + class AllowListPolicy extends LoadBalancingPolicy { + constructor(childPolicy: 
LoadBalancingPolicy, allowList: string[]); + } + + class WhiteListPolicy extends AllowListPolicy { + } + + class RoundRobinPolicy extends LoadBalancingPolicy { + constructor(); + } + + class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { + constructor(options?: { localDc?: string, filter?: (host: Host) => boolean }); + } + } + + namespace reconnection { + class ConstantReconnectionPolicy implements ReconnectionPolicy { + constructor(delay: number); + + getOptions(): Map; + + newSchedule(): Iterator; + + } + + class ExponentialReconnectionPolicy implements ReconnectionPolicy { + constructor(baseDelay: number, maxDelay: number, startWithNoDelay?: boolean); + + getOptions(): Map; + + newSchedule(): Iterator; + } + + interface ReconnectionPolicy { + getOptions(): Map; + + newSchedule(): Iterator; + } + } + + namespace retry { + class DecisionInfo { + decision: number; + consistency: types.consistencies; + } + + class OperationInfo { + query: string; + executionOptions: ExecutionOptions; + nbRetry: number; + } + + class IdempotenceAwareRetryPolicy extends RetryPolicy { + constructor(childPolicy: RetryPolicy); + } + + class FallthroughRetryPolicy extends RetryPolicy { + constructor(); + } + + class RetryPolicy { + onReadTimeout( + info: OperationInfo, + consistency: types.consistencies, + received: number, + blockFor: number, + isDataPresent: boolean): DecisionInfo; + + onRequestError(info: OperationInfo, consistency: types.consistencies, err: Error): DecisionInfo; + + onUnavailable( + info: OperationInfo, consistency: types.consistencies, required: number, alive: boolean): DecisionInfo; + + onWriteTimeout( + info: OperationInfo, + consistency: types.consistencies, + received: number, + blockFor: number, + writeType: string): DecisionInfo; + + rethrowResult(): DecisionInfo; + + retryResult(consistency: types.consistencies, useCurrentHost?: boolean): DecisionInfo; + } + + namespace RetryDecision { + enum retryDecision { + ignore, + rethrow, + retry + } + } + } + 
+ namespace speculativeExecution { + class ConstantSpeculativeExecutionPolicy implements SpeculativeExecutionPolicy { + constructor(delay: number, maxSpeculativeExecutions: number); + + getOptions(): Map; + + init(client: Client): void; + + newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: Function }; + + shutdown(): void; + } + + class NoSpeculativeExecutionPolicy implements SpeculativeExecutionPolicy { + constructor(); + + getOptions(): Map; + + init(client: Client): void; + + newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: Function }; + + shutdown(): void; + } + + interface SpeculativeExecutionPolicy { + getOptions(): Map; + + init(client: Client): void; + + newPlan(keyspace: string, queryInfo: string|Array): { nextExecution: Function }; + + shutdown(): void; + } + } + + namespace timestampGeneration { + class MonotonicTimestampGenerator implements TimestampGenerator { + constructor(warningThreshold: number, minLogInterval: number); + + getDate(): number; + + next(client: Client): types.Long | number; + } + + interface TimestampGenerator { + next(client: Client): types.Long|number; + } + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/policies/index.js b/node_modules/cassandra-driver/lib/policies/index.js new file mode 100644 index 0000000..9590d6b --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/index.js @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +/** + * Contains driver tuning policies to determine [load balancing]{@link module:policies/loadBalancing}, + * [retrying]{@link module:policies/retry} queries, [reconnecting]{@link module:policies/reconnection} to a node, + * [address resolution]{@link module:policies/addressResolution}, + * [timestamp generation]{@link module:policies/timestampGeneration} and + * [speculative execution]{@link module:policies/speculativeExecution}. + * @module policies + */ +const addressResolution = exports.addressResolution = require('./address-resolution'); +const loadBalancing = exports.loadBalancing = require('./load-balancing'); +const reconnection = exports.reconnection = require('./reconnection'); +const retry = exports.retry = require('./retry'); +const speculativeExecution = exports.speculativeExecution = require('./speculative-execution'); +const timestampGeneration = exports.timestampGeneration = require('./timestamp-generation'); + +/** + * Returns a new instance of the default address translator policy used by the driver. + * @returns {AddressTranslator} + */ +exports.defaultAddressTranslator = function () { + return new addressResolution.AddressTranslator(); +}; + +/** + * Returns a new instance of the default load-balancing policy used by the driver. + * @param {string} [localDc] When provided, it sets the data center that is going to be used as local for the + * load-balancing policy instance. + *

When localDc is undefined, the load-balancing policy instance will use the localDataCenter + * provided in the {@link ClientOptions}.

+ * @returns {LoadBalancingPolicy} + */ +exports.defaultLoadBalancingPolicy = function (localDc) { + return new loadBalancing.DefaultLoadBalancingPolicy(localDc); +}; + +/** + * Returns a new instance of the default retry policy used by the driver. + * @returns {RetryPolicy} + */ +exports.defaultRetryPolicy = function () { + return new retry.RetryPolicy(); +}; + +/** + * Returns a new instance of the default reconnection policy used by the driver. + * @returns {ReconnectionPolicy} + */ +exports.defaultReconnectionPolicy = function () { + return new reconnection.ExponentialReconnectionPolicy(1000, 10 * 60 * 1000, false); +}; + + +/** + * Returns a new instance of the default speculative execution policy used by the driver. + * @returns {SpeculativeExecutionPolicy} + */ +exports.defaultSpeculativeExecutionPolicy = function () { + return new speculativeExecution.NoSpeculativeExecutionPolicy(); +}; + +/** + * Returns a new instance of the default timestamp generator used by the driver. + * @returns {TimestampGenerator} + */ +exports.defaultTimestampGenerator = function () { + return new timestampGeneration.MonotonicTimestampGenerator(); +}; diff --git a/node_modules/cassandra-driver/lib/policies/load-balancing.js b/node_modules/cassandra-driver/lib/policies/load-balancing.js new file mode 100644 index 0000000..de56e0d --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/load-balancing.js @@ -0,0 +1,883 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const util = require('util'); +const types = require('../types'); +const utils = require('../utils.js'); +const errors = require('../errors.js'); + +const doneIteratorObject = Object.freeze({ done: true }); +const newlyUpInterval = 60000; + +/** @module policies/loadBalancing */ +/** + * Base class for Load Balancing Policies + * @constructor + */ +function LoadBalancingPolicy() { + +} + +/** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {Function} callback + */ +LoadBalancingPolicy.prototype.init = function (client, hosts, callback) { + this.client = client; + this.hosts = hosts; + callback(); +}; + +/** + * Returns the distance assigned by this policy to the provided host. + * @param {Host} host + */ +LoadBalancingPolicy.prototype.getDistance = function (host) { + return types.distance.local; +}; + +/** + * Returns an iterator with the hosts for a new query. + * Each new query will call this method. The first host in the result will + * then be used to perform the query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ +LoadBalancingPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { + callback(new Error('You must implement a query plan for the LoadBalancingPolicy class')); +}; + +/** + * Gets an associative array containing the policy options. + */ +LoadBalancingPolicy.prototype.getOptions = function () { + return new Map(); +}; + +/** + * This policy yield nodes in a round-robin fashion. 
+ * @extends LoadBalancingPolicy + * @constructor + */ +function RoundRobinPolicy() { + this.index = 0; +} + +util.inherits(RoundRobinPolicy, LoadBalancingPolicy); + +/** + * Returns an iterator with the hosts to be used as coordinator for a query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ +RoundRobinPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { + if (!this.hosts) { + return callback(new Error('Load balancing policy not initialized')); + } + const hosts = this.hosts.values(); + const self = this; + let counter = 0; + + let planIndex = self.index % hosts.length; + self.index += 1; + if (self.index >= utils.maxInt) { + self.index = 0; + } + + callback(null, { + next: function () { + if (++counter > hosts.length) { + return doneIteratorObject; + } + return {value: hosts[planIndex++ % hosts.length], done: false}; + } + }); +}; + +/** + * A data-center aware Round-robin load balancing policy. + * This policy provides round-robin queries over the nodes of the local + * data center. + * @param {?String} [localDc] local datacenter name. This value overrides the 'localDataCenter' Client option \ + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @extends {LoadBalancingPolicy} + * @constructor + */ +function DCAwareRoundRobinPolicy(localDc) { + this.localDc = localDc; + this.index = 0; + /** @type {Array} */ + this.localHostsArray = null; +} + +util.inherits(DCAwareRoundRobinPolicy, LoadBalancingPolicy); + +/** + * Initializes the load balancing policy. 
+ * @param {Client} client + * @param {HostMap} hosts + * @param {Function} callback + */ +DCAwareRoundRobinPolicy.prototype.init = function (client, hosts, callback) { + this.client = client; + this.hosts = hosts; + hosts.on('add', this._cleanHostCache.bind(this)); + hosts.on('remove', this._cleanHostCache.bind(this)); + + try { + setLocalDc(this, client, this.hosts); + } catch (err) { + return callback(err); + } + + callback(); +}; + +/** + * Returns the distance depending on the datacenter. + * @param {Host} host + */ +DCAwareRoundRobinPolicy.prototype.getDistance = function (host) { + if (host.datacenter === this.localDc) { + return types.distance.local; + } + + return types.distance.ignored; +}; + +DCAwareRoundRobinPolicy.prototype._cleanHostCache = function () { + this.localHostsArray = null; +}; + +DCAwareRoundRobinPolicy.prototype._resolveLocalHosts = function() { + const hosts = this.hosts.values(); + if (this.localHostsArray) { + //there were already calculated + return; + } + this.localHostsArray = []; + hosts.forEach(function (h) { + if (!h.datacenter) { + //not a remote dc node + return; + } + if (h.datacenter === this.localDc) { + this.localHostsArray.push(h); + } + }, this); +}; + +/** + * It returns an iterator that yields local nodes. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. 
+ */ +DCAwareRoundRobinPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { + if (!this.hosts) { + return callback(new Error('Load balancing policy not initialized')); + } + this.index += 1; + if (this.index >= utils.maxInt) { + this.index = 0; + } + this._resolveLocalHosts(); + // Use a local reference of hosts + const localHostsArray = this.localHostsArray; + let planLocalIndex = this.index; + let counter = 0; + callback(null, { + next: function () { + let host; + if (counter++ < localHostsArray.length) { + host = localHostsArray[planLocalIndex++ % localHostsArray.length]; + return { value: host, done: false }; + } + return doneIteratorObject; + } + }); +}; + +/** + * Gets an associative array containing the policy options. + */ +DCAwareRoundRobinPolicy.prototype.getOptions = function () { + return new Map([ + ['localDataCenter', this.localDc ] + ]); +}; + +/** + * A wrapper load balancing policy that add token awareness to a child policy. + * @param {LoadBalancingPolicy} childPolicy + * @extends LoadBalancingPolicy + * @constructor + */ +function TokenAwarePolicy (childPolicy) { + if (!childPolicy) { + throw new Error("You must specify a child load balancing policy"); + } + this.childPolicy = childPolicy; +} + +util.inherits(TokenAwarePolicy, LoadBalancingPolicy); + +TokenAwarePolicy.prototype.init = function (client, hosts, callback) { + this.client = client; + this.hosts = hosts; + this.childPolicy.init(client, hosts, callback); +}; + +TokenAwarePolicy.prototype.getDistance = function (host) { + return this.childPolicy.getDistance(host); +}; + +/** + * Returns the hosts to use for a new query. + * The returned plan will return local replicas first, if replicas can be determined, followed by the plan of the + * child policy. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. 
+ * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ +TokenAwarePolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { + let routingKey; + if (executionOptions) { + routingKey = executionOptions.getRoutingKey(); + if (executionOptions.getKeyspace()) { + keyspace = executionOptions.getKeyspace(); + } + } + let replicas; + if (routingKey) { + replicas = this.client.getReplicas(keyspace, routingKey); + } + if (!routingKey || !replicas) { + return this.childPolicy.newQueryPlan(keyspace, executionOptions, callback); + } + const iterator = new TokenAwareIterator(keyspace, executionOptions, replicas, this.childPolicy); + iterator.iterate(callback); +}; + +/** + * An iterator that holds the context for the subsequent next() calls + * @param {String} keyspace + * @param {ExecutionOptions} execOptions + * @param {Array} replicas + * @param childPolicy + * @constructor + * @ignore + */ +function TokenAwareIterator(keyspace, execOptions, replicas, childPolicy) { + this.keyspace = keyspace; + this.childPolicy = childPolicy; + this.options = execOptions; + this.localReplicas = []; + this.replicaIndex = 0; + this.replicaMap = {}; + this.childIterator = null; + // Memoize the local replicas + // The amount of local replicas should be defined before start iterating, in order to select an + // appropriate (pseudo random) startIndex + for (let i = 0; i < replicas.length; i++) { + const host = replicas[i]; + if (this.childPolicy.getDistance(host) !== types.distance.local) { + continue; + } + this.replicaMap[host.address] = true; + this.localReplicas.push(host); + } + // We use a PRNG to set the replica index + // We only care about proportional fair scheduling between replicas of a given token + // Math.random() has an extremely short permutation cycle length but we don't care about collisions + this.startIndex = Math.floor(Math.random() * this.localReplicas.length); 
+} + +TokenAwareIterator.prototype.iterate = function (callback) { + //Load the child policy hosts + const self = this; + this.childPolicy.newQueryPlan(this.keyspace, this.options, function (err, iterator) { + if (err) { + return callback(err); + } + //get the iterator of the child policy in case is needed + self.childIterator = iterator; + callback(null, { + next: function () { return self.computeNext(); } + }); + }); +}; + +TokenAwareIterator.prototype.computeNext = function () { + let host; + if (this.replicaIndex < this.localReplicas.length) { + host = this.localReplicas[(this.startIndex + (this.replicaIndex++)) % this.localReplicas.length]; + return { value: host, done: false }; + } + // Return hosts from child policy + let item; + while ((item = this.childIterator.next()) && !item.done) { + if (this.replicaMap[item.value.address]) { + // Avoid yielding local replicas from the child load balancing policy query plan + continue; + } + return item; + } + return doneIteratorObject; +}; + +/** + * Gets an associative array containing the policy options. + */ +TokenAwarePolicy.prototype.getOptions = function () { + const map = new Map([ + ['childPolicy', this.childPolicy.constructor !== undefined ? this.childPolicy.constructor.name : null ] + ]); + + if (this.childPolicy instanceof DCAwareRoundRobinPolicy) { + map.set('localDataCenter', this.childPolicy.localDc); + } + + return map; +}; + +/** + * Create a new policy that wraps the provided child policy but only "allow" hosts + * from the provided list. + * @class + * @classdesc + * A load balancing policy wrapper that ensure that only hosts from a provided + * allow list will ever be returned. + *

/**
 * Create a new policy that wraps the provided child policy but only "allow" hosts
 * from the provided list.
 * @class
 * @classdesc
 * A load balancing policy wrapper that ensures that only hosts from a provided
 * allow list will ever be returned.
 * <p>
 * This policy wraps another load balancing policy and will delegate the choice
 * of hosts to the wrapped policy with the exception that only hosts contained
 * in the allow list provided when constructing this policy will ever be
 * returned. Any host not in the allow list will be considered ignored
 * and thus will not be connected to.
 * </p>
 * <p>
 * This policy can be useful to ensure that the driver only connects to a
 * predefined set of hosts. Keep in mind however that this policy defeats
 * somewhat the host auto-detection of the driver. As such, this policy is only
 * useful in a few special cases or for testing, but is not optimal in general.
 * If all you want to do is limiting connections to hosts of the local
 * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy
 * in particular.
 * </p>
 * @param {LoadBalancingPolicy} childPolicy the wrapped policy.
 * @param {Array.<String>} allowList The host addresses in the format ipAddress:port.
 * Only hosts from this list may get connected
 * to (whether they will get connected to or not depends on the child policy).
 * @extends LoadBalancingPolicy
 * @constructor
 */
function AllowListPolicy (childPolicy, allowList) {
  if (!childPolicy) {
    throw new Error("You must specify a child load balancing policy");
  }
  if (!Array.isArray(allowList)) {
    throw new Error("You must provide the list of allowed host addresses");
  }

  this.childPolicy = childPolicy;
  // Map used as a set for O(1) membership checks by host address
  this.allowList = new Map(allowList.map(address => [ address, true ]));
}

util.inherits(AllowListPolicy, LoadBalancingPolicy);

/**
 * Initializes the policy by initializing the wrapped child policy.
 * @param {Client} client
 * @param {HostMap} hosts
 * @param {Function} callback
 */
AllowListPolicy.prototype.init = function (client, hosts, callback) {
  this.childPolicy.init(client, hosts, callback);
};

/**
 * Uses the child policy to return the distance to the host if included in the allow list.
 * Any host not in the allow list will be considered ignored.
 * @param host
 */
AllowListPolicy.prototype.getDistance = function (host) {
  if (!this._contains(host)) {
    return types.distance.ignored;
  }
  return this.childPolicy.getDistance(host);
};

/**
 * Determines whether the host address is part of the allow list.
 * @param {Host} host
 * @returns {boolean}
 * @private
 */
AllowListPolicy.prototype._contains = function (host) {
  return !!this.allowList.get(host.address);
};
/**
 * Returns the hosts to use for a new query, filtered by the allow list.
 * @param {String} keyspace
 * @param {ExecutionOptions} info
 * @param {Function} callback Invoked with the error as first parameter and the host iterator as second parameter.
 */
AllowListPolicy.prototype.newQueryPlan = function (keyspace, info, callback) {
  const self = this;
  this.childPolicy.newQueryPlan(keyspace, info, function (err, iterator) {
    if (err) {
      return callback(err);
    }
    callback(null, self._filter(iterator));
  });
};

/**
 * Wraps the child policy iterator, skipping any host that is not part of the allow list.
 * @param childIterator Iterator obtained from the child policy's query plan.
 * @returns {{next: function}}
 * @private
 */
AllowListPolicy.prototype._filter = function (childIterator) {
  const self = this;
  return {
    next: function () {
      // Iterate (instead of recursing) until an allowed host or the end of the child plan is
      // found: a long run of filtered-out hosts must not grow the call stack.
      let item = childIterator.next();
      while (!item.done && !self._contains(item.value)) {
        item = childIterator.next();
      }
      return item;
    }
  };
};

/**
 * Gets an associative array containing the policy options.
 */
AllowListPolicy.prototype.getOptions = function () {
  return new Map([
    ['childPolicy', this.childPolicy.constructor !== undefined ? this.childPolicy.constructor.name : null ],
    ['allowList', Array.from(this.allowList.keys())]
  ]);
};

/**
 * Creates a new instance of the policy.
 * @classdesc
 * Exposed for backward-compatibility only, it's recommended that you use {@link AllowListPolicy} instead.
 * @param {LoadBalancingPolicy} childPolicy the wrapped policy.
 * @param {Array.<String>} allowList The host addresses in the format ipAddress:port.
 * Only hosts from this list may get connected to (whether they will get connected to or not depends on the child
 * policy).
 * @extends AllowListPolicy
 * @deprecated Use {@link AllowListPolicy} instead. It will be removed in future major versions.
 * @constructor
 */
function WhiteListPolicy(childPolicy, allowList) {
  AllowListPolicy.call(this, childPolicy, allowList);
}

util.inherits(WhiteListPolicy, AllowListPolicy);

+ * Additionally, it detects unresponsive replicas and reorders them at the back of the query plan. + *

+ * + *

+ * For graph analytics queries, it uses the preferred analytics graph server previously obtained by driver as first + * host in the query plan. + *

+ */ +class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { + + /** + * Creates a new instance of DefaultLoadBalancingPolicy. + * @param {String|Object} [options] The local data center name or the optional policy options object. + *

+ * Note that when providing the local data center name, it overrides localDataCenter option at + * Client level. + *

+ * @param {String} [options.localDc] local data center name. This value overrides the 'localDataCenter' Client option + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @param {Function} [options.filter] A function to apply to determine if hosts are included in the query plan. + * The function takes a Host parameter and returns a Boolean. + */ + constructor(options) { + super(); + + if (typeof options === 'string') { + options = { localDc: options }; + } else if (!options) { + options = utils.emptyObject; + } + + this._client = null; + this._hosts = null; + this._filteredHosts = null; + this._preferredHost = null; + this._index = 0; + this.localDc = options.localDc; + this._filter = options.filter || this._defaultFilter; + + // Allow some checks to be injected + if (options.isHostNewlyUp) { + this._isHostNewlyUp = options.isHostNewlyUp; + } + if (options.healthCheck) { + this._healthCheck = options.healthCheck; + } + if (options.compare) { + this._compare = options.compare; + } + if (options.getReplicas) { + this._getReplicas = options.getReplicas; + } + } + + /** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {Function} callback + */ + init(client, hosts, callback) { + this._client = client; + this._hosts = hosts; + + // Clean local host cache + this._hosts.on('add', () => this._filteredHosts = null); + this._hosts.on('remove', () => this._filteredHosts = null); + + try { + setLocalDc(this, client, this._hosts); + } catch (err) { + return callback(err); + } + + callback(); + } + + /** + * Returns the distance assigned by this policy to the provided host, relatively to the client instance. 
+ * @param {Host} host + */ + getDistance(host) { + if (this._preferredHost !== null && host === this._preferredHost) { + // Set the last preferred host as local. + // It ensures that the pool for the graph analytics host has the appropriate size + return types.distance.local; + } + + if (!this._filter(host)) { + return types.distance.ignored; + } + + return host.datacenter === this.localDc ? types.distance.local : types.distance.ignored; + } + + /** + * Returns a host iterator to be used for a query execution. + * @override + * @param {String} keyspace + * @param {ExecutionOptions} executionOptions + * @param {Function} callback + */ + newQueryPlan(keyspace, executionOptions, callback) { + let routingKey; + let preferredHost; + + if (executionOptions) { + routingKey = executionOptions.getRoutingKey(); + + if (executionOptions.getKeyspace()) { + keyspace = executionOptions.getKeyspace(); + } + + preferredHost = executionOptions.getPreferredHost(); + } + + let iterable; + + if (!keyspace || !routingKey) { + iterable = this._getLocalHosts(); + } else { + iterable = this._getReplicasAndLocalHosts(keyspace, routingKey); + } + + if (preferredHost) { + // Set it on an instance level field to set the distance + this._preferredHost = preferredHost; + iterable = DefaultLoadBalancingPolicy._getPreferredHostFirst(preferredHost, iterable); + } + + return callback(null, iterable); + } + + /** + * Yields the preferred host first, followed by the host in the provided iterable + * @param preferredHost + * @param iterable + * @private + */ + static *_getPreferredHostFirst(preferredHost, iterable) { + yield preferredHost; + + for (const host of iterable) { + if (host !== preferredHost) { + yield host; + } + } + } + + /** + * Yields the local hosts without the replicas already yielded + * @param {Array} [localReplicas] The local replicas that we should avoid to include again + * @private + */ + *_getLocalHosts(localReplicas) { + // Use a local reference + const hosts = 
this._getFilteredLocalHosts(); + const initialIndex = this._getIndex(); + + // indexOf() over an Array is a O(n) operation but given that there should be 3 to 7 replicas, + // it shouldn't be an expensive call. Additionally, this will only be executed when the local replicas + // have been exhausted in a lazy manner. + const canBeYield = localReplicas + ? h => localReplicas.indexOf(h) === -1 + : h => true; + + for (let i = 0; i < hosts.length; i++) { + const h = hosts[(i + initialIndex) % hosts.length]; + if (canBeYield(h) && h.isUp()) { + yield h; + } + } + } + + _getReplicasAndLocalHosts(keyspace, routingKey) { + let replicas = this._getReplicas(keyspace, routingKey); + if (replicas === null) { + return this._getLocalHosts(); + } + + const filteredReplicas = []; + let newlyUpReplica = null; + let newlyUpReplicaTimestamp = Number.MIN_SAFE_INTEGER; + let unhealthyReplicas = 0; + + // Filter by DC, predicate and UP replicas + // Use the same iteration to perform other checks: whether if its newly UP or unhealthy + // As this is part of the hot path, we use a simple loop and avoid using Array.prototype.filter() + closure + for (let i = 0; i < replicas.length; i++) { + const h = replicas[i]; + if (!this._filter(h) || h.datacenter !== this.localDc || !h.isUp()) { + continue; + } + const isUpSince = this._isHostNewlyUp(h); + if (isUpSince !== null && isUpSince > newlyUpReplicaTimestamp) { + newlyUpReplica = h; + newlyUpReplicaTimestamp = isUpSince; + } + if (newlyUpReplica === null && !this._healthCheck(h)) { + unhealthyReplicas++; + } + filteredReplicas.push(h); + } + + replicas = filteredReplicas; + + // Shuffle remaining local replicas + utils.shuffleArray(replicas); + + if (replicas.length < 3) { + // Avoid reordering replicas of a set of 2 as we could be doing more harm than good + return this.yieldReplicasFirst(replicas); + } + + let temp; + + if (newlyUpReplica === null) { + if (unhealthyReplicas > 0 && unhealthyReplicas < Math.floor(replicas.length / 2 + 1)) { + 
// There is one or more unhealthy replicas and there is a majority of healthy replicas + this._sendUnhealthyToTheBack(replicas, unhealthyReplicas); + } + } + else if ((newlyUpReplica === replicas[0] || newlyUpReplica === replicas[1]) && Math.random() * 4 >= 1) { + // There is a newly UP replica and the replica in first or second position is the most recent replica + // marked as UP and dice roll 1d4!=1 -> Send it to the back of the Array + const index = newlyUpReplica === replicas[0] ? 0 : 1; + temp = replicas[replicas.length - 1]; + replicas[replicas.length - 1] = replicas[index]; + replicas[index] = temp; + } + + if (this._compare(replicas[1], replicas[0]) > 0) { + // Power of two random choices + temp = replicas[0]; + replicas[0] = replicas[1]; + replicas[1] = temp; + } + + return this.yieldReplicasFirst(replicas); + } + + /** + * Yields the local replicas followed by the rest of local nodes. + * @param {Array} replicas The local replicas + */ + *yieldReplicasFirst(replicas) { + for (let i = 0; i < replicas.length; i++) { + yield replicas[i]; + } + yield* this._getLocalHosts(replicas); + } + + _isHostNewlyUp(h) { + return (h.isUpSince !== null && Date.now() - h.isUpSince < newlyUpInterval) ? h.isUpSince : null; + } + + /** + * Returns a boolean determining whether the host health is ok or not. + * A Host is considered unhealthy when there are enough items in the queue (10 items in-flight) but the + * Host is not responding to those requests. + * @param {Host} h + * @return {boolean} + * @private + */ + _healthCheck(h) { + return !(h.getInFlight() >= 10 && h.getResponseCount() <= 1); + } + + /** + * Compares to host and returns 1 if it needs to favor the first host otherwise, -1. + * @return {number} + * @private + */ + _compare(h1, h2) { + return h1.getInFlight() < h2.getInFlight() ? 1 : -1; + } + + _getReplicas(keyspace, routingKey) { + return this._client.getReplicas(keyspace, routingKey); + } + + /** + * Returns an Array of hosts filtered by DC and predicate. 
+ * @returns {Array} + * @private + */ + _getFilteredLocalHosts() { + if (this._filteredHosts === null) { + this._filteredHosts = this._hosts.values() + .filter(h => this._filter(h) && h.datacenter === this.localDc); + } + return this._filteredHosts; + } + + _getIndex() { + const result = this._index++; + // Overflow protection + if (this._index === 0x7fffffff) { + this._index = 0; + } + return result; + } + + _sendUnhealthyToTheBack(replicas, unhealthyReplicas) { + let counter = 0; + + // Start from the back, move backwards and stop once all unhealthy replicas are at the back + for (let i = replicas.length - 1; i >= 0 && counter < unhealthyReplicas; i--) { + const host = replicas[i]; + if (this._healthCheck(host)) { + continue; + } + + const targetIndex = replicas.length - 1 - counter; + if (targetIndex !== i) { + const temp = replicas[targetIndex]; + replicas[targetIndex] = host; + replicas[i] = temp; + } + counter++; + } + } + + _defaultFilter() { + return true; + } + + /** + * Gets an associative array containing the policy options. + */ + getOptions() { + return new Map([ + ['localDataCenter', this.localDc ], + ['filterFunction', this._filter !== this._defaultFilter ] + ]); + } +} + +/** + * Validates and sets the local data center to be used. + * @param {LoadBalancingPolicy} lbp + * @param {Client} client + * @param {HostMap} hosts + * @private + */ +function setLocalDc(lbp, client, hosts) { + if (!(lbp instanceof LoadBalancingPolicy)) { + throw new errors.DriverInternalError('LoadBalancingPolicy instance was not provided'); + } + + if (client && client.options) { + if (lbp.localDc && !client.options.localDataCenter) { + client.log('info', `Local data center '${lbp.localDc}' was provided as an argument to the load-balancing` + + ` policy. 
It is preferable to specify the local data center using 'localDataCenter' in Client` + + ` options instead when your application is targeting a single data center.`); + } + + // If localDc is unset, use value set in client options. + lbp.localDc = lbp.localDc || client.options.localDataCenter; + } + + const dcs = getDataCenters(hosts); + + if (!lbp.localDc) { + throw new errors.ArgumentError( + `'localDataCenter' is not defined in Client options and also was not specified in constructor.` + + ` At least one is required. Available DCs are: [${Array.from(dcs)}]`); + } + + if (!dcs.has(lbp.localDc)) { + throw new errors.ArgumentError(`Datacenter ${lbp.localDc} was not found. Available DCs are: [${Array.from(dcs)}]`); + } +} + +function getDataCenters(hosts) { + return new Set(hosts.values().map(h => h.datacenter)); +} + +module.exports = { + AllowListPolicy, + DCAwareRoundRobinPolicy, + DefaultLoadBalancingPolicy, + LoadBalancingPolicy, + RoundRobinPolicy, + TokenAwarePolicy, + // Deprecated: for backward compatibility only. + WhiteListPolicy +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/policies/reconnection.js b/node_modules/cassandra-driver/lib/policies/reconnection.js new file mode 100644 index 0000000..fa6a899 --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/reconnection.js @@ -0,0 +1,157 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
'use strict';

/** @module policies/reconnection */

/**
 * Base class for Reconnection Policies.
 */
class ReconnectionPolicy {
  /**
   * A new reconnection schedule.
   * @returns {{next: function}} An infinite iterator
   */
  newSchedule() {
    throw new Error('You must implement a new schedule for the Reconnection class');
  }

  /**
   * Gets an associative array containing the policy options.
   */
  getOptions() {
    return new Map();
  }
}

/**
 * A reconnection policy that waits a constant time between each reconnection attempt.
 */
class ConstantReconnectionPolicy extends ReconnectionPolicy {
  /**
   * @param {Number} delay Delay in ms
   */
  constructor(delay) {
    super();
    this.delay = delay;
  }

  /**
   * A new reconnection schedule that returns the same next delay value.
   * @returns {{next: Function}} An infinite iterator
   */
  newSchedule() {
    const self = this;
    return {
      next: function () {
        return { value: self.delay, done: false };
      }
    };
  }

  /**
   * Gets an associative array containing the policy options.
   */
  getOptions() {
    return new Map([['delay', this.delay ]]);
  }
}

/**
 * A reconnection policy that waits exponentially longer between each
 * reconnection attempt (but keeps a constant delay once a maximum delay is reached).
 * <p>
 * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations
 * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the
 * delay to be less than the base delay, or more than the max delay.
 * </p>
 */
class ExponentialReconnectionPolicy extends ReconnectionPolicy {
  /**
   * @param {Number} baseDelay The base delay in milliseconds to use for the schedules created by this policy.
   * @param {Number} maxDelay The maximum delay in milliseconds to wait between two reconnection attempts.
   * @param {Boolean} startWithNoDelay Determines if the first attempt should be zero delay
   */
  constructor(baseDelay, maxDelay, startWithNoDelay) {
    super();
    this.baseDelay = baseDelay;
    this.maxDelay = maxDelay;
    this.startWithNoDelay = startWithNoDelay;
  }

  /**
   * A new schedule that uses an exponentially growing delay between reconnection attempts.
   * @returns {{next: Function}} An infinite iterator.
   */
  *newSchedule() {
    let index = this.startWithNoDelay ? -1 : 0;

    while (true) {
      let delay = 0;

      if (index >= 64) {
        // 2^index would overflow: clamp to the maximum delay
        delay = this.maxDelay;
      } else if (index !== -1) {
        delay = Math.min(Math.pow(2, index) * this.baseDelay, this.maxDelay);
      }

      index++;

      yield this._addJitter(delay);
    }
  }

  /**
   * Adds a random portion of +-15% to the delay provided.
   * At the base delay it only adds (up to +15%) to avoid reconnecting before reaching the base delay; at the
   * max delay it only subtracts (down to -15%) to avoid exceeding it.
   * @private
   */
  _addJitter(value) {
    if (value === 0) {
      // Instant reconnection without jitter
      return value;
    }

    // Use the formula: 85% + rnd() * 30% to calculate the percentage of the original delay
    let minPercentage = 0.85;
    let range = 0.30;

    if (!this.startWithNoDelay && value === this.baseDelay) {
      // Between 100% to 115% of the original value
      minPercentage = 1;
      range = 0.15;
    } else if (value === this.maxDelay) {
      // Between 85% to 100% of the original value
      range = 0.15;
    }

    return Math.floor(value * (Math.random() * range + minPercentage));
  }

  /**
   * Gets an associative array containing the policy options.
   */
  getOptions() {
    return new Map([
      ['baseDelay', this.baseDelay ],
      ['maxDelay', this.maxDelay ],
      ['startWithNoDelay', this.startWithNoDelay ]
    ]);
  }
}

// Guarded so the classes can also be loaded outside a CommonJS wrapper (e.g. isolated tests)
if (typeof exports === 'object') {
  exports.ReconnectionPolicy = ReconnectionPolicy;
  exports.ConstantReconnectionPolicy = ConstantReconnectionPolicy;
  exports.ExponentialReconnectionPolicy = ExponentialReconnectionPolicy;
}
'use strict';
const util = require('util');


/** @module policies/retry */

/**
 * Base and default RetryPolicy.
 * Determines what to do when the driver runs into a specific Cassandra exception.
 * @constructor
 */
function RetryPolicy() {

}

/**
 * Determines what to do when the driver gets an UnavailableException response from a Cassandra node.
 * @param {OperationInfo} info
 * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered
 * the exception.
 * @param {Number} required The number of replicas whose response is required to achieve the
 * required [consistency]{@link module:types~consistencies}.
 * @param {Number} alive The number of replicas that were known to be alive when the request had been processed
 * (since an unavailable exception has been triggered, there will be alive < required)
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.onUnavailable = function (info, consistency, required, alive) {
  // Retry at most once, on the next host of the query plan
  return info.nbRetry > 0 ? this.rethrowResult() : this.retryResult(undefined, false);
};

/**
 * Determines what to do when the driver gets a ReadTimeoutException response from a Cassandra node.
 * @param {OperationInfo} info
 * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered
 * the exception.
 * @param {Number} received The number of nodes having answered the request.
 * @param {Number} blockFor The number of replicas whose response is required to achieve the
 * required [consistency]{@link module:types~consistencies}.
 * @param {Boolean} isDataPresent When false, it means the replica that was asked for data has not responded.
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.onReadTimeout = function (info, consistency, received, blockFor, isDataPresent) {
  if (info.nbRetry > 0) {
    return this.rethrowResult();
  }
  if (received >= blockFor && !isDataPresent) {
    // Enough replicas acknowledged but the data replica did not respond: a single retry may succeed
    return this.retryResult();
  }
  return this.rethrowResult();
};

/**
 * Determines what to do when the driver gets a WriteTimeoutException response from a Cassandra node.
 * @param {OperationInfo} info
 * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered
 * the exception.
 * @param {Number} received The number of nodes having acknowledged the request.
 * @param {Number} blockFor The number of replicas whose acknowledgement is required to achieve the required
 * [consistency]{@link module:types~consistencies}.
 * @param {String} writeType A string that describes the type of the write that timed out ("SIMPLE"
 * / "BATCH" / "BATCH_LOG" / "UNLOGGED_BATCH" / "COUNTER").
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.onWriteTimeout = function (info, consistency, received, blockFor, writeType) {
  if (info.nbRetry > 0) {
    return this.rethrowResult();
  }
  // If the batch log write failed, retry the operation as this might just be we were unlucky at picking candidates
  return writeType === "BATCH_LOG" ? this.retryResult() : this.rethrowResult();
};

/**
 * Defines whether to retry and at which consistency level on an unexpected error.
 * <p>
 * This method might be invoked in the following situations:
 * </p>
 * <ol>
 * <li>On a client timeout, while waiting for the server response
 * (see [socketOptions.readTimeout]{@link ClientOptions}), being the error an instance of
 * [OperationTimedOutError]{@link module:errors~OperationTimedOutError}.</li>
 * <li>On a connection error (socket closed, etc.).</li>
 * <li>When the contacted host replies with an error, such as overloaded, isBootstrapping,
 * serverError, etc. In this case, the error is instance of [ResponseError]{@link module:errors~ResponseError}.</li>
 * </ol>
 * <p>
 * Note that when this method is invoked, the driver cannot guarantee that the mutation has been effectively
 * applied server-side; a retry should only be attempted if the request is known to be idempotent.
 * </p>
 * @param {OperationInfo} info
 * @param {Number|undefined} consistency The [consistency]{@link module:types~consistencies} level of the query that
 * triggered the exception.
 * @param {Error} err The error that caused this request to fail.
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.onRequestError = function (info, consistency, err) {
  // The default implementation triggers a retry on the next host in the query plan with the same consistency level,
  // regardless of the statement's idempotence, for historical reasons.
  return this.retryResult(undefined, false);
};

/**
 * Returns a {@link DecisionInfo} to retry the request with the given [consistency]{@link module:types~consistencies}.
 * @param {Number|undefined} [consistency] When specified, it retries the request with the given consistency.
 * @param {Boolean} [useCurrentHost] When specified, determines if the retry should be made using the same coordinator.
 * Default: true.
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.retryResult = function (consistency, useCurrentHost) {
  return {
    decision: RetryPolicy.retryDecision.retry,
    consistency,
    useCurrentHost: useCurrentHost !== false
  };
};

/**
 * Returns a {@link DecisionInfo} to callback in error when a err is obtained for a given request.
 * @returns {DecisionInfo}
 */
RetryPolicy.prototype.rethrowResult = function () {
  return { decision: RetryPolicy.retryDecision.rethrow };
};

/**
 * Determines the retry decision for the retry policies.
 * @type {Object}
 * @property {Number} rethrow
 * @property {Number} retry
 * @property {Number} ignore
 * @static
 */
RetryPolicy.retryDecision = {
  rethrow: 0,
  retry: 1,
  ignore: 2
};

/**
 * Creates a new instance of IdempotenceAwareRetryPolicy.
 * @classdesc
 * A retry policy that avoids retrying non-idempotent statements.
 * <p>
 * In case of write timeouts or unexpected errors, this policy will always return
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent
 * (see [QueryOptions.isIdempotent]{@link QueryOptions}).
 * </p>
 * <p>
 * For all other cases, this policy delegates the decision to the child policy.
 * </p>
 * @param {RetryPolicy} [childPolicy] The child retry policy to wrap. When not defined, it will use an instance of
 * [RetryPolicy]{@link module:policies/retry~RetryPolicy} as child policy.
 * @extends module:policies/retry~RetryPolicy
 * @constructor
 * @deprecated Since version 4.0 non-idempotent operations are never retried for write timeout or request error, use
 * the default retry policy instead.
 */
function IdempotenceAwareRetryPolicy(childPolicy) {
  this._childPolicy = childPolicy || new RetryPolicy();
}

util.inherits(IdempotenceAwareRetryPolicy, RetryPolicy);

/** Read timeouts are always delegated to the child policy. */
IdempotenceAwareRetryPolicy.prototype.onReadTimeout = function (info, consistency, received, blockFor, isDataPresent) {
  return this._childPolicy.onReadTimeout(info, consistency, received, blockFor, isDataPresent);
};

/**
 * If the query is not idempotent, it returns a rethrow decision. Otherwise, it relies on the child policy to decide.
 */
IdempotenceAwareRetryPolicy.prototype.onRequestError = function (info, consistency, err) {
  return info.executionOptions.isIdempotent()
    ? this._childPolicy.onRequestError(info, consistency, err)
    : this.rethrowResult();
};

/** Unavailable errors are always delegated to the child policy. */
IdempotenceAwareRetryPolicy.prototype.onUnavailable = function (info, consistency, required, alive) {
  return this._childPolicy.onUnavailable(info, consistency, required, alive);
};

/**
 * If the query is not idempotent, it returns a rethrow decision. Otherwise, it relies on the child policy to decide.
 */
IdempotenceAwareRetryPolicy.prototype.onWriteTimeout = function (info, consistency, received, blockFor, writeType) {
  return info.executionOptions.isIdempotent()
    ? this._childPolicy.onWriteTimeout(info, consistency, received, blockFor, writeType)
    : this.rethrowResult();
};
/**
 * Creates a new instance of FallthroughRetryPolicy.
 * @classdesc
 * A retry policy that never retries nor ignores.
 * <p>
 * All of the methods of this retry policy unconditionally return
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult}. If this policy is used, retry logic
 * will have to be implemented in business code.
 * </p>
 * @alias module:policies/retry~FallthroughRetryPolicy
 * @extends RetryPolicy
 * @constructor
 */
function FallthroughRetryPolicy() {

}

util.inherits(FallthroughRetryPolicy, RetryPolicy);

/**
 * Implementation of RetryPolicy method that returns
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult}.
 */
FallthroughRetryPolicy.prototype.onReadTimeout = function () {
  return this.rethrowResult();
};

/**
 * Implementation of RetryPolicy method that returns
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult}.
 */
FallthroughRetryPolicy.prototype.onRequestError = function () {
  return this.rethrowResult();
};

/**
 * Implementation of RetryPolicy method that returns
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult}.
 */
FallthroughRetryPolicy.prototype.onUnavailable = function () {
  return this.rethrowResult();
};

/**
 * Implementation of RetryPolicy method that returns
 * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult}.
 */
FallthroughRetryPolicy.prototype.onWriteTimeout = function () {
  return this.rethrowResult();
};

/**
 * Decision information
 * @typedef {Object} DecisionInfo
 * @property {Number} decision The decision as specified in
 * [retryDecision]{@link module:policies/retry~RetryPolicy.retryDecision}.
 * @property {Number} [consistency] The [consistency level]{@link module:types~consistencies}.
 * @property {Boolean} [useCurrentHost] Determines if it should use the same host to retry the request.
 * <p>
 * In the case that the current host is not available anymore, it will be retried on the next host even when
 * useCurrentHost is set to true.
 * </p>
 */

/**
 * Information of the execution to be used to determine whether the operation should be retried.
 * @typedef {Object} OperationInfo
 * @property {String} query The query that was executed.
 * @property {ExecutionOptions} executionOptions The options related to the execution of the request.
 * @property {Number} nbRetry The number of retries already performed for this operation.
 */

exports.IdempotenceAwareRetryPolicy = IdempotenceAwareRetryPolicy;
exports.FallthroughRetryPolicy = FallthroughRetryPolicy;
exports.RetryPolicy = RetryPolicy;

'use strict';

const util = require('util');
const errors = require('../errors');

/** @module policies/speculativeExecution */

Note that only idempotent statements will be speculatively retried.

+ * @constructor + * @abstract + */ +function SpeculativeExecutionPolicy() { + +} + +/** + * Initialization method that gets invoked on Client startup. + * @param {Client} client + * @abstract + */ +SpeculativeExecutionPolicy.prototype.init = function (client) { + +}; + +/** + * Gets invoked at client shutdown, giving the opportunity to the implementor to perform cleanup. + * @abstract + */ +SpeculativeExecutionPolicy.prototype.shutdown = function () { + +}; + +/** + * Gets the plan to use for a new query. + * Returns an object with a nextExecution() method, which returns a positive number representing the + * amount of milliseconds to delay the next execution or a non-negative number to avoid further executions. + * @param {String} keyspace The currently logged keyspace. + * @param {String|Array} queryInfo The query, or queries in the case of batches, for which to build a plan. + * @return {{nextExecution: function}} + * @abstract + */ +SpeculativeExecutionPolicy.prototype.newPlan = function (keyspace, queryInfo) { + throw new Error('You must implement newPlan() method in the SpeculativeExecutionPolicy'); +}; + +/** + * Gets an associative array containing the policy options. + */ +SpeculativeExecutionPolicy.prototype.getOptions = function () { + return new Map(); +}; + +/** + * Creates a new instance of NoSpeculativeExecutionPolicy. + * @classdesc + * A {@link SpeculativeExecutionPolicy} that never schedules speculative executions. + * @constructor + * @extends {SpeculativeExecutionPolicy} + */ +function NoSpeculativeExecutionPolicy() { + this._plan = { + nextExecution: function () { + return -1; + } + }; +} + +util.inherits(NoSpeculativeExecutionPolicy, SpeculativeExecutionPolicy); + +NoSpeculativeExecutionPolicy.prototype.newPlan = function () { + return this._plan; +}; + + +/** + * Creates a new instance of ConstantSpeculativeExecutionPolicy. 
+ * @classdesc + * A {@link SpeculativeExecutionPolicy} that schedules a given number of speculative executions, + * separated by a fixed delay. + * @constructor + * @param {Number} delay The delay between each speculative execution. + * @param {Number} maxSpeculativeExecutions The amount of speculative executions that should be scheduled after the + * initial execution. Must be strictly positive. + * @extends {SpeculativeExecutionPolicy} + */ +function ConstantSpeculativeExecutionPolicy(delay, maxSpeculativeExecutions) { + if (!(delay >= 0)) { + throw new errors.ArgumentError('delay must be a positive number or zero'); + } + if (!(maxSpeculativeExecutions > 0)) { + throw new errors.ArgumentError('maxSpeculativeExecutions must be a positive number'); + } + this._delay = delay; + this._maxSpeculativeExecutions = maxSpeculativeExecutions; +} + +util.inherits(ConstantSpeculativeExecutionPolicy, SpeculativeExecutionPolicy); + +ConstantSpeculativeExecutionPolicy.prototype.newPlan = function () { + let executions = 0; + const self = this; + return { + nextExecution: function () { + if (executions++ < self._maxSpeculativeExecutions) { + return self._delay; + } + return -1; + } + }; +}; + +/** + * Gets an associative array containing the policy options. 
+ */ +ConstantSpeculativeExecutionPolicy.prototype.getOptions = function () { + return new Map([ + ['delay', this._delay ], + ['maxSpeculativeExecutions', this._maxSpeculativeExecutions ] + ]); +}; + +exports.NoSpeculativeExecutionPolicy = NoSpeculativeExecutionPolicy; +exports.SpeculativeExecutionPolicy = SpeculativeExecutionPolicy; +exports.ConstantSpeculativeExecutionPolicy = ConstantSpeculativeExecutionPolicy; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/policies/timestamp-generation.js b/node_modules/cassandra-driver/lib/policies/timestamp-generation.js new file mode 100644 index 0000000..dbae075 --- /dev/null +++ b/node_modules/cassandra-driver/lib/policies/timestamp-generation.js @@ -0,0 +1,170 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const util = require('util'); +const { Long } = require('../types'); +const errors = require('../errors'); + +/** @module policies/timestampGeneration */ + +/** + * Defines the maximum date in milliseconds that can be represented in microseconds using Number ((2 ^ 53) / 1000) + * @const + * @private + */ +const _maxSafeNumberDate = 9007199254740; + +/** + * A long representing the value 1000 + * @const + * @private + */ +const _longOneThousand = Long.fromInt(1000); + +/** + * Creates a new instance of {@link TimestampGenerator}. + * @classdesc + * Generates client-side, microsecond-precision query timestamps. + *

+ * Given that Cassandra uses those timestamps to resolve conflicts, implementations should generate + * monotonically increasing timestamps for successive invocations of {@link TimestampGenerator.next()}. + *

+ * @constructor + */ +function TimestampGenerator() { + +} + +/** + * Returns the next timestamp. + *

+ * Implementors should enforce increasing monotonicity of timestamps, that is, + * a timestamp returned should always be strictly greater that any previously returned + * timestamp. + *

+ *

+ * Implementors should strive to achieve microsecond precision in the best possible way, + * which is usually largely dependent on the underlying operating system's capabilities. + *

+ * @param {Client} client The {@link Client} instance to generate timestamps to. + * @returns {Long|Number|null} the next timestamp (in microseconds). If it's equals to null, it won't be + * sent by the driver, letting the server to generate the timestamp. + * @abstract + */ +TimestampGenerator.prototype.next = function (client) { + throw new Error('next() must be implemented'); +}; + +/** + * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps + * drift in the future. + *

+ * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator + * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, + * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. + *

+ * @param {Number} [warningThreshold] Determines how far in the future timestamps are allowed to drift before a + * warning is logged, expressed in milliseconds. Default: 1000. + * @param {Number} [minLogInterval] In case of multiple log events, it determines the time separation between log + * events, expressed in milliseconds. Use 0 to disable. Default: 1000. + * @extends {TimestampGenerator} + * @constructor + */ +function MonotonicTimestampGenerator(warningThreshold, minLogInterval) { + if (warningThreshold < 0) { + throw new errors.ArgumentError('warningThreshold can not be lower than 0'); + } + this._warningThreshold = warningThreshold || 1000; + this._minLogInterval = 1000; + if (typeof minLogInterval === 'number') { + // A value under 1 will disable logging + this._minLogInterval = minLogInterval; + } + this._micros = -1; + this._lastDate = 0; + this._lastLogDate = 0; +} + +util.inherits(MonotonicTimestampGenerator, TimestampGenerator); + +/** + * Returns the current time in milliseconds since UNIX epoch + * @returns {Number} + */ +MonotonicTimestampGenerator.prototype.getDate = function () { + return Date.now(); +}; + +MonotonicTimestampGenerator.prototype.next = function (client) { + let date = this.getDate(); + let drifted = 0; + if (date > this._lastDate) { + this._micros = 0; + this._lastDate = date; + return this._generateMicroseconds(); + } + + if (date < this._lastDate) { + drifted = this._lastDate - date; + date = this._lastDate; + } + if (++this._micros === 1000) { + this._micros = 0; + if (date === this._lastDate) { + // Move date 1 millisecond into the future + date++; + drifted++; + } + } + const lastDate = this._lastDate; + this._lastDate = date; + const result = this._generateMicroseconds(); + if (drifted >= this._warningThreshold) { + // Avoid logging an unbounded amount of times within a clock-skew event or during an interval when more than 1 + // query is being issued by microsecond + const currentLogDate = Date.now(); + if 
(this._minLogInterval > 0 && this._lastLogDate + this._minLogInterval <= currentLogDate){ + const message = util.format( + 'Timestamp generated using current date was %d milliseconds behind the last generated timestamp (which ' + + 'millisecond portion was %d), the returned value (%s) is being artificially incremented to guarantee ' + + 'monotonicity.', + drifted, lastDate, result); + this._lastLogDate = currentLogDate; + client.log('warning', message); + } + } + return result; +}; + +/** + * @private + * @returns {Number|Long} + */ +MonotonicTimestampGenerator.prototype._generateMicroseconds = function () { + if (this._lastDate < _maxSafeNumberDate) { + // We are safe until Jun 06 2255, its faster to perform this operations on Number than on Long + // We hope to have native int64 by then :) + return this._lastDate * 1000 + this._micros; + } + return Long + .fromNumber(this._lastDate) + .multiply(_longOneThousand) + .add(Long.fromInt(this._micros)); +}; + +exports.TimestampGenerator = TimestampGenerator; +exports.MonotonicTimestampGenerator = MonotonicTimestampGenerator; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/prepare-handler.js b/node_modules/cassandra-driver/lib/prepare-handler.js new file mode 100644 index 0000000..0edaa60 --- /dev/null +++ b/node_modules/cassandra-driver/lib/prepare-handler.js @@ -0,0 +1,297 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const errors = require('./errors'); +const utils = require('./utils'); +const types = require('./types'); +const promiseUtils = require('./promise-utils'); + +/** + * Encapsulates the logic for dealing with the different prepare request and response flows, including failover when + * trying to prepare a query. + */ +class PrepareHandler { + /** + * Creates a new instance of PrepareHandler + * @param {Client} client + * @param {LoadBalancingPolicy} loadBalancing + */ + constructor(client, loadBalancing) { + this._client = client; + this._loadBalancing = loadBalancing; + this.logEmitter = client.options.logEmitter; + this.log = utils.log; + } + + /** + * Gets the query id and metadata for a prepared statement, preparing it on + * single host or on all hosts depending on the options. + * @param {Client} client + * @param {LoadBalancingPolicy} loadBalancing + * @param {String} query + * @param {String} keyspace + * @returns {Promise<{queryId, meta}>} + * @static + */ + static async getPrepared(client, loadBalancing, query, keyspace) { + const info = client.metadata.getPreparedInfo(keyspace, query); + if (info.queryId) { + return info; + } + + if (info.preparing) { + // It's already being prepared + return await promiseUtils.fromEvent(info, 'prepared'); + } + + const instance = new PrepareHandler(client, loadBalancing); + return await instance._prepare(info, query, keyspace); + } + + /** + * @param {Client} client + * @param {LoadBalancingPolicy} loadBalancing + * @param {Array} queries + * @param {String} keyspace + * @static + */ + static async getPreparedMultiple(client, loadBalancing, queries, keyspace) { + const result = []; + + for (const item of queries) { + let query; + + if (item) { + query = typeof item === 'string' ? 
item : item.query; + } + + if (typeof query !== 'string') { + throw new errors.ArgumentError('Query item should be a string'); + } + + const { queryId, meta } = await PrepareHandler.getPrepared(client, loadBalancing, query, keyspace); + result.push({ query, params: utils.adaptNamedParamsPrepared(item.params, meta.columns), queryId, meta }); + } + + return result; + } + + /** + * Prepares the query on a single host or on all hosts depending on the options. + * Uses the info 'prepared' event to emit the result. + * @param {Object} info + * @param {String} query + * @param {String} keyspace + * @returns {Promise<{queryId, meta}>} + */ + async _prepare(info, query, keyspace) { + info.preparing = true; + let iterator; + + try { + iterator = await promiseUtils.newQueryPlan(this._loadBalancing, keyspace, null); + return await this._prepareWithQueryPlan(info, iterator, query, keyspace); + } catch (err) { + info.preparing = false; + err.query = query; + info.emit('prepared', err); + + throw err; + } + } + + /** + * Uses the query plan to prepare the query on the first host and optionally on the rest of the hosts. 
+ * @param {Object} info + * @param {Iterator} iterator + * @param {String} query + * @param {String} keyspace + * @returns {Promise<{queryId, meta}>} + * @private + */ + async _prepareWithQueryPlan(info, iterator, query, keyspace) { + const triedHosts = {}; + + while (true) { + const host = PrepareHandler.getNextHost(iterator, this._client.profileManager, triedHosts); + + if (host === null) { + throw new errors.NoHostAvailableError(triedHosts); + } + + try { + const connection = await PrepareHandler._borrowWithKeyspace(host, keyspace); + const response = await connection.prepareOnceAsync(query, keyspace); + + if (this._client.options.prepareOnAllHosts) { + await this._prepareOnAllHosts(iterator, query, keyspace); + } + + // Set the prepared metadata + info.preparing = false; + info.queryId = response.id; + info.meta = response.meta; + this._client.metadata.setPreparedById(info); + info.emit('prepared', null, info); + + return info; + + } catch (err) { + triedHosts[host.address] = err; + + if (!err.isSocketError && !(err instanceof errors.OperationTimedOutError)) { + // There's no point in retrying syntax errors and other response errors + throw err; + } + } + } + } + + /** + * Gets the next host from the query plan. + * @param {Iterator} iterator + * @param {ProfileManager} profileManager + * @param {Object} [triedHosts] + * @return {Host|null} + */ + static getNextHost(iterator, profileManager, triedHosts) { + let host; + // Get a host that is UP in a sync loop + while (true) { + const item = iterator.next(); + if (item.done) { + return null; + } + + host = item.value; + + // set the distance relative to the client first + const distance = profileManager.getDistance(host); + if (distance === types.distance.ignored) { + //If its marked as ignore by the load balancing policy, move on. 
+ continue; + } + + if (host.isUp()) { + break; + } + + if (triedHosts) { + triedHosts[host.address] = 'Host considered as DOWN'; + } + } + + return host; + } + + /** + * Prepares all queries on a single host. + * @param {Host} host + * @param {Array} allPrepared + */ + static async prepareAllQueries(host, allPrepared) { + const anyKeyspaceQueries = []; + + const queriesByKeyspace = new Map(); + allPrepared.forEach(info => { + let arr; + if (info.keyspace) { + arr = queriesByKeyspace.get(info.keyspace); + + if (!arr) { + arr = []; + queriesByKeyspace.set(info.keyspace, arr); + } + } else { + arr = anyKeyspaceQueries; + } + + arr.push(info.query); + }); + + for (const [keyspace, queries] of queriesByKeyspace) { + await PrepareHandler._borrowAndPrepare(host, keyspace, queries); + } + + await PrepareHandler._borrowAndPrepare(host, null, anyKeyspaceQueries); + } + + /** + * Borrows a connection from the host and prepares the queries provided. + * @param {Host} host + * @param {String} keyspace + * @param {Array} queries + * @returns {Promise} + * @private + */ + static async _borrowAndPrepare(host, keyspace, queries) { + if (queries.length === 0) { + return; + } + + const connection = await PrepareHandler._borrowWithKeyspace(host, keyspace); + + for (const query of queries) { + await connection.prepareOnceAsync(query, keyspace); + } + } + + /** + * Borrows a connection and changes the active keyspace on the connection, if needed. + * It does not perform any retry or error handling. + * @param {Host!} host + * @param {string} keyspace + * @returns {Promise} + * @throws {errors.BusyConnectionError} When the connection is busy. + * @throws {errors.ResponseError} For invalid keyspaces. + * @throws {Error} For socket errors. 
+ * @private + */ + static async _borrowWithKeyspace(host, keyspace) { + const connection = host.borrowConnection(); + + if (keyspace && connection.keyspace !== keyspace) { + await connection.changeKeyspace(keyspace); + } + + return connection; + } + + /** + * Prepares the provided query on all hosts, except the host provided. + * @param {Iterator} iterator + * @param {String} query + * @param {String} keyspace + * @private + */ + _prepareOnAllHosts(iterator, query, keyspace) { + const queries = [ query ]; + let h; + const hosts = []; + + while ((h = PrepareHandler.getNextHost(iterator, this._client.profileManager)) !== null) { + hosts.push(h); + } + + return Promise.all(hosts.map(h => + PrepareHandler + ._borrowAndPrepare(h, keyspace, queries) + .catch(err => this.log('verbose', `Unexpected error while preparing query (${query}) on ${h.address}`, err)))); + } +} + +module.exports = PrepareHandler; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/promise-utils.js b/node_modules/cassandra-driver/lib/promise-utils.js new file mode 100644 index 0000000..1fc5680 --- /dev/null +++ b/node_modules/cassandra-driver/lib/promise-utils.js @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Creates a non-clearable timer that resolves the promise once elapses. 
+ * @param {number} ms + * @returns {Promise} + */ +function delay(ms) { + return new Promise(r => setTimeout(r, ms || 0)); +} + +/** + * Creates a Promise that gets resolved or rejected based on an event. + * @param {object} emitter + * @param {string} eventName + * @returns {Promise} + */ +function fromEvent(emitter, eventName) { + return new Promise((resolve, reject) => + emitter.once(eventName, (err, result) => { + if (err) { + reject(err); + } else { + resolve(result); + } + })); +} + +/** + * Creates a Promise from a callback based function. + * @param {Function} fn + * @returns {Promise} + */ +function fromCallback(fn) { + return new Promise((resolve, reject) => + fn((err, result) => { + if (err) { + reject(err); + } else { + resolve(result); + } + })); +} + +/** + * Gets a function that has the signature of a callback that invokes the appropriate promise handler parameters. + * @param {Function} resolve + * @param {Function} reject + * @returns {Function} + */ +function getCallback(resolve, reject) { + return function (err, result) { + if (err) { + reject(err); + } else { + resolve(result); + } + }; +} + +async function invokeSequentially(info, length, fn) { + let index; + while ((index = info.counter++) < length) { + await fn(index); + } +} + +/** + * Invokes the new query plan of the load balancing policy and returns a Promise. + * @param {LoadBalancingPolicy} lbp The load balancing policy. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @returns {Promise} + */ +function newQueryPlan(lbp, keyspace, executionOptions) { + return new Promise((resolve, reject) => { + lbp.newQueryPlan(keyspace, executionOptions, (err, iterator) => { + if (err) { + reject(err); + } else { + resolve(iterator); + } + }); + }); +} + +/** + * Method that handles optional callbacks (dual promise and callback support). 
+ * When callback is undefined it returns the promise. + * When using a callback, it will use it as handlers of the continuation of the promise. + * @param {Promise} promise + * @param {Function?} callback + * @returns {Promise|undefined} + */ +function optionalCallback(promise, callback) { + if (!callback) { + return promise; + } + + toCallback(promise, callback); +} + +/** + * Invokes the provided function multiple times, considering the concurrency level limit. + * @param {Number} count + * @param {Number} limit + * @param {Function} fn + * @returns {Promise} + */ +function times(count, limit, fn) { + if (limit > count) { + limit = count; + } + + const promises = new Array(limit); + + const info = { + counter: 0 + }; + + for (let i = 0; i < limit; i++) { + promises[i] = invokeSequentially(info, count, fn); + } + + return Promise.all(promises); +} + +/** + * Deals with unexpected rejections in order to avoid the unhandled promise rejection warning or failure. + * @param {Promise} promise + * @returns {undefined} + */ +function toBackground(promise) { + promise.catch(() => {}); +} + +/** + * Invokes the callback once outside the promise chain the promise is resolved or rejected. + * @param {Promise} promise + * @param {Function?} callback + * @returns {undefined} + */ +function toCallback(promise, callback) { + promise + .then( + result => process.nextTick(() => callback(null, result)), + // Avoid marking the promise as rejected + err => process.nextTick(() => callback(err))); +} + +module.exports = { + delay, + fromCallback, + fromEvent, + getCallback, + newQueryPlan, + optionalCallback, + times, + toBackground, + toCallback +}; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/readers.js b/node_modules/cassandra-driver/lib/readers.js new file mode 100644 index 0000000..57f0e0f --- /dev/null +++ b/node_modules/cassandra-driver/lib/readers.js @@ -0,0 +1,542 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const util = require('util'); +const utils = require('./utils'); +const types = require('./types'); +const errors = require('./errors'); + +/** + * Information on the formatting of the returned rows + */ +const resultFlag = { + globalTablesSpec: 0x0001, + hasMorePages: 0x0002, + noMetadata: 0x0004, + metadataChanged: 0x0008, + continuousPaging: 0x40000000, + lastContinuousPage: 0x80000000, +}; + +// templates for derived error messages. 
+const _writeTimeoutQueryMessage = 'Server timeout during write query at consistency %s (%d peer(s) acknowledged the write over %d required)'; +const _writeTimeoutBatchLogMessage = 'Server timeout during batchlog write at consistency %s (%d peer(s) acknowledged the write over %d required)'; +const _writeFailureMessage = 'Server failure during write query at consistency %s (%d responses were required but only %d replicas responded, %d failed)'; +const _unavailableMessage = 'Not enough replicas available for query at consistency %s (%d required but only %d alive)'; +const _readTimeoutMessage = 'Server timeout during read query at consistency %s (%s)'; +const _readFailureMessage = 'Server failure during read query at consistency %s (%d responses were required but only %d replicas responded, %d failed)'; + +/** + * Buffer forward reader of CQL binary frames + * @param {FrameHeader} header + * @param {Buffer} body + * @param {Number} [offset] + */ +class FrameReader { + + /** + * Creates a new instance of the reader + * @param {FrameHeader} header + * @param {Buffer} body + * @param {Number} [offset] + */ + constructor(header, body, offset) { + this.header = header; + this.opcode = header.opcode; + this.offset = offset || 0; + this.buf = body; + } + + remainingLength() { + return this.buf.length - this.offset; + } + + getBuffer() { + return this.buf; + } + + /** + * Slices the underlining buffer + * @param {Number} begin + * @param {Number} [end] + * @returns {Buffer} + */ + slice(begin, end) { + if (typeof end === 'undefined') { + end = this.buf.length; + } + return this.buf.slice(begin, end); + } + + /** + * Modifies the underlying buffer, it concatenates the given buffer with the original (internalBuffer = concat(bytes, internalBuffer) + */ + unshift(bytes) { + if (this.offset > 0) { + throw new Error('Can not modify the underlying buffer if already read'); + } + this.buf = Buffer.concat([bytes, this.buf], bytes.length + this.buf.length); + } + + /** + * Reads any 
number of bytes and moves the offset. + * if length not provided or it's larger than the remaining bytes, reads to end. + * @param length + * @returns {Buffer} + */ + read(length) { + let end = this.buf.length; + if (typeof length !== 'undefined' && this.offset + length < this.buf.length) { + end = this.offset + length; + } + const bytes = this.slice(this.offset, end); + this.offset = end; + return bytes; + } + + /** + * Moves the reader cursor to the end + */ + toEnd() { + this.offset = this.buf.length; + } + + /** + * Reads a BE Int and moves the offset + * @returns {Number} + */ + readInt() { + const result = this.buf.readInt32BE(this.offset); + this.offset += 4; + return result; + } + + /** @returns {Number} */ + readShort() { + const result = this.buf.readUInt16BE(this.offset); + this.offset += 2; + return result; + } + + readByte() { + const result = this.buf.readUInt8(this.offset); + this.offset += 1; + return result; + } + + readString() { + const length = this.readShort(); + this.checkOffset(length); + const result = this.buf.toString('utf8', this.offset, this.offset + length); + this.offset += length; + return result; + } + + /** + * Checks that the new length to read is within the range of the buffer length. Throws a RangeError if not. + * @param {Number} newLength + */ + checkOffset(newLength) { + if (this.offset + newLength > this.buf.length) { + const err = new RangeError('Trying to access beyond buffer length'); + err.expectedLength = newLength; + throw err; + } + } + + /** + * Reads a protocol string list + * @returns {Array} + */ + readStringList() { + const length = this.readShort(); + const list = new Array(length); + for (let i = 0; i < length; i++) { + list[i] = this.readString(); + } + return list; + } + + /** + * Reads the amount of bytes that the field has and returns them (slicing them). 
+ * @returns {Buffer} + */ + readBytes() { + const length = this.readInt(); + if (length < 0) { + return null; + } + this.checkOffset(length); + return this.read(length); + } + + readShortBytes() { + const length = this.readShort(); + if (length < 0) { + return null; + } + this.checkOffset(length); + return this.read(length); + } + + /** + * Reads an associative array of strings as keys and bytes as values + * @param {Number} length + * @param {Function} keyFn + * @param {Function} valueFn + * @returns {Object} + */ + readMap(length, keyFn, valueFn) { + if (length < 0) { + return null; + } + const map = {}; + for (let i = 0; i < length; i++) { + map[keyFn.call(this)] = valueFn.call(this); + } + return map; + } + + /** + * Reads an associative array of strings as keys and string lists as values + * @returns {Object} + */ + readStringMultiMap() { + //A [short] n, followed by n pair where is a + //[string] and is a [string[]]. + const length = this.readShort(); + if (length < 0) { + return null; + } + const map = {}; + for (let i = 0; i < length; i++) { + map[this.readString()] = this.readStringList(); + } + return map; + } + + /** + * Reads a data type definition + * @returns {{code: Number, info: Object|null}} An array of 2 elements + */ + readType() { + let i; + const type = { + code: this.readShort(), + type: null + }; + switch (type.code) { + case types.dataTypes.custom: + type.info = this.readString(); + break; + case types.dataTypes.list: + case types.dataTypes.set: + type.info = this.readType(); + break; + case types.dataTypes.map: + type.info = [this.readType(), this.readType()]; + break; + case types.dataTypes.udt: + type.info = { + keyspace: this.readString(), + name: this.readString(), + fields: new Array(this.readShort()) + }; + for (i = 0; i < type.info.fields.length; i++) { + type.info.fields[i] = { + name: this.readString(), + type: this.readType() + }; + } + break; + case types.dataTypes.tuple: + type.info = new Array(this.readShort()); + for (i = 0; 
i < type.info.length; i++) { + type.info[i] = this.readType(); + } + break; + } + return type; + } + + /** + * Reads an Ip address and port + * @returns {{address: exports.InetAddress, port: Number}} + */ + readInet() { + const length = this.readByte(); + const address = this.read(length); + return { address: new types.InetAddress(address), port: this.readInt() }; + } + + /** + * Reads an Ip address + * @returns {InetAddress} + */ + readInetAddress() { + const length = this.readByte(); + return new types.InetAddress(this.read(length)); + } + + /** + * Reads the body bytes corresponding to the flags + * @returns {{traceId: Uuid, warnings: Array, customPayload}} + * @throws {RangeError} + */ + readFlagsInfo() { + if (this.header.flags === 0) { + return utils.emptyObject; + } + const result = {}; + if (this.header.flags & types.frameFlags.tracing) { + this.checkOffset(16); + result.traceId = new types.Uuid(utils.copyBuffer(this.read(16))); + } + if (this.header.flags & types.frameFlags.warning) { + result.warnings = this.readStringList(); + } + if (this.header.flags & types.frameFlags.customPayload) { + // Custom payload is a Map + result.customPayload = this.readMap(this.readShort(), this.readString, this.readBytes); + } + return result; + } + + /** + * Reads the metadata from a row or a prepared result response + * @param {Number} kind + * @returns {Object} + * @throws {RangeError} + */ + readMetadata(kind) { + let i; + //Determines if its a prepared metadata + const isPrepared = (kind === types.resultKind.prepared); + const meta = {}; + if (types.protocolVersion.supportsResultMetadataId(this.header.version) && isPrepared) { + meta.resultId = utils.copyBuffer(this.readShortBytes()); + } + //as used in Rows and Prepared responses + const flags = this.readInt(); + const columnLength = this.readInt(); + if (types.protocolVersion.supportsPreparedPartitionKey(this.header.version) && isPrepared) { + //read the pk columns + meta.partitionKeys = new Array(this.readInt()); + 
      // (continuation of parseResultMetadata — the start of this method is outside this chunk)
      // Partition-key indexes for prepared result metadata; read order must match the wire format.
      for (i = 0; i < meta.partitionKeys.length; i++) {
        meta.partitionKeys[i] = this.readShort();
      }
    }
    if (flags & resultFlag.hasMorePages) {
      // Copy because the underlying frame buffer may be reused.
      meta.pageState = utils.copyBuffer(this.readBytes());
    }
    if (flags & resultFlag.metadataChanged) {
      meta.newResultId = utils.copyBuffer(this.readShortBytes());
    }
    if (flags & resultFlag.continuousPaging) {
      meta.continuousPageIndex = this.readInt();
      meta.lastContinuousPage = !!(flags & resultFlag.lastContinuousPage);
    }
    if (flags & resultFlag.globalTablesSpec) {
      // A single keyspace/table applies to every column (per-column specs omitted below).
      meta.global_tables_spec = true;
      meta.keyspace = this.readString();
      meta.table = this.readString();
    }
    meta.columns = new Array(columnLength);
    meta.columnsByName = utils.emptyObject;
    if (isPrepared) {
      //for prepared metadata, we will need a index of the columns (param) by name
      meta.columnsByName = {};
    }
    for (i = 0; i < columnLength; i++) {
      const col = {};
      if (!meta.global_tables_spec) {
        col.ksname = this.readString();
        col.tablename = this.readString();
      }
      col.name = this.readString();
      col.type = this.readType();
      meta.columns[i] = col;
      if (isPrepared) {
        meta.columnsByName[col.name] = i;
      }
    }
    return meta;
  }

  /**
   * Reads the error from the frame
   * @throws {RangeError}
   * @returns {ResponseError}
   */
  readError() {
    const code = this.readInt();
    const message = this.readString();
    const err = new errors.ResponseError(code, message);
    //read extra info
    switch (code) {
      case types.responseErrorCodes.unavailableException:
        err.consistencies = this.readShort();
        err.required = this.readInt();
        err.alive = this.readInt();
        err.message = util.format(_unavailableMessage, types.consistencyToString[err.consistencies], err.required, err.alive);
        break;
      case types.responseErrorCodes.readTimeout:
      case types.responseErrorCodes.readFailure:
        err.consistencies = this.readShort();
        err.received = this.readInt();
        err.blockFor = this.readInt();
        if (code === types.responseErrorCodes.readFailure) {
          if (types.protocolVersion.supportsFailureReasonMap(this.header.version)) {
            // Newer protocol versions include a map of failing endpoint -> reason code.
            err.failures = this.readInt();
            err.reasons = this.readMap(err.failures, this.readInetAddress, this.readShort);
          }
          else {
            err.failures = this.readInt();
          }
        }
        err.isDataPresent = this.readByte();
        if (code === types.responseErrorCodes.readTimeout) {
          let details;
          if (err.received < err.blockFor) {
            details = util.format('%d replica(s) responded over %d required', err.received, err.blockFor);
          }
          else if (!err.isDataPresent) {
            details = 'the replica queried for the data didn\'t respond';
          }
          else {
            details = 'timeout while waiting for repair of inconsistent replica';
          }
          err.message = util.format(_readTimeoutMessage, types.consistencyToString[err.consistencies], details);
        }
        else {
          err.message = util.format(_readFailureMessage, types.consistencyToString[err.consistencies], err.blockFor, err.received, err.failures);
        }
        break;
      case types.responseErrorCodes.writeTimeout:
      case types.responseErrorCodes.writeFailure:
        err.consistencies = this.readShort();
        err.received = this.readInt();
        err.blockFor = this.readInt();
        if (code === types.responseErrorCodes.writeFailure) {
          if (types.protocolVersion.supportsFailureReasonMap(this.header.version)) {
            err.failures = this.readInt();
            err.reasons = this.readMap(err.failures, this.readInetAddress, this.readShort);
          }
          else {
            err.failures = this.readInt();
          }
        }
        err.writeType = this.readString();
        if (code === types.responseErrorCodes.writeTimeout) {
          // Batch-log writes get a dedicated message template.
          const template = err.writeType === 'BATCH_LOG' ?
            _writeTimeoutBatchLogMessage : _writeTimeoutQueryMessage;
          err.message = util.format(template, types.consistencyToString[err.consistencies], err.received, err.blockFor);
        }
        else {
          err.message = util.format(_writeFailureMessage, types.consistencyToString[err.consistencies], err.blockFor, err.received, err.failures);
        }
        break;
      case types.responseErrorCodes.unprepared:
        err.queryId = utils.copyBuffer(this.readShortBytes());
        break;
      case types.responseErrorCodes.functionFailure:
        err.keyspace = this.readString();
        err.functionName = this.readString();
        err.argTypes = this.readStringList();
        break;
      case types.responseErrorCodes.alreadyExists: {
        err.keyspace = this.readString();
        const table = this.readString();
        if (table.length > 0) {
          err.table = table;
        }
        break;
      }
    }
    return err;
  }

  /**
   * Reads an event from Cassandra and returns the detail
   * @returns {{eventType: String, inet: {address: Buffer, port: Number}}, *}
   */
  readEvent() {
    const eventType = this.readString();
    switch (eventType) {
      case types.protocolEvents.topologyChange:
        return {
          added: this.readString() === 'NEW_NODE',
          inet: this.readInet(),
          eventType: eventType
        };
      case types.protocolEvents.statusChange:
        return {
          up: this.readString() === 'UP',
          inet: this.readInet(),
          eventType: eventType
        };
      case types.protocolEvents.schemaChange:
        return this.parseSchemaChange();
    }
    //Forward compatibility
    return { eventType: eventType };
  }

  /**
   * Parses a SCHEMA_CHANGE event body; the layout differs between protocol v1/v2 and v3+.
   */
  parseSchemaChange() {
    let result;
    if (!types.protocolVersion.supportsSchemaChangeFullMetadata(this.header.version)) {
      //v1/v2: 3 strings, the table value can be empty
      result = {
        eventType: types.protocolEvents.schemaChange,
        schemaChangeType: this.readString(),
        keyspace: this.readString(),
        table: this.readString()
      };
      result.isKeyspace = !result.table;
      return result;
    }
    //v3+: 3 or 4 strings: change_type, target, keyspace and (table, type, functionName or aggregate)
    result = {
      eventType: types.protocolEvents.schemaChange,
      schemaChangeType: this.readString(),
      target: this.readString(),
      keyspace: this.readString(),
      table: null,
      udt: null,
      signature: null
    };
    result.isKeyspace = result.target === 'KEYSPACE';
    switch (result.target) {
      case 'TABLE':
        result.table = this.readString();
        break;
      case 'TYPE':
        result.udt = this.readString();
        break;
      case 'FUNCTION':
        result.functionName = this.readString();
        result.signature = this.readStringList();
        break;
      case 'AGGREGATE':
        result.aggregate = this.readString();
        result.signature = this.readStringList();
    }
    return result;
  }
}

module.exports = { FrameReader };
diff --git a/node_modules/cassandra-driver/lib/request-execution.js b/node_modules/cassandra-driver/lib/request-execution.js
new file mode 100644
index 0000000..00cd9a5
--- /dev/null
+++ b/node_modules/cassandra-driver/lib/request-execution.js
@@ -0,0 +1,497 @@
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ +'use strict'; + +const errors = require('./errors'); +const requests = require('./requests'); +const retry = require('./policies/retry'); +const types = require('./types'); +const utils = require('./utils'); +const promiseUtils = require('./promise-utils'); + +const retryOnCurrentHost = Object.freeze({ + decision: retry.RetryPolicy.retryDecision.retry, + useCurrentHost: true, + consistency: undefined +}); + +const rethrowDecision = Object.freeze({ decision: retry.RetryPolicy.retryDecision.rethrow }); + +/** + * An internal representation of an error that occurred during the execution of a request. + */ +const errorCodes = { + none: 0, + // Socket error + socketError: 1, + // Socket error before the request was written to the wire + socketErrorBeforeRequestWritten: 2, + // OperationTimedOutError + clientTimeout: 3, + // Response error "unprepared" + serverErrorUnprepared: 4, + // Response error "overloaded", "is_bootstrapping" and "truncateError": + serverErrorOverloaded: 5, + serverErrorReadTimeout: 6, + serverErrorUnavailable: 7, + serverErrorWriteTimeout: 8, + // Any other server error (different from the ones detailed above) + serverErrorOther: 9 +}; + +const metricsHandlers = new Map([ + [ errorCodes.none, (metrics, err, latency) => metrics.onSuccessfulResponse(latency) ], + [ errorCodes.socketError, (metrics, err) => metrics.onConnectionError(err) ], + [ errorCodes.clientTimeout, (metrics, err) => metrics.onClientTimeoutError(err) ], + [ errorCodes.serverErrorOverloaded, (metrics, err) => metrics.onOtherError(err) ], + [ errorCodes.serverErrorReadTimeout, (metrics, err) => metrics.onReadTimeoutError(err) ], + [ errorCodes.serverErrorUnavailable, (metrics, err) => metrics.onUnavailableError(err) ], + [ errorCodes.serverErrorWriteTimeout, (metrics, err) => metrics.onWriteTimeoutError(err) ], + [ errorCodes.serverErrorOther, (metrics, err) => metrics.onOtherError(err) ] +]); + +const metricsRetryHandlers = new Map([ + [ errorCodes.socketError, (metrics, 
err) => metrics.onOtherErrorRetry(err) ], + [ errorCodes.clientTimeout, (metrics, err) => metrics.onClientTimeoutRetry(err) ], + [ errorCodes.serverErrorOverloaded, (metrics, err) => metrics.onOtherErrorRetry(err) ], + [ errorCodes.serverErrorReadTimeout, (metrics, err) => metrics.onReadTimeoutRetry(err) ], + [ errorCodes.serverErrorUnavailable, (metrics, err) => metrics.onUnavailableRetry(err) ], + [ errorCodes.serverErrorWriteTimeout, (metrics, err) => metrics.onWriteTimeoutRetry(err) ], + [ errorCodes.serverErrorOther, (metrics, err) => metrics.onOtherErrorRetry(err) ] +]); + +class RequestExecution { + /** + * Encapsulates a single flow of execution against a coordinator, handling individual retries and failover. + * @param {RequestHandler!} parent + * @param {Host!} host + * @param {Connection!} connection + */ + constructor(parent, host, connection) { + this._parent = parent; + /** @type {OperationState} */ + this._operation = null; + this._host = host; + this._connection = connection; + this._cancelled = false; + this._startTime = null; + this._retryCount = 0; + // The streamId information is not included in the request. + // A pointer to the parent request can be used, except when changing the consistency level from the retry policy + this._request = this._parent.request; + + // Mark that it launched a new execution + parent.speculativeExecutions++; + } + + /** + * Sends the request using the active connection. + */ + start() { + this._sendOnConnection(); + } + + /** + * Borrows the next connection available using the query plan and sends the request. + * @returns {Promise} + */ + async restart() { + try { + const { host, connection } = this._parent.getNextConnection(); + + this._connection = connection; + this._host = host; + } catch (err) { + return this._parent.handleNoHostAvailable(err, this); + } + + // It could be a new connection from the pool, we should make sure it's in the correct keyspace. 
+ const keyspace = this._parent.client.keyspace; + if (keyspace && keyspace !== this._connection.keyspace) { + try { + await this._connection.changeKeyspace(keyspace); + } catch (err) { + // When its a socket error, attempt to retry. + // Otherwise, rethrow the error to the user. + return this._handleError(err, RequestExecution._getErrorCode(err)); + } + } + + if (this._cancelled) { + // No need to send the request or invoke any callback + return; + } + + this._sendOnConnection(); + } + + /** + * Sends the request using the active connection. + * @private + */ + _sendOnConnection() { + this._startTime = process.hrtime(); + + this._operation = + this._connection.sendStream(this._request, this._parent.executionOptions, (err, response, length) => { + const errorCode = RequestExecution._getErrorCode(err); + + this._trackResponse(process.hrtime(this._startTime), errorCode, err, length); + + if (this._cancelled) { + // Avoid handling the response / err + return; + } + + if (errorCode !== errorCodes.none) { + return this._handleError(errorCode, err); + } + + if (response.schemaChange) { + return promiseUtils.toBackground( + this._parent.client + .handleSchemaAgreementAndRefresh(this._connection, response.schemaChange) + .then(agreement => { + if (this._cancelled) { + // After the schema agreement method was started, this execution was cancelled + return; + } + + this._parent.setCompleted(null, this._getResultSet(response, agreement)); + }) + ); + } + + if (response.keyspaceSet) { + this._parent.client.keyspace = response.keyspaceSet; + } + + if (response.meta && response.meta.newResultId && this._request.queryId) { + // Update the resultId on the existing prepared statement. 
+ // Eventually would want to update the result metadata as well (NODEJS-433) + const info = this._parent.client.metadata.getPreparedById(this._request.queryId); + info.meta.resultId = response.meta.newResultId; + } + + this._parent.setCompleted(null, this._getResultSet(response)); + }); + } + + _trackResponse(latency, errorCode, err, length) { + // Record metrics + RequestExecution._invokeMetricsHandler(errorCode, this._parent.client.metrics, err, latency); + + // Request tracker + const tracker = this._parent.client.options.requestTracker; + + if (tracker === null) { + return; + } + + // Avoid using instanceof as property check is faster + const query = this._request.query || this._request.queries; + const parameters = this._request.params; + const requestLength = this._request.length; + + if (err) { + tracker.onError(this._host, query, parameters, this._parent.executionOptions, requestLength, err, latency); + } else { + tracker.onSuccess(this._host, query, parameters, this._parent.executionOptions, requestLength, length, latency); + } + } + + _getResultSet(response, agreement) { + const rs = new types.ResultSet(response, this._host.address, this._parent.triedHosts, this._parent.speculativeExecutions, + this._request.consistency, agreement === undefined || agreement); + + if (rs.rawPageState) { + rs.nextPageAsync = this._parent.getNextPageHandler(); + } + + return rs; + } + + /** + * Gets the method of the {ClientMetrics} instance depending on the error code and invokes it. 
+ * @param {Number} errorCode + * @param {ClientMetrics} metrics + * @param {Error} err + * @param {Array} latency + * @private + */ + static _invokeMetricsHandler(errorCode, metrics, err, latency) { + const handler = metricsHandlers.get(errorCode); + if (handler !== undefined) { + handler(metrics, err, latency); + } + + if (!err || err instanceof errors.ResponseError) { + metrics.onResponse(latency); + } + } + + /** + * Gets the method of the {ClientMetrics} instance related to retry depending on the error code and invokes it. + * @param {Number} errorCode + * @param {ClientMetrics} metrics + * @param {Error} err + * @private + */ + static _invokeMetricsHandlerForRetry(errorCode, metrics, err) { + const handler = metricsRetryHandlers.get(errorCode); + + if (handler !== undefined) { + handler(metrics, err); + } + } + + /** + * Allows the handler to cancel the current request. + * When the request has been already written, we can unset the callback and forget about it. + */ + cancel() { + this._cancelled = true; + + if (this._operation === null) { + return; + } + + this._operation.cancel(); + } + + /** + * Determines if the current execution was cancelled. 
+ */ + wasCancelled() { + return this._cancelled; + } + + _handleError(errorCode, err) { + this._parent.triedHosts[this._host.address] = err; + err['coordinator'] = this._host.address; + + if (errorCode === errorCodes.serverErrorUnprepared) { + return this._prepareAndRetry(err.queryId); + } + + if (errorCode === errorCodes.socketError || errorCode === errorCodes.socketErrorBeforeRequestWritten) { + this._host.removeFromPool(this._connection); + } else if (errorCode === errorCodes.clientTimeout) { + this._parent.log('warning', err.message); + this._host.checkHealth(this._connection); + } + + const decisionInfo = this._getDecision(errorCode, err); + + if (!decisionInfo || decisionInfo.decision === retry.RetryPolicy.retryDecision.rethrow) { + if (this._request instanceof requests.QueryRequest || this._request instanceof requests.ExecuteRequest) { + err['query'] = this._request.query; + } + return this._parent.setCompleted(err); + } + + const metrics = this._parent.client.metrics; + + if (decisionInfo.decision === retry.RetryPolicy.retryDecision.ignore) { + metrics.onIgnoreError(err); + + // Return an empty ResultSet + return this._parent.setCompleted(null, this._getResultSet(utils.emptyObject)); + } + + RequestExecution._invokeMetricsHandlerForRetry(errorCode, metrics, err); + + return this._retry(decisionInfo.consistency, decisionInfo.useCurrentHost); + } + + /** + * Gets a decision whether or not to retry based on the error information. + * @param {Number} errorCode + * @param {Error} err + * @returns {{decision, useCurrentHost, consistency}} + */ + _getDecision(errorCode, err) { + const operationInfo = { + query: this._request && this._request.query, + executionOptions: this._parent.executionOptions, + nbRetry: this._retryCount + }; + + const retryPolicy = operationInfo.executionOptions.getRetryPolicy(); + + switch (errorCode) { + case errorCodes.socketErrorBeforeRequestWritten: + // The request was definitely not applied, it's safe to retry. 
+ // Retry on the current host as there might be other connections open, in case it fails to obtain a connection + // on the current host, the driver will immediately retry on the next host. + return retryOnCurrentHost; + case errorCodes.socketError: + case errorCodes.clientTimeout: + case errorCodes.serverErrorOverloaded: + if (operationInfo.executionOptions.isIdempotent()) { + return retryPolicy.onRequestError(operationInfo, this._request.consistency, err); + } + return rethrowDecision; + case errorCodes.serverErrorUnavailable: + return retryPolicy.onUnavailable(operationInfo, err.consistencies, err.required, err.alive); + case errorCodes.serverErrorReadTimeout: + return retryPolicy.onReadTimeout( + operationInfo, err.consistencies, err.received, err.blockFor, err.isDataPresent); + case errorCodes.serverErrorWriteTimeout: + if (operationInfo.executionOptions.isIdempotent()) { + return retryPolicy.onWriteTimeout( + operationInfo, err.consistencies, err.received, err.blockFor, err.writeType); + } + return rethrowDecision; + default: + return rethrowDecision; + } + } + + static _getErrorCode(err) { + if (!err) { + return errorCodes.none; + } + + if (err.isSocketError) { + if (err.requestNotWritten) { + return errorCodes.socketErrorBeforeRequestWritten; + } + return errorCodes.socketError; + } + + if (err instanceof errors.OperationTimedOutError) { + return errorCodes.clientTimeout; + } + + if (err instanceof errors.ResponseError) { + switch (err.code) { + case types.responseErrorCodes.overloaded: + case types.responseErrorCodes.isBootstrapping: + case types.responseErrorCodes.truncateError: + return errorCodes.serverErrorOverloaded; + case types.responseErrorCodes.unavailableException: + return errorCodes.serverErrorUnavailable; + case types.responseErrorCodes.readTimeout: + return errorCodes.serverErrorReadTimeout; + case types.responseErrorCodes.writeTimeout: + return errorCodes.serverErrorWriteTimeout; + case types.responseErrorCodes.unprepared: + return 
errorCodes.serverErrorUnprepared; + } + } + + return errorCodes.serverErrorOther; + } + + /** + * @param {Number|undefined} consistency + * @param {Boolean} useCurrentHost + * @param {Object} [meta] + * @private + */ + _retry(consistency, useCurrentHost, meta) { + if (this._cancelled) { + // No point in retrying + return; + } + + this._parent.log('info', 'Retrying request'); + this._retryCount++; + + if (meta || (typeof consistency === 'number' && this._request.consistency !== consistency)) { + this._request = this._request.clone(); + if (typeof consistency === 'number') { + this._request.consistency = consistency; + } + // possible that we are retrying because we had to reprepare. In this case it is also possible + // that our known metadata had changed, therefore we update it on the request. + if (meta) { + this._request.meta = meta; + } + } + + if (useCurrentHost !== false) { + // Reusing the existing connection is suitable for the most common scenarios, like server read timeouts that + // will be fixed with a new request. + // To cover all scenarios (e.g., where a different connection to the same host might mean something different), + // we obtain a new connection from the host pool. + // When there was a socket error, the connection provided was already removed from the pool earlier. + try { + this._connection = this._host.borrowConnection(this._connection); + } catch (err) { + // All connections are busy (`BusyConnectionError`) or there isn't a ready connection in the pool (`Error`) + // The retry policy declared the intention to retry on the current host but its not available anymore. + // Use the next host + return promiseUtils.toBackground(this.restart()); + } + + return this._sendOnConnection(); + } + + // Use the next host in the query plan to send the request in the background + promiseUtils.toBackground(this.restart()); + } + + /** + * Issues a PREPARE request on the current connection. 
+ * If there's a socket or timeout issue, it moves to next host and executes the original request. + * @param {Buffer} queryId + * @private + */ + _prepareAndRetry(queryId) { + const connection = this._connection; + + this._parent.log('info', + `Query 0x${queryId.toString('hex')} not prepared on` + + ` host ${connection.endpointFriendlyName}, preparing and retrying`); + + const info = this._parent.client.metadata.getPreparedById(queryId); + + if (!info) { + return this._parent.setCompleted(new errors.DriverInternalError( + `Unprepared response invalid, id: 0x${queryId.toString('hex')}`)); + } + + const version = this._connection.protocolVersion; + + if (!types.protocolVersion.supportsKeyspaceInRequest(version) && info.keyspace && info.keyspace !== connection.keyspace) { + return this._parent.setCompleted( + new Error(`Query was prepared on keyspace ${info.keyspace}, can't execute it on ${connection.keyspace} (${info.query})`)); + } + + const self = this; + this._connection.prepareOnce(info.query, info.keyspace, function (err, result) { + if (err) { + if (!err.isSocketError && err instanceof errors.OperationTimedOutError) { + self._parent.log('warning', + `Unexpected timeout error when re-preparing query on host ${connection.endpointFriendlyName}`); + } + + // There was a failure re-preparing on this connection. + // Execute the original request on the next connection and forget about the PREPARE-UNPREPARE flow. + return self._retry(undefined, false); + } + + // It's possible that when re-preparing we got new metadata (i.e. if schema changed), update cache. + info.meta = result.meta; + // pass the metadata so it can be used in retry. 
+ self._retry(undefined, true, result.meta); + }); + } +} + +module.exports = RequestExecution; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/request-handler.js b/node_modules/cassandra-driver/lib/request-handler.js new file mode 100644 index 0000000..c1c7b6d --- /dev/null +++ b/node_modules/cassandra-driver/lib/request-handler.js @@ -0,0 +1,311 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); + +const errors = require('./errors'); +const types = require('./types'); +const utils = require('./utils'); +const RequestExecution = require('./request-execution'); +const promiseUtils = require('./promise-utils'); + +/** + * Handles a BATCH, QUERY and EXECUTE request to the server, dealing with host fail-over and retries on error + */ +class RequestHandler { + /** + * Creates a new instance of RequestHandler. + * @param {Request} request + * @param {ExecutionOptions} execOptions + * @param {Client} client Client instance used to retrieve and set the keyspace. 
 */
  constructor(request, execOptions, client) {
    this.client = client;
    this._speculativeExecutionPlan = client.options.policies.speculativeExecution.newPlan(
      client.keyspace, request.query || request.queries);
    this.logEmitter = client.options.logEmitter;
    this.log = utils.log;
    this.request = request;
    this.executionOptions = execOptions;
    this.stackContainer = null;
    // Map of host address -> last error (or null on success) for error reporting.
    this.triedHosts = {};
    // start at -1 as first request does not count.
    this.speculativeExecutions = -1;
    this._hostIterator = null;
    this._resolveCallback = null;
    this._rejectCallback = null;
    this._newExecutionTimeout = null;
    /** @type {RequestExecution[]} */
    this._executions = [];
  }

  /**
   * Sends a new BATCH, QUERY or EXECUTE request.
   * @param {Request} request
   * @param {ExecutionOptions} execOptions
   * @param {Client} client Client instance used to retrieve and set the keyspace.
   * @returns {Promise}
   */
  static send(request, execOptions, client) {
    const instance = new RequestHandler(request, execOptions, client);
    return instance.send();
  }

  /**
   * Gets a connection from the next host according to the query plan or throws a NoHostAvailableError.
   * @returns {{host, connection}}
   * @throws {NoHostAvailableError}
   */
  getNextConnection() {
    let host;
    let connection;
    const iterator = this._hostIterator;

    // Get a host that is UP in a sync loop
    while (true) {
      const item = iterator.next();
      if (item.done) {
        throw new errors.NoHostAvailableError(this.triedHosts);
      }

      host = item.value;

      // Set the distance relative to the client first
      const distance = this.client.profileManager.getDistance(host);
      if (distance === types.distance.ignored) {
        //If its marked as ignore by the load balancing policy, move on.
        continue;
      }

      if (!host.isUp()) {
        this.triedHosts[host.address] = 'Host considered as DOWN';
        continue;
      }

      try {
        connection = host.borrowConnection();
        this.triedHosts[host.address] = null;
        break;
      } catch (err) {
        // Pool could not lend a connection; record and move to the next host.
        this.triedHosts[host.address] = err;
      }
    }

    return { connection, host };
  }

  /**
   * Gets an available connection and sends the request
   * @returns {Promise}
   */
  send() {
    if (this.executionOptions.getCaptureStackTrace()) {
      Error.captureStackTrace(this.stackContainer = {});
    }

    return new Promise((resolve, reject) => {
      this._resolveCallback = resolve;
      this._rejectCallback = reject;

      const lbp = this.executionOptions.getLoadBalancingPolicy();
      const fixedHost = this.executionOptions.getFixedHost();

      if (fixedHost) {
        // if host is configured bypass load balancing policy and use
        // a single host plan.
        this._hostIterator = utils.arrayIterator([fixedHost]);
        promiseUtils.toBackground(this._startNewExecution());
      } else {
        lbp.newQueryPlan(this.client.keyspace, this.executionOptions, (err, iterator) => {
          if (err) {
            return reject(err);
          }

          this._hostIterator = iterator;
          promiseUtils.toBackground(this._startNewExecution());
        });
      }
    });
  }

  /**
   * Starts a new execution on the next host of the query plan.
   * @param {Boolean} [isSpecExec]
   * @returns {Promise}
   * @private
   */
  async _startNewExecution(isSpecExec) {
    if (isSpecExec) {
      this.client.metrics.onSpeculativeExecution();
    }

    let host;
    let connection;

    try {
      ({ host, connection } = this.getNextConnection());
    } catch (err) {
      return this.handleNoHostAvailable(err, null);
    }

    // NOTE(review): `length >= 0` is always true; presumably this intends to guard a
    // non-empty array before indexing `[0]` — confirm against upstream driver source.
    if (isSpecExec && this._executions.length >= 0 && this._executions[0].wasCancelled()) {
      // This method was called on the next tick and could not be cleared, the previous execution was cancelled so
      // there's no point in launching a new execution.
      return;
    }

    if (this.client.keyspace && this.client.keyspace !== connection.keyspace) {
      try {
        await connection.changeKeyspace(this.client.keyspace);
      } catch (err) {
        this.triedHosts[host.address] = err;
        // The error occurred asynchronously
        // We can blindly re-try to obtain a different host/connection.
        return this._startNewExecution(isSpecExec);
      }
    }

    const execution = new RequestExecution(this, host, connection);
    this._executions.push(execution);
    execution.start();

    if (this.executionOptions.isIdempotent()) {
      this._scheduleSpeculativeExecution(host);
    }
  }

  /**
   * Schedules next speculative execution, if any.
   * @param {Host!} host
   * @private
   */
  _scheduleSpeculativeExecution(host) {
    const delay = this._speculativeExecutionPlan.nextExecution(host);
    if (typeof delay !== 'number' || delay < 0) {
      return;
    }

    if (delay === 0) {
      // Parallel speculative execution
      return process.nextTick(() => {
        promiseUtils.toBackground(this._startNewExecution(true));
      });
    }

    // Create timer for speculative execution
    this._newExecutionTimeout = setTimeout(() =>
      promiseUtils.toBackground(this._startNewExecution(true)), delay);
  }

  /**
   * Sets the keyspace in any connection that is already opened.
   * @param {Client} client
   * @returns {Promise}
   */
  static setKeyspace(client) {
    let connection;

    for (const host of client.hosts.values()) {
      connection = host.getActiveConnection();
      if (connection) {
        break;
      }
    }

    if (!connection) {
      throw new errors.DriverInternalError('No active connection found');
    }

    return connection.changeKeyspace(client.keyspace);
  }

  /**
   * Completes the request: cancels outstanding executions and settles the promise.
   * @param {Error} err
   * @param {ResultSet} [result]
   */
  setCompleted(err, result) {
    if (this._newExecutionTimeout !== null) {
      clearTimeout(this._newExecutionTimeout);
    }

    // Mark all executions as cancelled
    for (const execution of this._executions) {
      execution.cancel();
    }

    if (err) {
      if (this.executionOptions.getCaptureStackTrace()) {
        utils.fixStack(this.stackContainer.stack, err);
      }

      // Reject the promise
      return this._rejectCallback(err);
    }

    if (result.info.warnings) {
      // Log the warnings from the response
      result.info.warnings.forEach(function (message, i, warnings) {
        this.log('warning', util.format(
          'Received warning (%d of %d) "%s" for "%s"',
          i + 1,
          warnings.length,
          message,
          this.request.query || 'batch'));
      }, this);
    }

    // We used to invoke the callback on next tick to allow stack unwinding and prevent the optimizing compiler to
    // optimize read and write functions together.
    // As we are resolving a Promise then() and catch() are always scheduled in the microtask queue
    // We can invoke the resolve method directly.
    this._resolveCallback(result);
  }

  /**
   * @param {NoHostAvailableError} err
   * @param {RequestExecution|null} execution
   */
  handleNoHostAvailable(err, execution) {
    if (execution !== null) {
      // Remove the execution
      const index = this._executions.indexOf(execution);
      this._executions.splice(index, 1);
    }

    if (this._executions.length === 0) {
      // There aren't any other executions, we should report back to the user that there isn't
      // a host available for executing the request
      this.setCompleted(err);
    }
  }

  /**
   * Gets a long lived closure that can fetch the next page.
   * @returns {Function}
   */
  getNextPageHandler() {
    // Capture locals so the closure does not keep `this` alive via the handler instance.
    const request = this.request;
    const execOptions = this.executionOptions;
    const client = this.client;

    return function nextPageHandler(pageState) {
      execOptions.setPageState(pageState);
      return new RequestHandler(request, execOptions, client).send();
    };
  }
}

module.exports = RequestHandler;
diff --git a/node_modules/cassandra-driver/lib/requests.js b/node_modules/cassandra-driver/lib/requests.js
new file mode 100644
index 0000000..443347e
--- /dev/null
+++ b/node_modules/cassandra-driver/lib/requests.js
@@ -0,0 +1,542 @@
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
'use strict';
const util = require('util');

const { FrameWriter } = require('./writers');
const types = require('./types');
const utils = require('./utils');
const { ExecutionOptions } = require('./execution-options');
const packageInfo = require('../package.json');

/**
 * Flags used in the body of QUERY / EXECUTE requests.
 * Values come from the Cassandra native protocol specification (section 4.1.4).
 * @private
 */
const queryFlag = {
  values: 0x01,
  skipMetadata: 0x02,
  pageSize: 0x04,
  withPagingState: 0x08,
  withSerialConsistency: 0x10,
  withDefaultTimestamp: 0x20,
  withNameForValues: 0x40,
  withKeyspace: 0x80,
  withPageSizeBytes: 0x40000000,
  withContinuousPaging: 0x80000000
};

/**
 * Flags used in the body of a BATCH request, protocol v3 and above.
 * @private
 */
const batchFlag = {
  withSerialConsistency: 0x10,
  withDefaultTimestamp: 0x20,
  withNameForValues: 0x40,
  withKeyspace: 0x80
};

/**
 * Flags for a PREPARE request, protocol DSE_V2 and above.
 * @private
 */
const prepareFlag = {
  withKeyspace: 0x01
};

/** Batch kinds as encoded on the wire. @private */
const batchType = {
  logged: 0,
  unlogged: 1,
  counter: 2
};

/**
 * Abstract base class for all protocol requests.
 * Subclasses serialize themselves into a frame via {@link Request#write}.
 */
class Request {
  constructor() {
    // Body length of the last written frame; set by write() implementations.
    this.length = 0;
  }

  /**
   * Serializes the request into a frame buffer.
   * @abstract
   * @param {Encoder} encoder
   * @param {Number} streamId
   * @throws {TypeError}
   * @returns {Buffer}
   */
  write(encoder, streamId) {
    throw new Error('Method must be implemented');
  }

  /**
   * Creates a new instance using the same constructor as the current instance,
   * shallow-copying all own enumerable properties.
   * @return {Request}
   */
  clone() {
    const newRequest = new (this.constructor)();
    const keysArray = Object.keys(this);
    for (let i = 0; i < keysArray.length; i++) {
      const key = keysArray[i];
      newRequest[key] = this[key];
    }
    return newRequest;
  }
}

/**
 * Represents an EXECUTE request (executes a previously prepared statement,
 * identified by its queryId).
 */
class ExecuteRequest extends Request {
  /**
   * @param {String} query Query text, kept for error reporting.
   * @param queryId Prepared statement id (short bytes).
   * @param params Positional parameter values.
   * @param {ExecutionOptions} execOptions
   * @param meta Prepared result metadata (columns, resultId).
   */
  constructor(query, queryId, params, execOptions, meta) {
    super();

    this.query = query;
    this.queryId = queryId;
    this.params = params;
    this.meta = meta;
    this.options = execOptions || ExecutionOptions.empty();
    this.consistency = this.options.getConsistency() || types.consistencies.one;
    // Only QUERY request parameters are encoded as named parameters
    // EXECUTE request parameters are always encoded as positional parameters
    this.namedParameters = false;
  }

  /** Returns the type of the column at the given parameter position, or null. */
  getParamType(index) {
    const columnInfo = this.meta.columns[index];
    return columnInfo ? columnInfo.type : null;
  }

  /**
   * Serializes the EXECUTE request.
   * Body layout per protocol version:
   *   v1: <queryId><n><value_1>...<value_n><consistency>
   *   v2: <queryId><consistency>[<n><value_1>...<value_n>][<result_page_size>][<paging_state>][<serial_consistency>]
   *   v3: <queryId><consistency><flags>[<n>[name_1]<value_1>...[name_n]<value_n>][...]
   * @param {Encoder} encoder
   * @param {Number} streamId
   * @returns {Buffer}
   */
  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.execute);
    let headerFlags = this.options.isQueryTracing() ? types.frameFlags.tracing : 0;
    if (this.options.getCustomPayload()) {
      // The body may contain the custom payload
      headerFlags |= types.frameFlags.customPayload;
      frameWriter.writeCustomPayload(this.options.getCustomPayload());
    }
    frameWriter.writeShortBytes(this.queryId);
    if (types.protocolVersion.supportsResultMetadataId(encoder.protocolVersion)) {
      frameWriter.writeShortBytes(this.meta.resultId);
    }
    this.writeQueryParameters(frameWriter, encoder);

    // Record the length of the body of the request before writing it
    this.length = frameWriter.bodyLength;

    return frameWriter.write(encoder.protocolVersion, streamId, headerFlags);
  }

  /**
   * Writes the query parameters section shared by QUERY and EXECUTE requests.
   * The exact layout depends on the negotiated protocol version (see write()).
   * @param {FrameWriter} frameWriter
   * @param {Encoder} encoder
   * @param {Boolean} [isQuery] True if QUERY, otherwise assumed to be an EXECUTE request.
   */
  writeQueryParameters(frameWriter, encoder, isQuery) {
    let flags = 0;

    const timestamp = this.options.getOrGenerateTimestamp();

    if (types.protocolVersion.supportsPaging(encoder.protocolVersion)) {
      flags |= (this.params && this.params.length) ? queryFlag.values : 0;
      flags |= (this.options.getFetchSize() > 0) ? queryFlag.pageSize : 0;
      flags |= this.options.getPageState() ? queryFlag.withPagingState : 0;
      flags |= this.options.getSerialConsistency() ? queryFlag.withSerialConsistency : 0;
      flags |= timestamp !== null && timestamp !== undefined ? queryFlag.withDefaultTimestamp : 0;
      flags |= this.namedParameters ? queryFlag.withNameForValues : 0;

      // Don't inject keyspace for EXECUTE requests as inherited from prepared statement.
      const supportsKeyspace = isQuery && types.protocolVersion.supportsKeyspaceInRequest(encoder.protocolVersion);
      flags |= supportsKeyspace && this.options.getKeyspace() ? queryFlag.withKeyspace : 0;

      frameWriter.writeShort(this.consistency);
      if (types.protocolVersion.uses4BytesQueryFlags(encoder.protocolVersion)) {
        frameWriter.writeInt(flags);
      }
      else {
        frameWriter.writeByte(flags);
      }
    }

    if (this.params && this.params.length) {
      frameWriter.writeShort(this.params.length);
      for (let i = 0; i < this.params.length; i++) {
        let paramValue = this.params[i];
        if (flags & queryFlag.withNameForValues) {
          // Parameter is composed by name / value
          frameWriter.writeString(paramValue.name);
          paramValue = paramValue.value;
        }
        frameWriter.writeBytes(encoder.encode(paramValue, this.getParamType(i)));
      }
    }

    if (!types.protocolVersion.supportsPaging(encoder.protocolVersion)) {
      if (!this.params || !this.params.length) {
        // Zero parameters
        frameWriter.writeShort(0);
      }
      frameWriter.writeShort(this.consistency);
      return;
    }
    if (flags & queryFlag.pageSize) {
      frameWriter.writeInt(this.options.getFetchSize());
    }
    if (flags & queryFlag.withPagingState) {
      frameWriter.writeBytes(this.options.getPageState());
    }
    if (flags & queryFlag.withSerialConsistency) {
      frameWriter.writeShort(this.options.getSerialConsistency());
    }
    if (flags & queryFlag.withDefaultTimestamp) {
      frameWriter.writeLong(timestamp);
    }
    if (flags & queryFlag.withKeyspace) {
      frameWriter.writeString(this.options.getKeyspace());
    }
  }
}

/**
 * Represents a QUERY request (unprepared query string).
 * Reuses the parameter serialization logic from {@link ExecuteRequest}.
 */
class QueryRequest extends ExecuteRequest {
  /**
   * @param {String} query
   * @param params
   * @param {ExecutionOptions} [execOptions]
   * @param {Boolean} [namedParameters]
   */
  constructor(query, params, execOptions, namedParameters) {
    super(query, null, params, execOptions, null);
    this.hints = this.options.getHints() || utils.emptyArray;
    this.namedParameters = namedParameters;
  }

  /** Parameter types come from user-provided hints (no prepared metadata). */
  getParamType(index) {
    return this.hints[index];
  }

  /**
   * Serializes the QUERY request.
   * Body layout per protocol version:
   *   v1: <query><consistency>
   *   v2: <query><consistency>[<n><value_1>...<value_n>][<result_page_size>][<paging_state>][<serial_consistency>]
   *   v3: <query><consistency><flags>[<n>[name_1]<value_1>...[name_n]<value_n>][...]
   * @param {Encoder} encoder
   * @param {Number} streamId
   * @returns {Buffer}
   */
  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.query);
    let headerFlags = this.options.isQueryTracing() ? types.frameFlags.tracing : 0;
    if (this.options.getCustomPayload()) {
      // The body may contain the custom payload
      headerFlags |= types.frameFlags.customPayload;
      frameWriter.writeCustomPayload(this.options.getCustomPayload());
    }

    frameWriter.writeLString(this.query);

    if (!types.protocolVersion.supportsPaging(encoder.protocolVersion)) {
      frameWriter.writeShort(this.consistency);
    } else {
      // Use the same fields as the execute writer
      this.writeQueryParameters(frameWriter, encoder, true);
    }

    // Record the length of the body of the request before writing it
    this.length = frameWriter.bodyLength;

    return frameWriter.write(encoder.protocolVersion, streamId, headerFlags);
  }
}

/**
 * Represents a PREPARE request.
 */
class PrepareRequest extends Request {
  /**
   * @param {String} query Query to prepare.
   * @param {String} [keyspace] Keyspace to prepare against (DSE_V2+ only).
   */
  constructor(query, keyspace) {
    super();
    this.query = query;
    this.keyspace = keyspace;
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.prepare);
    frameWriter.writeLString(this.query);
    if (types.protocolVersion.supportsPrepareFlags(encoder.protocolVersion)) {
      // Keyspace can only be specified when the protocol version supports it
      const flags = this.keyspace && types.protocolVersion.supportsKeyspaceInRequest(encoder.protocolVersion)
        ? prepareFlag.withKeyspace : 0;
      frameWriter.writeInt(flags);
      if (flags & prepareFlag.withKeyspace) {
        frameWriter.writeString(this.keyspace);
      }
    }
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents a STARTUP request: the first message sent on a connection,
 * declaring CQL version, driver identity and optional settings.
 */
class StartupRequest extends Request {

  /**
   * Creates a new instance of {@link StartupRequest}.
   * @param {Object} [options]
   * @param [options.cqlVersion] CQL version string; defaults to '3.0.0'.
   * @param [options.noCompact] When truthy, sends the NO_COMPACT option.
   * @param [options.clientId] Client identifier, serialized with toString().
   * @param [options.applicationName]
   * @param [options.applicationVersion]
   */
  constructor(options) {
    super();
    this.options = options || {};
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.startup);

    const startupOptions = {
      CQL_VERSION: this.options.cqlVersion || '3.0.0',
      DRIVER_NAME: packageInfo.description,
      DRIVER_VERSION: packageInfo.version
    };

    if (this.options.noCompact) {
      startupOptions['NO_COMPACT'] = 'true';
    }

    if (this.options.clientId) {
      startupOptions['CLIENT_ID'] = this.options.clientId.toString();
    }

    if (this.options.applicationName) {
      startupOptions['APPLICATION_NAME'] = this.options.applicationName;
    }

    if (this.options.applicationVersion) {
      startupOptions['APPLICATION_VERSION'] = this.options.applicationVersion;
    }

    frameWriter.writeStringMap(startupOptions);
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents a REGISTER request used to subscribe to server events.
 */
class RegisterRequest extends Request {
  /** @param {Array.<String>} events Event names to register for. */
  constructor(events) {
    super();
    this.events = events;
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.register);
    frameWriter.writeStringList(this.events);
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents an AUTH_RESPONSE request.
 */
class AuthResponseRequest extends Request {
  /** @param {Buffer} token SASL token produced by the authenticator. */
  constructor(token) {
    super();
    this.token = token;
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.authResponse);
    frameWriter.writeBytes(this.token);
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents a protocol v1 CREDENTIALS request message.
 */
class CredentialsRequest extends Request {
  constructor(username, password) {
    super();
    this.username = username;
    this.password = password;
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.credentials);
    frameWriter.writeStringMap({ username: this.username, password: this.password });
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents a BATCH request containing multiple prepared and/or unprepared queries.
 */
class BatchRequest extends Request {
  /**
   * Creates a new instance of BatchRequest.
   * @param {Array.<{query, params, [info]}>} queries Array of objects with the properties query and params
   * @param {ExecutionOptions} execOptions
   */
  constructor(queries, execOptions) {
    super();
    this.queries = queries;
    this.options = execOptions;
    this.hints = execOptions.getHints() || utils.emptyArray;
    this.type = batchType.logged;

    if (execOptions.isBatchCounter()) {
      this.type = batchType.counter;
    } else if (!execOptions.isBatchLogged()) {
      this.type = batchType.unlogged;
    }
  }

  /**
   * Serializes the batch request.
   * Body layout:
   *   v2: <type><n><query_1>...<query_n><consistency>
   *   v3: <type><n><query_1>...<query_n><consistency><flags>[<serial_consistency>][<timestamp>]
   *   dseV1+: similar to v3/v4, flags is an int instead of a byte
   * @throws {TypeError} When there are no queries to serialize.
   */
  write(encoder, streamId) {
    if (!this.queries || !(this.queries.length > 0)) {
      throw new TypeError(util.format('Invalid queries provided %s', this.queries));
    }
    const frameWriter = new FrameWriter(types.opcodes.batch);
    let headerFlags = this.options.isQueryTracing() ? types.frameFlags.tracing : 0;
    if (this.options.getCustomPayload()) {
      // The body may contain the custom payload
      headerFlags |= types.frameFlags.customPayload;
      frameWriter.writeCustomPayload(this.options.getCustomPayload());
    }
    frameWriter.writeByte(this.type);
    frameWriter.writeShort(this.queries.length);
    const self = this;
    this.queries.forEach(function eachQuery(item, i) {
      const hints = self.hints[i];
      const params = item.params || utils.emptyArray;
      let getParamType;
      if (item.queryId) {
        // Contains prepared queries
        frameWriter.writeByte(1);
        frameWriter.writeShortBytes(item.queryId);
        getParamType = i => item.meta.columns[i].type;
      }
      else {
        // Contains string queries
        frameWriter.writeByte(0);
        frameWriter.writeLString(item.query);
        getParamType = hints ? (i => hints[i]) : (() => null);
      }

      frameWriter.writeShort(params.length);
      params.forEach((param, index) => frameWriter.writeBytes(encoder.encode(param, getParamType(index))));
    }, this);

    frameWriter.writeShort(this.options.getConsistency());

    if (types.protocolVersion.supportsTimestamp(encoder.protocolVersion)) {
      // Batch flags
      let flags = this.options.getSerialConsistency() ? batchFlag.withSerialConsistency : 0;
      const timestamp = this.options.getOrGenerateTimestamp();
      flags |= timestamp !== null && timestamp !== undefined ? batchFlag.withDefaultTimestamp : 0;

      flags |= this.options.getKeyspace() && types.protocolVersion.supportsKeyspaceInRequest(encoder.protocolVersion)
        ? batchFlag.withKeyspace : 0;

      if (types.protocolVersion.uses4BytesQueryFlags(encoder.protocolVersion)) {
        frameWriter.writeInt(flags);
      }
      else {
        frameWriter.writeByte(flags);
      }

      if (flags & batchFlag.withSerialConsistency) {
        frameWriter.writeShort(this.options.getSerialConsistency());
      }

      if (flags & batchFlag.withDefaultTimestamp) {
        frameWriter.writeLong(timestamp);
      }

      if (flags & batchFlag.withKeyspace) {
        frameWriter.writeString(this.options.getKeyspace());
      }
    }

    // Set the length of the body of the request before writing it
    this.length = frameWriter.bodyLength;

    return frameWriter.write(encoder.protocolVersion, streamId, headerFlags);
  }

  clone() {
    return new BatchRequest(this.queries, this.options);
  }
}

/**
 * Represents a CANCEL request for a given continuous paging operation.
 * Converted from legacy function/util.inherits style to a class for
 * consistency with the other request types in this module; the public
 * interface (constructor arity, fields, write signature) is unchanged.
 */
class CancelRequest extends Request {
  /** @param {Number} operationId Id of the operation to cancel. */
  constructor(operationId) {
    super();
    this.streamId = null;
    this.operationId = operationId;
  }

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.cancel);
    // 1 is the type of cancellation (continuous paging)
    frameWriter.writeInt(1);
    frameWriter.writeInt(this.operationId);
    return frameWriter.write(encoder.protocolVersion, streamId);
  }
}

/**
 * Represents an OPTIONS request. Stateless, so a single shared instance is exported.
 */
class OptionsRequest extends Request {

  write(encoder, streamId) {
    const frameWriter = new FrameWriter(types.opcodes.options);
    return frameWriter.write(encoder.protocolVersion, streamId, 0);
  }

  clone() {
    // Since options has no unique state, simply return self.
    return this;
  }
}

const options = new OptionsRequest();

exports.AuthResponseRequest = AuthResponseRequest;
exports.BatchRequest = BatchRequest;
exports.CancelRequest = CancelRequest;
exports.CredentialsRequest = CredentialsRequest;
exports.ExecuteRequest = ExecuteRequest;
exports.PrepareRequest = PrepareRequest;
exports.QueryRequest = QueryRequest;
exports.Request = Request;
exports.RegisterRequest = RegisterRequest;
exports.StartupRequest = StartupRequest;
exports.options = options;
'use strict';

const types = require('./types');

/**
 * Group size: ids are allocated in arrays of 128.
 * @const
 * @type {number}
 */
const groupSize = 128;

/**
 * Number used to right shift ids to map them to their group index (2^7 = 128).
 * @const
 * @type {number}
 */
const shiftToGroup = 7;

/**
 * Amount of groups that can be released per release pass.
 * Groups are only released once more than this many groups exist (128 * 4 ids).
 * @const
 * @type {number}
 */
const releasableSize = 4;

/**
 * Maximum number of groups for protocol v3 and above: 256 groups * 128 ids = 32K
 * possible stream ids (2-byte stream ids).
 * @const
 * @type {number}
 */
const maxGroupsFor2Bytes = 256;

/**
 * Default delay, in milliseconds, before checking whether idle groups can be released.
 * @const
 * @type {number}
 */
const defaultReleaseDelay = 5000;

/**
 * Represents a pool of ids from 0 to the maximum stream id supported by the protocol version.
 * Clients can dequeue a stream id using {@link StreamIdStack#pop()} and enqueue (release) using
 * {@link StreamIdStack#push()}.
 */
class StreamIdStack {
  /**
   * Creates a new instance of StreamIdStack.
   * @param {number} version Protocol version
   * @constructor
   */
  constructor(version) {
    // Ecmascript Number is a 64-bit double; engines can optimize it to a 32-bit int
    // but nothing smaller, so ids are allocated lazily in arrays of 128 to save memory.
    this.currentGroup = generateGroup(0);
    this.groupIndex = 0;
    this.groups = [this.currentGroup];
    // Pending timer for releasing idle groups, or null when none is scheduled.
    this.releaseTimeout = null;
    this.setVersion(version);
    /**
     * Returns the amount of ids currently in use
     * @member {number}
     */
    this.inUse = 0;
    this.releaseDelay = defaultReleaseDelay;
  }

  /**
   * Sets the protocol version, which bounds the number of groups:
   * 128 ids (1 group) for v1/v2, 32K ids (256 groups) for v3+.
   * @param {Number} version
   */
  setVersion(version) {
    this.maxGroups = types.protocolVersion.uses2BytesStreamIds(version) ? maxGroupsFor2Bytes : 1;
  }

  /**
   * Dequeues an id.
   * Similar to {@link Array#pop()}.
   * @returns {Number} Returns an id, or null when all ids allowed by the
   * protocol version are currently in use.
   */
  pop() {
    let id = this.currentGroup.pop();
    if (typeof id !== 'undefined') {
      this.inUse++;
      return id;
    }
    // The current group is exhausted: try the following groups.
    while (this.groupIndex < this.groups.length - 1) {
      // Move to the following group
      this.currentGroup = this.groups[++this.groupIndex];
      // Try dequeue
      id = this.currentGroup.pop();
      if (typeof id !== 'undefined') {
        this.inUse++;
        return id;
      }
    }
    // All existing groups are exhausted: attempt to allocate a new one.
    return this._tryCreateGroup();
  }

  /**
   * Enqueues (releases) an id for future use.
   * Similar to {@link Array#push()}.
   * @param {Number} id
   */
  push(id) {
    this.inUse--;
    // Map the id back to its owning group via the group bit shift.
    const groupIndex = id >> shiftToGroup;
    const group = this.groups[groupIndex];
    group.push(id);
    if (groupIndex < this.groupIndex) {
      // Prefer dequeuing from the lowest non-empty group so higher groups drain
      // and become releasable.
      this.groupIndex = groupIndex;
      this.currentGroup = group;
    }
    this._tryIssueRelease();
  }

  /**
   * Clears any pending release timer.
   */
  clear() {
    if (this.releaseTimeout) {
      clearTimeout(this.releaseTimeout);
      this.releaseTimeout = null;
    }
  }

  /**
   * Tries to create an additional group and returns a new id from it.
   * @returns {Number} Returns a new id or null if it's not possible to create a new group
   * @private
   */
  _tryCreateGroup() {
    if (this.groups.length === this.maxGroups) {
      // Can NOT create an additional group: the protocol version's maximum
      // number of stream ids has been reached.
      return null;
    }
    // Add a new group at the last position
    this.groupIndex = this.groups.length;
    // Using 128 * groupIndex as initial value
    this.currentGroup = generateGroup(this.groupIndex << shiftToGroup);
    this.groups.push(this.currentGroup);
    this.inUse++;
    return this.currentGroup.pop();
  }

  /**
   * Schedules a release pass, unless one is already pending or there are too
   * few groups to be worth releasing.
   * @private
   */
  _tryIssueRelease() {
    if (this.releaseTimeout || this.groups.length <= releasableSize) {
      // Nothing to release or a release delay has already been issued
      return;
    }

    this.releaseTimeout = setTimeout(() => this._releaseGroups(), this.releaseDelay);
  }

  /**
   * Releases trailing groups that are completely unused (full), from the top down.
   * @private
   */
  _releaseGroups() {
    let counter = 0;
    let index = this.groups.length - 1;
    // Only release up to n groups (n = releasable size) per pass and
    // shrink back down to n groups at most, never all the way down to 1.
    while (counter++ < releasableSize && this.groups.length > releasableSize && index > this.groupIndex) {
      if (this.groups[index].length !== groupSize) {
        // The group has ids checked out; stop releasing.
        break;
      }
      this.groups.pop();
      index--;
    }
    this.releaseTimeout = null;
    // Issue the next release pass if it still applies
    this._tryIssueRelease();
  }
}

/**
 * Creates a group (array) of ids [initialValue .. initialValue + 127], stored in
 * descending order so that pop() yields the lowest id first.
 * @param {number} initialValue First id of the group.
 * @returns {Array.<number>}
 */
function generateGroup(initialValue) {
  const arr = new Array(groupSize);
  const upperBound = initialValue + groupSize - 1;
  for (let i = 0; i < groupSize; i++) {
    arr[i] = upperBound - i;
  }
  return arr;
}

module.exports = StreamIdStack;
'use strict';

const util = require('util');
const { Transform, Writable } = require('stream');

const types = require('./types');
const utils = require('./utils');
const errors = require('./errors');
const { FrameHeader } = types;
const { FrameReader } = require('./readers');

/**
 * Transforms raw socket chunks into frame items: { header, chunk, offset, frameEnded }.
 * @param options Stream options
 * @extends Transform
 */
function Protocol(options) {
  Transform.call(this, options);
  this.header = null;
  this.bodyLength = 0;
  this.clearHeaderChunks();
  // 0 until the first response reveals the protocol version in use.
  this.version = 0;
  this.headerSize = 0;
}

util.inherits(Protocol, Transform);

Protocol.prototype._transform = function (chunk, encoding, callback) {
  let error = null;
  try {
    this.readItems(chunk);
  }
  catch (err) {
    error = err;
  }
  callback(error);
};

/**
 * Parses the chunk into frames (header and body).
 * Emits (push) complete frames or frames with incomplete bodies. Following chunks containing the rest of the body will
 * be emitted using the same frame.
 * It buffers incomplete headers.
 * @param {Buffer} chunk
 */
Protocol.prototype.readItems = function (chunk) {
  if (!chunk || chunk.length === 0) {
    return;
  }
  if (this.version === 0) {
    // The server replies the first message with the max protocol version supported
    this.version = FrameHeader.getProtocolVersion(chunk);
    this.headerSize = FrameHeader.size(this.version);
  }
  let offset = 0;
  let currentHeader = this.header;
  this.header = null;
  if (this.headerChunks.byteLength !== 0) {
    // An incomplete header was buffered: try to read the header from the buffered chunks
    this.headerChunks.parts.push(chunk);
    if (this.headerChunks.byteLength + chunk.length < this.headerSize) {
      // Still not enough bytes for a complete header
      this.headerChunks.byteLength += chunk.length;
      return;
    }
    currentHeader = FrameHeader.fromBuffer(Buffer.concat(this.headerChunks.parts, this.headerSize));
    offset = this.headerSize - this.headerChunks.byteLength;
    this.clearHeaderChunks();
  }
  const items = [];
  while (true) {
    if (!currentHeader) {
      if (this.headerSize > chunk.length - offset) {
        if (chunk.length - offset <= 0) {
          break;
        }
        // The header is incomplete: buffer it until the next chunk
        const headerPart = chunk.slice(offset, chunk.length);
        this.headerChunks.parts.push(headerPart);
        this.headerChunks.byteLength = headerPart.length;
        break;
      }
      // Read header
      currentHeader = FrameHeader.fromBuffer(chunk, offset);
      offset += this.headerSize;
    }
    // Parse body
    const remaining = chunk.length - offset;
    if (currentHeader.bodyLength <= remaining + this.bodyLength) {
      items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: true });
      offset += currentHeader.bodyLength - this.bodyLength;
      // Reset the body length
      this.bodyLength = 0;
    }
    else if (remaining >= 0) {
      // The body is not fully contained in this chunk; will continue with the next chunk
      this.header = currentHeader;
      this.bodyLength += remaining;
      if (remaining > 0) {
        // Emit if there is at least a byte to emit
        items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: false });
      }
      break;
    }
    currentHeader = null;
  }
  for (let i = 0; i < items.length; i++) {
    this.push(items[i]);
  }
};

/** Resets the buffer used for incomplete frame headers. */
Protocol.prototype.clearHeaderChunks = function () {
  this.headerChunks = { byteLength: 0, parts: [] };
};

/**
 * A stream that reads header + body chunks and transforms them into header + (row | error).
 * @param {Object} streamOptions Node.js Stream options
 * @param {Encoder} encoder Encoder instance for the parser to use
 * @extends Transform
 */
function Parser(streamOptions, encoder) {
  Transform.call(this, streamOptions);
  // Per-frame parsing state for frames that span multiple chunks, indexed by stream id
  this.frames = {};
  this.encoder = encoder;
}

util.inherits(Parser, Transform);

Parser.prototype._transform = function (item, encoding, callback) {
  const frameInfo = this.frameState(item);

  let error = null;
  try {
    this.parseBody(frameInfo, item);
  }
  catch (err) {
    error = err;
  }
  callback(error);

  if (item.frameEnded) {
    if (frameInfo.cellBuffer) {
      // Frame was being streamed but an error forced it to buffer the result
      this.push({
        header: frameInfo.header,
        error: new errors.DriverInternalError('There was a problem while parsing streaming frame, opcode ' +
          frameInfo.header.opcode)
      });
    }
    // All the parsing finished and it was streamed down; emit an item that signals it
    this.push({ header: frameInfo.header, frameEnded: true });
  }
};

/**
 * Parses the body of a frame item, dispatching on the opcode.
 * @param frameInfo Mutable per-frame parsing state.
 * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item
 */
Parser.prototype.parseBody = function (frameInfo, item) {
  frameInfo.isStreaming = frameInfo.byRow && item.header.opcode === types.opcodes.result;
  if (!this.handleFrameBuffers(frameInfo, item)) {
    // Frame isn't complete and we are not streaming the frame
    return;
  }
  const reader = new FrameReader(item.header, item.chunk, item.offset);
  // Check that flags have not been parsed yet for this frame
  if (frameInfo.flagsInfo === undefined) {
    const originalOffset = reader.offset;
    try {
      frameInfo.flagsInfo = reader.readFlagsInfo();
    }
    catch (e) {
      return this.handleParsingError(e, frameInfo, reader, originalOffset);
    }
  }

  // All the body for most operations is already buffered at this stage, except for RESULT
  switch (item.header.opcode) {
    case types.opcodes.result:
      return this.parseResult(frameInfo, reader);
    case types.opcodes.ready:
    case types.opcodes.authSuccess:
      return this.push({ header: frameInfo.header, ready: true });
    case types.opcodes.authChallenge:
      return this.push({ header: frameInfo.header, authChallenge: true, token: reader.readBytes() });
    case types.opcodes.authenticate:
      return this.push({ header: frameInfo.header, mustAuthenticate: true, authenticatorName: reader.readString() });
    case types.opcodes.error:
      return this.push({ header: frameInfo.header, error: reader.readError() });
    case types.opcodes.supported:
      return this.push({ header: frameInfo.header, supported: reader.readStringMultiMap() });
    case types.opcodes.event:
      return this.push({ header: frameInfo.header, event: reader.readEvent() });
    default:
      return this.push({ header: frameInfo.header, error: new Error('Received invalid opcode: ' + item.header.opcode) });
  }
};

/**
 * Buffers if needed and returns true if it has all the necessary data to continue parsing the frame.
 * @param frameInfo
 * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item
 * @returns {Boolean}
 */
Parser.prototype.handleFrameBuffers = function (frameInfo, item) {
  if (!frameInfo.isStreaming) {
    // Handle buffering for complete frame bodies
    const currentLength = (frameInfo.bufferLength || 0) + item.chunk.length - item.offset;
    if (currentLength < item.header.bodyLength) {
      // Buffer until the frame is completed
      this.addFrameBuffer(frameInfo, item);
      return false;
    }
    // We have received the full frame body
    if (frameInfo.buffers) {
      item.chunk = this.getFrameBuffer(frameInfo, item);
      item.offset = 0;
    }
    return true;
  }
  if (frameInfo.cellBuffer) {
    // Handle buffering for frame cells (row cells or metadata cells)
    if (item.offset !== 0) {
      throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero');
    }
    frameInfo.cellBuffer.parts.push(item.chunk);
    if (!frameInfo.cellBuffer.expectedLength) {
      // It's a buffer outside a row cell (metadata or other)
      if (frameInfo.cellBuffer.parts.length !== 2) {
        throw new errors.DriverInternalError('Buffer for streaming frame can not contain more than 1 item');
      }
      item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length);
      frameInfo.cellBuffer = null;
      return true;
    }
    if (frameInfo.cellBuffer.expectedLength > frameInfo.cellBuffer.byteLength + item.chunk.length) {
      // We still haven't got the cell data
      frameInfo.cellBuffer.byteLength += item.chunk.length;
      return false;
    }
    item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length);
    frameInfo.cellBuffer = null;
  }
  return true;
};

/**
 * Adds this chunk to the frame buffers.
 * @param frameInfo
 * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item
 */
Parser.prototype.addFrameBuffer = function (frameInfo, item) {
  if (!frameInfo.buffers) {
    frameInfo.buffers = [ item.chunk.slice(item.offset) ];
    frameInfo.bufferLength = item.chunk.length - item.offset;
    return;
  }
  if (item.offset > 0) {
    throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero');
  }
  frameInfo.buffers.push(item.chunk);
  frameInfo.bufferLength += item.chunk.length;
};

/**
 * Adds the last chunk and concatenates the frame buffers.
 * @param frameInfo
 * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item
 */
Parser.prototype.getFrameBuffer = function (frameInfo, item) {
  frameInfo.buffers.push(item.chunk);
  // NOTE(review): frameInfo.bodyLength is never assigned on frameInfo (only
  // frameInfo.bufferLength is), so the total length passed here is undefined and
  // Buffer.concat falls back to the sum of all parts. Presumably this should be
  // item.header.bodyLength — verify before changing, as readers may tolerate
  // trailing bytes.
  const result = Buffer.concat(frameInfo.buffers, frameInfo.bodyLength);
  frameInfo.buffers = null;
  return result;
};

/**
 * Tries to read the result in the body of a RESULT message.
 * @param frameInfo Frame information, header / metadata
 * @param {FrameReader} reader
 */
Parser.prototype.parseResult = function (frameInfo, reader) {
  let result;
  // As we might be streaming and the frame buffer might not be complete,
  // read the metadata and different types of result values in a try-catch.
  // Store the reader position
  const originalOffset = reader.offset;
  try {
    if (!frameInfo.meta) {
      frameInfo.kind = reader.readInt();
      // Spec 4.2.5
      switch (frameInfo.kind) {
        case types.resultKind.voidResult:
          result = { header: frameInfo.header, flags: frameInfo.flagsInfo };
          break;
        case types.resultKind.rows:
          // Parse the rows metadata, the rest of the response is going to be parsed afterwards
          frameInfo.meta = reader.readMetadata(frameInfo.kind);
          break;
        case types.resultKind.setKeyspace:
          result = { header: frameInfo.header, keyspaceSet: reader.readString(), flags: frameInfo.flagsInfo };
          break;
        case types.resultKind.prepared:
        {
          const preparedId = utils.copyBuffer(reader.readShortBytes());
          frameInfo.meta = reader.readMetadata(frameInfo.kind);
          result = { header: frameInfo.header, id: preparedId, meta: frameInfo.meta, flags: frameInfo.flagsInfo };
          break;
        }
        case types.resultKind.schemaChange:
          result = { header: frameInfo.header, schemaChange: reader.parseSchemaChange(), flags: frameInfo.flagsInfo };
          break;
        default:
          // Fixed: the constructor was previously invoked without `new`, which
          // throws a TypeError under 'use strict' instead of the intended error.
          throw new errors.DriverInternalError('Unexpected result kind: ' + frameInfo.kind);
      }
    }
  }
  catch (e) {
    return this.handleParsingError(e, frameInfo, reader, originalOffset);
  }
  if (result) {
    if (frameInfo.emitted) {
      // It may contain additional metadata and info that it's not being parsed
      return;
    }
    frameInfo.emitted = true;
    return this.push(result);
  }
  if (reader.remainingLength() > 0) {
    this.parseRows(frameInfo, reader);
  }
};

/**
 * Parses the rows portion of a RESULT message, emitting one item per row.
 * @param frameInfo
 * @param {FrameReader} reader
 */
Parser.prototype.parseRows = function (frameInfo, reader) {
  if (frameInfo.parsingError) {
    // No more processing on this frame
    return;
  }
  if (frameInfo.rowLength === undefined) {
    try {
      frameInfo.rowLength = reader.readInt();
    }
    catch (e) {
      return this.handleParsingError(e, frameInfo, reader);
    }
  }
  if (frameInfo.rowLength === 0) {
    return this.push({
      header: frameInfo.header,
      result: { rows: utils.emptyArray, meta: frameInfo.meta, flags: frameInfo.flagsInfo }
    });
  }
  const meta = frameInfo.meta;
  frameInfo.rowIndex = frameInfo.rowIndex || 0;
  for (let i = frameInfo.rowIndex; i < frameInfo.rowLength; i++) {
    const rowOffset = reader.offset;
    const row = new types.Row(meta.columns);
    let cellBuffer;
    for (let j = 0; j < meta.columns.length; j++) {
      const c = meta.columns[j];
      try {
        cellBuffer = reader.readBytes();
      }
      catch (e) {
        // Possibly an incomplete cell: buffer from the row start and resume later
        return this.handleParsingError(e, frameInfo, reader, rowOffset, i);
      }
      try {
        row[c.name] = this.encoder.decode(cellBuffer, c.type);
      }
      catch (e) {
        // Something went wrong while decoding, we are not going to be able to recover
        return this.handleParsingError(e, frameInfo, null);
      }
    }
    this.push({
      header: frameInfo.header,
      row: row,
      meta: frameInfo.meta,
      byRow: frameInfo.byRow,
      length: frameInfo.rowLength,
      flags: frameInfo.flagsInfo
    });
  }
  if (frameInfo.byRow) {
    // Use an event item to identify that all the streaming rows have finished processing
    this.push({
      header: frameInfo.header,
      byRowCompleted: true,
      meta: frameInfo.meta,
      length: frameInfo.rowLength,
      flags: frameInfo.flagsInfo
    });
  }
};

/**
 * Sets parser options (ie: how to yield the results as they are parsed)
 * @param {Number} id Id of the stream
 * @param options
 */
Parser.prototype.setOptions = function (id, options) {
  if (this.frames[id.toString()]) {
    // NOTE(review): DriverError is defined in ./errors — verify that ./types
    // re-exports it; otherwise `types.DriverError` is undefined and this line
    // throws a TypeError rather than the intended DriverError.
    throw new types.DriverError('There was already state for this frame');
  }
  this.frames[id.toString()] = options;
};

/**
 * Manually clears the frame options.
 * This class already clears the provided options when the frame ends, so it's usually not required to invoke this
 * method.
 * When manually setting the options for continuous paging, it's possible that the frame options are set while
 * it's being cancelled.
 * @param {Number} id The streamId
 */
Parser.prototype.clearOptions = function (id) {
  delete this.frames[id.toString()];
};

/**
 * Gets the frame info from the internal state.
 * In case it is not there, it creates it.
 * In case the frame ended, it removes it from the internal state.
 */
Parser.prototype.frameState = function (item) {
  let frameInfo = this.frames[item.header.streamId];
  if (!frameInfo) {
    frameInfo = {};
    if (!item.frameEnded) {
      // Store it in the frames
      this.frames[item.header.streamId] = frameInfo;
    }
  }
  else if (item.frameEnded) {
    // If it was already stored, remove it
    delete this.frames[item.header.streamId];
  }
  frameInfo.header = item.header;
  return frameInfo;
};

/**
 * Handles a parsing error: pushes an error if it's unexpected, or buffers the cell if streaming.
 * @param {Error} e
 * @param frameInfo
 * @param {FrameReader} reader
 * @param {Number} [originalOffset]
 * @param {Number} [rowIndex]
 */
Parser.prototype.handleParsingError = function (e, frameInfo, reader, originalOffset, rowIndex) {
  if (reader && frameInfo.isStreaming && (e instanceof RangeError)) {
    // A controlled error (not enough bytes yet): buffer from offset and move on.
    // e.expectedLength is assumed to be set by FrameReader on such RangeErrors.
    return this.bufferResultCell(frameInfo, reader, originalOffset, rowIndex, e.expectedLength);
  }
  frameInfo.parsingError = true;
  frameInfo.cellBuffer = null;
  this.push({ header: frameInfo.header, error: e });
};
+ * @param frameInfo + * @param {FrameReader} reader + * @param {Number} [originalOffset] + * @param {Number} [rowIndex] + * @param {Number} [expectedLength] + */ +Parser.prototype.bufferResultCell = function (frameInfo, reader, originalOffset, rowIndex, expectedLength) { + if (!originalOffset && originalOffset !== 0) { + originalOffset = reader.offset; + } + frameInfo.rowIndex = rowIndex; + const buffer = reader.slice(originalOffset); + frameInfo.cellBuffer = { + parts: [ buffer ], + byteLength: buffer.length, + expectedLength: expectedLength + }; +}; + +/** + * Represents a writable streams that emits results + */ +function ResultEmitter(options) { + Writable.call(this, options); + /** + * Stores the rows for frames that needs to be yielded as one result with many rows + */ + this.rowBuffer = {}; +} + +util.inherits(ResultEmitter, Writable); + +ResultEmitter.prototype._write = function (item, encoding, callback) { + let error = null; + try { + this.each(item); + } + catch (err) { + error = err; + } + callback(error); +}; + + +/** + * Analyzes the item and emit the corresponding event + */ +ResultEmitter.prototype.each = function (item) { + if (item.error || item.result) { + //Its either an error or an empty array rows + //no transformation needs to be made + return this.emit('result', item.header, item.error, item.result); + } + if (item.frameEnded) { + return this.emit('frameEnded', item.header); + } + if (item.lastContinuousPage) { + return this.emit('lastContinuousPage', item.header); + } + if (item.byRowCompleted) { + return this.emit('byRowCompleted', item.header, item.row, item.meta, item.flags); + } + if (item.byRow) { + //it should be yielded by row + return this.emit('row', item.header, item.row, item.meta, item.length, item.flags); + } + if (item.row) { + //it should be yielded as a result + //it needs to be buffered to an array of rows + return this.bufferAndEmit(item); + } + if (item.event) { + //its an event from Cassandra + return 
this.emit('nodeEvent', item.header, item.event); + } + //its a raw response (object with flags) + return this.emit('result', item.header, null, item); +}; + +/** + * Buffers the rows until the result set is completed and emits the result event. + */ +ResultEmitter.prototype.bufferAndEmit = function (item) { + let rows = this.rowBuffer[item.header.streamId]; + if (!rows) { + rows = this.rowBuffer[item.header.streamId] = []; + } + rows.push(item.row); + if (rows.length === item.length) { + this.emit('result', item.header, null, { rows: rows, meta: item.meta, flags: item.flags}); + delete this.rowBuffer[item.header.streamId]; + } +}; + +exports.Protocol = Protocol; +exports.Parser = Parser; +exports.ResultEmitter = ResultEmitter; diff --git a/node_modules/cassandra-driver/lib/token.js b/node_modules/cassandra-driver/lib/token.js new file mode 100644 index 0000000..dabed71 --- /dev/null +++ b/node_modules/cassandra-driver/lib/token.js @@ -0,0 +1,286 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const types = require('./types'); +const util = require('util'); + +const _Murmur3TokenType = types.dataTypes.getByName('bigint'); +const _RandomTokenType = types.dataTypes.getByName('varint'); +const _OrderedTokenType = types.dataTypes.getByName('blob'); + +/** + * Represents a token on the Cassandra ring. 
+ */ +class Token { + constructor(value) { + this._value = value; + } + + /** + * @returns {{code: number, info: *|Object}} The type info for the + * type of the value of the token. + */ + getType() { + throw new Error('You must implement a getType function for this Token instance'); + } + + /** + * @returns {*} The raw value of the token. + */ + getValue() { + return this._value; + } + + toString() { + return this._value.toString(); + } + + /** + * Returns 0 if the values are equal, 1 if greater than other, -1 + * otherwise. + * + * @param {Token} other + * @returns {Number} + */ + compare(other) { + return this._value.compare(other._value); + } + + equals(other) { + return this.compare(other) === 0; + } + + inspect() { + return this.constructor.name + ' { ' + this.toString() + ' }'; + } +} + +/** + * Represents a token from a Cassandra ring where the partitioner + * is Murmur3Partitioner. + * + * The raw token type is a varint (represented by MutableLong). + */ +class Murmur3Token extends Token { + constructor(value) { + super(value); + } + + getType() { + return _Murmur3TokenType; + } +} + +/** + * Represents a token from a Cassandra ring where the partitioner + * is RandomPartitioner. + * + * The raw token type is a bigint (represented by Number). + */ +class RandomToken extends Token { + constructor(value) { + super(value); + } + + getType() { + return _RandomTokenType; + } +} + +/** + * Represents a token from a Cassandra ring where the partitioner + * is ByteOrderedPartitioner. + * + * The raw token type is a blob (represented by Buffer or Array). + */ +class ByteOrderedToken extends Token { + constructor(value) { + super(value); + } + + getType() { + return _OrderedTokenType; + } + + toString() { + return this._value.toString('hex').toUpperCase(); + } +} + +/** + * Represents a range of tokens on a Cassandra ring. + * + * A range is start-exclusive and end-inclusive. 
It is empty when + * start and end are the same token, except if that is the minimum + * token, in which case the range covers the whole ring (this is + * consistent with the behavior of CQL range queries). + * + * Note that CQL does not handle wrapping. To query all partitions + * in a range, see {@link unwrap}. + */ +class TokenRange { + constructor(start, end, tokenizer) { + this.start = start; + this.end = end; + Object.defineProperty(this, '_tokenizer', { value: tokenizer, enumerable: false}); + } + + /** + * Splits this range into a number of smaller ranges of equal "size" + * (referring to the number of tokens, not the actual amount of data). + * + * Splitting an empty range is not permitted. But not that, in edge + * cases, splitting a range might produce one or more empty ranges. + * + * @param {Number} numberOfSplits Number of splits to make. + * @returns {TokenRange[]} Split ranges. + * @throws {Error} If splitting an empty range. + */ + splitEvenly(numberOfSplits) { + if (numberOfSplits < 1) { + throw new Error(util.format("numberOfSplits (%d) must be greater than 0.", numberOfSplits)); + } + if (this.isEmpty()) { + throw new Error("Can't split empty range " + this.toString()); + } + + const tokenRanges = []; + const splitPoints = this._tokenizer.split(this.start, this.end, numberOfSplits); + let splitStart = this.start; + let splitEnd; + for (let splitIndex = 0; splitIndex < splitPoints.length; splitIndex++) { + splitEnd = splitPoints[splitIndex]; + tokenRanges.push(new TokenRange(splitStart, splitEnd, this._tokenizer)); + splitStart = splitEnd; + } + tokenRanges.push(new TokenRange(splitStart, this.end, this._tokenizer)); + return tokenRanges; + } + + /** + * A range is empty when start and end are the same token, except if + * that is the minimum token, in which case the range covers the + * whole ring. This is consistent with the behavior of CQL range + * queries. + * + * @returns {boolean} Whether this range is empty. 
+ */ + isEmpty() { + return this.start.equals(this.end) && !this.start.equals(this._tokenizer.minToken()); + } + + /** + * A range wraps around the end of the ring when the start token + * is greater than the end token and the end token is not the + * minimum token. + * + * @returns {boolean} Whether this range wraps around. + */ + isWrappedAround() { + return this.start.compare(this.end) > 0 && !this.end.equals(this._tokenizer.minToken()); + } + + /** + * Splits this range into a list of two non-wrapping ranges. + * + * This will return the range itself if it is non-wrapped, or two + * ranges otherwise. + * + * This is useful for CQL range queries, which do not handle + * wrapping. + * + * @returns {TokenRange[]} The list of non-wrapping ranges. + */ + unwrap() { + if (this.isWrappedAround()) { + return [ + new TokenRange(this.start, this._tokenizer.minToken(), this._tokenizer), + new TokenRange(this._tokenizer.minToken(), this.end, this._tokenizer) + ]; + } + return [this]; + } + + /** + * Whether this range contains a given Token. + * + * @param {*} token Token to check for. + * @returns {boolean} Whether or not the Token is in this range. + */ + contains(token) { + if (this.isEmpty()) { + return false; + } + const minToken = this._tokenizer.minToken(); + if (this.end.equals(minToken)) { + if (this.start.equals(minToken)) { + return true; // ]minToken, minToken] === full ring + } else if (token.equals(minToken)) { + return true; + } + return token.compare(this.start) > 0; + } + + const isAfterStart = token.compare(this.start) > 0; + const isBeforeEnd = token.compare(this.end) <= 0; + // if wrapped around ring, token is in ring if its after start or before end. + // otherwise, token is in ring if its after start and before end. + return this.isWrappedAround() + ? isAfterStart || isBeforeEnd + : isAfterStart && isBeforeEnd; + } + + /** + * Determines if the input range is equivalent to this one. + * + * @param {TokenRange} other Range to compare with. 
+ * @returns {boolean} Whether or not the ranges are equal. + */ + equals(other) { + if (other === this) { + return true; + } else if (other instanceof TokenRange) { + return this.compare(other) === 0; + } + return false; + } + + /** + * Returns 0 if the values are equal, otherwise compares against + * start, if start is equal, compares against end. + * + * @param {TokenRange} other Range to compare with. + * @returns {Number} + */ + compare(other) { + const compareStart = this.start.compare(other.start); + return compareStart !== 0 ? compareStart : this.end.compare(other.end); + } + + toString() { + return util.format(']%s, %s]', + this.start.toString(), + this.end.toString() + ); + } +} + +exports.Token = Token; +exports.TokenRange = TokenRange; +exports.ByteOrderedToken = ByteOrderedToken; +exports.Murmur3Token = Murmur3Token; +exports.RandomToken = RandomToken; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/tokenizer.js b/node_modules/cassandra-driver/lib/tokenizer.js new file mode 100644 index 0000000..57340e6 --- /dev/null +++ b/node_modules/cassandra-driver/lib/tokenizer.js @@ -0,0 +1,584 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; + +const types = require('./types'); +const token = require('./token'); +const utils = require('./utils'); +const MutableLong = require('./types/mutable-long'); +const { Integer } = types; + +// Murmur3 constants +//-0x783C846EEEBDAC2B +const mconst1 = new MutableLong(0x53d5, 0x1142, 0x7b91, 0x87c3); +//0x4cf5ad432745937f +const mconst2 = new MutableLong(0x937f, 0x2745, 0xad43, 0x4cf5); +const mlongFive = MutableLong.fromNumber(5); +//0xff51afd7ed558ccd +const mconst3 = new MutableLong(0x8ccd, 0xed55, 0xafd7, 0xff51); +//0xc4ceb9fe1a85ec53 +const mconst4 = new MutableLong(0xec53, 0x1a85, 0xb9fe, 0xc4ce); +const mconst5 = MutableLong.fromNumber(0x52dce729); +const mconst6 = MutableLong.fromNumber(0x38495ab5); + +/** + * Represents a set of methods that are able to generate and parse tokens for the C* partitioner. + * @abstract + */ +class Tokenizer { + constructor() { + + } + + /** + * Creates a token based on the Buffer value provided + * @abstract + * @param {Buffer|Array} value + * @returns {Token} Computed token + */ + hash(value) { + throw new Error('You must implement a hash function for the tokenizer'); + } + + /** + * Parses a token string and returns a representation of the token + * @abstract + * @param {String} value + */ + parse(value) { + throw new Error('You must implement a parse function for the tokenizer'); + } + + minToken() { + throw new Error('You must implement a minToken function for the tokenizer'); + } + + /** + * Splits the range specified by start and end into numberOfSplits equal parts. + * @param {Token} start Starting token + * @param {Token} end End token + * @param {Number} numberOfSplits Number of splits to make. + */ + split(start, end, numberOfSplits) { + throw new Error('You must implement a split function for the tokenizer'); + } + + /** + * Common implementation for splitting token ranges when start is in + * a shared Integer format. 
+ * + * @param {Integer} start Starting token + * @param {Integer} range How large the range of the split is + * @param {Integer} ringEnd The end point of the ring so we know where to wrap + * @param {Integer} ringLength The total size of the ring + * @param {Number} numberOfSplits The number of splits to make + * @returns {Array} The evenly-split points on the range + */ + splitBase(start, range, ringEnd, ringLength, numberOfSplits) { + const numberOfSplitsInt = Integer.fromInt(numberOfSplits); + const divider = range.divide(numberOfSplitsInt); + let remainder = range.modulo(numberOfSplitsInt); + + const results = []; + let current = start; + const dividerPlusOne = divider.add(Integer.ONE); + + for(let i = 1; i < numberOfSplits; i++) { + if (remainder.greaterThan(Integer.ZERO)) { + current = current.add(dividerPlusOne); + } else { + current = current.add(divider); + } + if (ringLength && current.greaterThan(ringEnd)) { + current = current.subtract(ringLength); + } + results.push(current); + remainder = remainder.subtract(Integer.ONE); + } + return results; + } + + /** + * Return internal string based representation of a Token. + * @param {Token} token + */ + stringify(token) { + return token.getValue().toString(); + } +} + +/** + * Uniformly distributes data across the cluster based on Cassandra flavored Murmur3 hashed values. + */ +class Murmur3Tokenizer extends Tokenizer { + + constructor() { + super(); + } + + /** + * @param {Buffer} value + * @return {Murmur3Token} + */ + hash(value) { + // This is an adapted version of the MurmurHash.hash3_x64_128 from Cassandra used + // for M3P. Compared to that methods, there's a few inlining of arguments and we + // only return the first 64-bits of the result since that's all M3 partitioner uses. + + const data = value; + let offset = 0; + const length = data.length; + + const nblocks = length >> 4; // Process as 128-bit blocks. 
+ + const h1 = new MutableLong(); + const h2 = new MutableLong(); + let k1 = new MutableLong(); + let k2 = new MutableLong(); + + for (let i = 0; i < nblocks; i++) { + k1 = this.getBlock(data, offset, i * 2); + k2 = this.getBlock(data, offset, i * 2 + 1); + + k1.multiply(mconst1); + this.rotl64(k1, 31); + k1.multiply(mconst2); + + h1.xor(k1); + this.rotl64(h1, 27); + h1.add(h2); + h1.multiply(mlongFive).add(mconst5); + + k2.multiply(mconst2); + this.rotl64(k2, 33); + k2.multiply(mconst1); + h2.xor(k2); + this.rotl64(h2, 31); + h2.add(h1); + h2.multiply(mlongFive).add(mconst6); + } + //---------- + // tail + + // Advance offset to the unprocessed tail of the data. + offset += nblocks * 16; + + k1 = new MutableLong(); + k2 = new MutableLong(); + + /* eslint-disable no-fallthrough */ + switch(length & 15) { + case 15: + k2.xor(fromSignedByte(data[offset+14]).shiftLeft(48)); + case 14: + k2.xor(fromSignedByte(data[offset+13]).shiftLeft(40)); + case 13: + k2.xor(fromSignedByte(data[offset+12]).shiftLeft(32)); + case 12: + k2.xor(fromSignedByte(data[offset+11]).shiftLeft(24)); + case 11: + k2.xor(fromSignedByte(data[offset+10]).shiftLeft(16)); + case 10: + k2.xor(fromSignedByte(data[offset+9]).shiftLeft(8)); + case 9: + k2.xor(fromSignedByte(data[offset+8])); + k2.multiply(mconst2); + this.rotl64(k2, 33); + k2.multiply(mconst1); + h2.xor(k2); + case 8: + k1.xor(fromSignedByte(data[offset+7]).shiftLeft(56)); + case 7: + k1.xor(fromSignedByte(data[offset+6]).shiftLeft(48)); + case 6: + k1.xor(fromSignedByte(data[offset+5]).shiftLeft(40)); + case 5: + k1.xor(fromSignedByte(data[offset+4]).shiftLeft(32)); + case 4: + k1.xor(fromSignedByte(data[offset+3]).shiftLeft(24)); + case 3: + k1.xor(fromSignedByte(data[offset+2]).shiftLeft(16)); + case 2: + k1.xor(fromSignedByte(data[offset+1]).shiftLeft(8)); + case 1: + k1.xor(fromSignedByte(data[offset])); + k1.multiply(mconst1); + this.rotl64(k1,31); + k1.multiply(mconst2); + h1.xor(k1); + } + /* eslint-enable no-fallthrough */ + + 
h1.xor(MutableLong.fromNumber(length)); + h2.xor(MutableLong.fromNumber(length)); + + h1.add(h2); + h2.add(h1); + + + this.fmix(h1); + this.fmix(h2); + + h1.add(h2); + + return new token.Murmur3Token(h1); + } + + /** + * + * @param {Array} key + * @param {Number} offset + * @param {Number} index + * @return {MutableLong} + */ + getBlock(key, offset, index) { + const i8 = index << 3; + const blockOffset = offset + i8; + return new MutableLong( + (key[blockOffset]) | (key[blockOffset + 1] << 8), + (key[blockOffset + 2]) | (key[blockOffset + 3] << 8), + (key[blockOffset + 4]) | (key[blockOffset + 5] << 8), + (key[blockOffset + 6]) | (key[blockOffset + 7] << 8) + ); + } + + /** + * @param {MutableLong} v + * @param {Number} n + */ + rotl64(v, n) { + const left = v.clone().shiftLeft(n); + v.shiftRightUnsigned(64 - n).or(left); + } + + /** @param {MutableLong} k */ + fmix(k) { + k.xor(new MutableLong(k.getUint16(2) >>> 1 | ((k.getUint16(3) << 15) & 0xffff), k.getUint16(3) >>> 1, 0, 0)); + k.multiply(mconst3); + const other = new MutableLong( + (k.getUint16(2) >>> 1) | ((k.getUint16(3) << 15) & 0xffff), + k.getUint16(3) >>> 1, + 0, + 0 + ); + k.xor(other); + k.multiply(mconst4); + k.xor(new MutableLong(k.getUint16(2) >>> 1 | (k.getUint16(3) << 15 & 0xffff), k.getUint16(3) >>> 1, 0, 0)); + } + + /** + * Parses a int64 decimal string representation into a MutableLong. + * @param {String} value + * @returns {Murmur3Token} + */ + parse(value) { + return new token.Murmur3Token(MutableLong.fromString(value)); + } + + minToken() { + if (!this._minToken) { + // minimum long value. 
+ this._minToken = this.parse('-9223372036854775808'); + } + return this._minToken; + } + + maxToken() { + if (!this._maxToken) { + this._maxToken = this.parse('9223372036854775807'); + } + return this._maxToken; + } + + maxValue() { + if (!this._maxValue) { + this._maxValue = Integer.fromString('9223372036854775807'); + } + return this._maxValue; + } + + minValue() { + if (!this._minValue) { + this._minValue = Integer.fromString('-9223372036854775808'); + } + return this._minValue; + } + + ringLength() { + if (!this._ringLength) { + this._ringLength = this.maxValue().subtract(this.minValue()); + } + return this._ringLength; + } + + split(start, end, numberOfSplits) { + // ]min, min] means the whole ring. + if (start.equals(end) && start.equals(this.minToken())) { + end = this.maxToken(); + } + + const startVal = Integer.fromString(start.getValue().toString()); + const endVal = Integer.fromString(end.getValue().toString()); + + let range = endVal.subtract(startVal); + if (range.isNegative()) { + range = range.add(this.ringLength()); + } + + const values = this.splitBase(startVal, range, this.maxValue(), this.ringLength(), numberOfSplits); + return values.map(v => this.parse(v.toString())); + } + + stringify(token) { + // Get the underlying MutableLong + const value = token.getValue(); + // We need a way to uniquely represent a token, it doesn't have to be the decimal string representation + // Using the uint16 avoids divisions and other expensive operations on the longs + return value.getUint16(0) + ',' + value.getUint16(1) + ',' + value.getUint16(2) + ',' + value.getUint16(3); + } +} + +/** + * Uniformly distributes data across the cluster based on MD5 hash values. 
+ */ +class RandomTokenizer extends Tokenizer { + constructor() { + super(); + // eslint-disable-next-line + this._crypto = require('crypto'); + } + + /** + * @param {Buffer|Array} value + * @returns {RandomToken} + */ + hash(value) { + if (Array.isArray(value)) { + value = utils.allocBufferFromArray(value); + } + const hashedValue = this._crypto.createHash('md5').update(value).digest(); + return new token.RandomToken(Integer.fromBuffer(hashedValue).abs()); + } + + /** + * @returns {Token} + */ + parse(value) { + return new token.RandomToken(Integer.fromString(value)); + } + + minToken() { + if (!this._minToken) { + this._minToken = this.parse('-1'); + } + return this._minToken; + } + + maxValue() { + if (!this._maxValue) { + this._maxValue = Integer.fromNumber(Math.pow(2, 127)); + } + return this._maxValue; + } + + maxToken() { + if (!this._maxToken) { + this._maxToken = new token.RandomToken(this.maxValue()); + } + return this._maxToken; + } + + ringLength() { + if (!this._ringLength) { + this._ringLength = this.maxValue().add(Integer.ONE); + } + return this._ringLength; + } + + split(start, end, numberOfSplits) { + // ]min, min] means the whole ring. + if (start.equals(end) && start.equals(this.minToken())) { + end = this.maxToken(); + } + + const startVal = start.getValue(); + const endVal = end.getValue(); + + let range = endVal.subtract(startVal); + if (range.lessThan(Integer.ZERO)) { + range = range.add(this.ringLength()); + } + + const values = this.splitBase(startVal, range, this.maxValue(), this.ringLength(), numberOfSplits); + return values.map(v => new token.RandomToken(v)); + } +} + +class ByteOrderedTokenizer extends Tokenizer { + constructor() { + super(); + } + + /** + * @param {Buffer} value + * @returns {ByteOrderedToken} + */ + hash(value) { + // strip any trailing zeros as tokens with trailing zeros are equivalent + // to those who don't have them. 
+ if (Array.isArray(value)) { + value = utils.allocBufferFromArray(value); + } + let zeroIndex = value.length; + for(let i = value.length - 1; i > 0; i--) { + if(value[i] === 0) { + zeroIndex = i; + } else { + break; + } + } + return new token.ByteOrderedToken(value.slice(0, zeroIndex)); + } + + stringify(token) { + return token.getValue().toString('hex'); + } + + parse(value) { + return this.hash(utils.allocBufferFromString(value, 'hex')); + } + + minToken() { + if (!this._minToken) { + this._minToken = this.hash([]); + } + return this._minToken; + } + + _toNumber(buffer, significantBytes) { + // Convert a token's byte array to a number in order to perform computations. + // This depends on the number of significant bytes that is used to normalize all tokens + // to the same size. For example if the token is 0x01 but significant bytes is 2, the + // result is 0x0100. + let target = buffer; + if(buffer.length !== significantBytes) { + target = Buffer.alloc(significantBytes); + buffer.copy(target); + } + + // similar to Integer.fromBuffer except we force the sign to 0. + const bits = new Array(Math.ceil(target.length / 4)); + for (let i = 0; i < bits.length; i++) { + let offset = target.length - ((i + 1) * 4); + let value; + if (offset < 0) { + //The buffer length is not multiple of 4 + offset = offset + 4; + value = 0; + for (let j = 0; j < offset; j++) { + const byte = target[j]; + value = value | (byte << (offset - j - 1) * 8); + } + } + else { + value = target.readInt32BE(offset); + } + bits[i] = value; + } + return new Integer(bits, 0); + } + + _toBuffer(number, significantBytes) { + // Convert numeric representation back to a buffer. + const buffer = Integer.toBuffer(number); + if (buffer.length === significantBytes) { + return buffer; + } + + // if first byte is a sign byte, skip it. 
+ let start, length; + if (buffer[0] === 0) { + start = 1; + length = buffer.length - 1; + } else { + start = 0; + length = buffer.length; + } + + const target = Buffer.alloc(significantBytes); + buffer.copy(target, significantBytes - length, start, length + start); + return target; + } + + split(start, end, numberOfSplits) { + const tokenOrder = start.compare(end); + + if (tokenOrder === 0 && start.equals(this.minToken())) { + throw new Error("Cannot split whole ring with ordered partitioner"); + } + + let startVal, endVal, range, ringLength, ringEnd; + const intNumberOfSplits = Integer.fromNumber(numberOfSplits); + // Since tokens are compared lexicographically, convert to numbers using the + // largest length (i.e. given 0x0A and 0x0BCD, switch to 0x0A00 and 0x0BCD) + let significantBytes = Math.max(start.getValue().length, end.getValue().length); + if (tokenOrder < 0) { + let addedBytes = 0; + while (true) { + startVal = this._toNumber(start.getValue(), significantBytes); + endVal = this._toNumber(end.getValue(), significantBytes); + range = endVal.subtract(startVal); + if (addedBytes === 4 || range.compare(intNumberOfSplits) >= 0) { + break; + } + significantBytes += 1; + addedBytes += 1; + } + } else { + let addedBytes = 0; + while (true) { + startVal = this._toNumber(start.getValue(), significantBytes); + endVal = this._toNumber(end.getValue(), significantBytes); + ringLength = Integer.fromNumber(Math.pow(2, significantBytes * 8)); + ringEnd = ringLength.subtract(Integer.ONE); + range = endVal.subtract(startVal).add(ringLength); + if (addedBytes === 4 || range.compare(intNumberOfSplits) >= 0) { + break; + } + significantBytes += 1; + addedBytes += 1; + } + } + + const values = this.splitBase(startVal, range, ringEnd, ringLength, numberOfSplits); + return values.map(v => new token.ByteOrderedToken(this._toBuffer(v, significantBytes))); + } +} + +/** + * @param {Number} value + * @return {MutableLong} + */ +function fromSignedByte(value) { + if (value < 128) { 
+ return new MutableLong(value, 0, 0, 0); + } + return new MutableLong((value - 256) & 0xffff, 0xffff, 0xffff, 0xffff); +} + +exports.Murmur3Tokenizer = Murmur3Tokenizer; +exports.RandomTokenizer = RandomTokenizer; +exports.ByteOrderedTokenizer = ByteOrderedTokenizer; diff --git a/node_modules/cassandra-driver/lib/tracker/index.d.ts b/node_modules/cassandra-driver/lib/tracker/index.d.ts new file mode 100644 index 0000000..e39e8ba --- /dev/null +++ b/node_modules/cassandra-driver/lib/tracker/index.d.ts @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { ExecutionOptions, Host } from '../../'; + +export namespace tracker { + interface RequestTracker { + onError( + host: Host, + query: string | Array<{ query: string, params?: any }>, + parameters: any[] | { [key: string]: any } | null, + executionOptions: ExecutionOptions, + requestLength: number, + err: Error, + latency: number[]): void; + + onSuccess( + host: Host, + query: string | Array<{ query: string, params?: any }>, + parameters: any[] | { [key: string]: any } | null, + executionOptions: ExecutionOptions, + requestLength: number, + responseLength: number, + latency: number[]): void; + + shutdown(): void; + } + + class RequestLogger implements RequestTracker { + constructor(options: { + slowThreshold?: number; + logNormalRequests?: boolean; + logErroredRequests?: boolean; + messageMaxQueryLength?: number; + messageMaxParameterValueLength?: number; + messageMaxErrorStackTraceLength?: number; + }); + + onError(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; + + onSuccess(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; + + shutdown(): void; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/tracker/index.js b/node_modules/cassandra-driver/lib/tracker/index.js new file mode 100644 index 0000000..848d5bb --- /dev/null +++ b/node_modules/cassandra-driver/lib/tracker/index.js @@ -0,0 +1,25 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Tracker module. + * @module tracker + */ + +exports.RequestLogger = require('./request-logger'); +exports.RequestTracker = require('./request-tracker'); \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/tracker/request-logger.js b/node_modules/cassandra-driver/lib/tracker/request-logger.js new file mode 100644 index 0000000..da3d208 --- /dev/null +++ b/node_modules/cassandra-driver/lib/tracker/request-logger.js @@ -0,0 +1,294 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const events = require('events'); +const RequestTracker = require('./request-tracker'); +const errors = require('../errors'); +const { format } = require('util'); + +const nanosToMillis = 1000000; +const defaultMessageMaxQueryLength = 500; +const defaultMaxParameterValueLength = 50; +const defaultMaxErrorStackTraceLength = 200; + +/** + * A request tracker that logs the requests executed through the session, according to a set of + * configurable options. 
+ * @implements {module:tracker~RequestTracker} + * @alias module:tracker~RequestLogger + * @example Logging slow queries + * const requestLogger = new RequestLogger({ slowThreshold: 1000 }); + * requestLogger.emitter.on('show', message => console.log(message)); + * // Add the requestLogger to the client options + * const client = new Client({ contactPoints, requestTracker: requestLogger }); + */ +class RequestLogger extends RequestTracker { + + /** + * Creates a new instance of {@link RequestLogger}. + * @param {Object} options + * @param {Number} [options.slowThreshold] The threshold in milliseconds beyond which queries are considered 'slow' + * and logged as such by the driver. + * @param {Number} [options.requestSizeThreshold] The threshold in bytes beyond which requests are considered 'large' + * and logged as such by the driver. + * @param {Boolean} [options.logNormalRequests] Determines whether it should emit 'normal' events for every + * EXECUTE, QUERY and BATCH request executed successfully, useful only for debugging. This option can be modified + * after the client is connected using the property {@link RequestLogger#logNormalRequests}. + * @param {Boolean} [options.logErroredRequests] Determines whether it should emit 'failure' events for every + * EXECUTE, QUERY and BATCH request execution that resulted in an error. This option can be modified + * after the client is connected using the property {@link RequestLogger#logErroredRequests}. + * @param {Number} [options.messageMaxQueryLength] The maximum amount of characters that are logged from the query + * portion of the message. Defaults to 500. + * @param {Number} [options.messageMaxParameterValueLength] The maximum amount of characters of each query parameter + * value that will be included in the message. Defaults to 50. + * @param {Number} [options.messageMaxErrorStackTraceLength] The maximum amount of characters of the stack trace + * that will be included in the message. Defaults to 200. 
+ */ + constructor(options) { + super(); + if (!options) { + throw new errors.ArgumentError('RequestLogger options parameter is required'); + } + + this._options = options; + + /** + * Determines whether it should emit 'normal' events for every EXECUTE, QUERY and BATCH request executed + * successfully, useful only for debugging + * @type {Boolean} + */ + this.logNormalRequests = this._options.logNormalRequests; + + /** + * Determines whether it should emit 'failure' events for every EXECUTE, QUERY and BATCH request execution that + * resulted in an error + * @type {Boolean} + */ + this.logErroredRequests = this._options.logErroredRequests; + + /** + * The object instance that emits 'slow', 'large', 'normal' and + * 'failure' events. + * @type {EventEmitter} + */ + this.emitter = new events.EventEmitter(); + } + + /** + * Logs message if request execution was deemed too slow, large or if normal requests are logged. + * @override + */ + onSuccess(host, query, parameters, execOptions, requestLength, responseLength, latency) { + if (this._options.slowThreshold > 0 && toMillis(latency) > this._options.slowThreshold) { + this._logSlow(host, query, parameters, execOptions, requestLength, responseLength, latency); + } + else if (this._options.requestSizeThreshold > 0 && requestLength > this._options.requestSizeThreshold) { + this._logLargeRequest(host, query, parameters, execOptions, requestLength, responseLength, latency); + } + else if (this.logNormalRequests) { + this._logNormalRequest(host, query, parameters, execOptions, requestLength, responseLength, latency); + } + } + + /** + * Logs message if request execution was too large and/or encountered an error. 
+ * @override + */ + onError(host, query, parameters, execOptions, requestLength, err, latency) { + if (this._options.requestSizeThreshold > 0 && requestLength > this._options.requestSizeThreshold) { + this._logLargeErrorRequest(host, query, parameters, execOptions, requestLength, err, latency); + } + else if (this.logErroredRequests) { + this._logErrorRequest(host, query, parameters, execOptions, requestLength, err, latency); + } + } + + _logSlow(host, query, parameters, execOptions, requestLength, responseLength, latency) { + const message = format('[%s] Slow request, took %d ms (%s): %s', host.address, Math.floor(toMillis(latency)), + getPayloadSizes(requestLength, responseLength), getStatementInfo(query, parameters, execOptions, this._options)); + this.emitter.emit('slow', message); + } + + _logLargeRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { + const message = format('[%s] Request exceeded length, %s (took %d ms): %s', host.address, + getPayloadSizes(requestLength, responseLength), ~~toMillis(latency), + getStatementInfo(query, parameters, execOptions, this._options)); + this.emitter.emit('large', message); + } + + _logNormalRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { + const message = format('[%s] Request completed normally, took %d ms (%s): %s', host.address, ~~toMillis(latency), + getPayloadSizes(requestLength, responseLength), getStatementInfo(query, parameters, execOptions, this._options)); + this.emitter.emit('normal', message); + } + + _logLargeErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { + const maxStackTraceLength = this._options.messageMaxErrorStackTraceLength || defaultMaxErrorStackTraceLength; + const message = format('[%s] Request exceeded length and execution failed, %s (took %d ms): %s; error: %s', + host.address, getPayloadSizes(requestLength), ~~toMillis(latency), + getStatementInfo(query, parameters, execOptions, 
this._options), err.stack.substr(0, maxStackTraceLength)); + + // Use 'large' event and not 'failure' as this log is caused by exceeded length + this.emitter.emit('large', message); + } + + _logErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { + const maxStackTraceLength = this._options.messageMaxErrorStackTraceLength || defaultMaxErrorStackTraceLength; + const message = format('[%s] Request execution failed, took %d ms (%s): %s; error: %s', host.address, + ~~toMillis(latency), getPayloadSizes(requestLength), + getStatementInfo(query, parameters, execOptions, this._options), err.stack.substr(0, maxStackTraceLength)); + + // Avoid using 'error' as its a special event + this.emitter.emit('failure', message); + } +} + +function toMillis(latency) { + return latency[0] * 1000 + latency[1] / nanosToMillis; +} + +function getStatementInfo(query, parameters, execOptions, options) { + const maxQueryLength = options.messageMaxQueryLength || defaultMessageMaxQueryLength; + const maxParameterLength = options.messageMaxParameterValueLength || defaultMaxParameterValueLength; + + if (Array.isArray(query)) { + return getBatchStatementInfo(query, execOptions, maxQueryLength, maxParameterLength); + } + + // String concatenation is usually faster than Array#join() in V8 + let message = query.substr(0, maxQueryLength); + const remaining = maxQueryLength - message.length - 1; + message += getParametersInfo(parameters, remaining, maxParameterLength); + + if (!execOptions.isPrepared()) { + // This part of the message is not accounted for in "maxQueryLength" + message += ' (not prepared)'; + } + + return message; +} + +function getBatchStatementInfo(queries, execOptions, maxQueryLength, maxParameterLength) { + // This part of the message is not accounted for in "maxQueryLength" + let message = (execOptions.isBatchLogged() ? 'LOGGED ' : '') + 'BATCH w/ ' + queries.length + + (!execOptions.isPrepared() ? 
' not prepared' : '') + ' queries ('; + let remaining = maxQueryLength; + let i; + + for (i = 0; i < queries.length && remaining > 0; i++) { + let q = queries[i]; + const params = q.params; + if (typeof q !== 'string') { + q = q.query; + } + + if (i > 0) { + message += ','; + remaining--; + } + + const queryLength = Math.min(remaining, q.length); + message += q.substr(0, queryLength); + remaining -= queryLength; + + if (remaining <= 0) { + break; + } + + const parameters = getParametersInfo(params, remaining, maxParameterLength); + remaining -= parameters.length; + message += parameters; + } + + message += i < queries.length ? ',...)' : ')'; + return message; +} + +function getParametersInfo(params, remaining, maxParameterLength) { + if (remaining <= 3) { + // We need at least 3 chars to describe the parameters + // its OK to add more chars in an effort to be descriptive + return ' [...]'; + } + + if (!params) { + return ' []'; + } + + let paramStringifier = (index, length) => formatParam(params[index], length); + if (!Array.isArray(params)) { + const obj = params; + params = Object.keys(params); + paramStringifier = (index, length) => { + const key = params[index]; + let result = key.substr(0, length); + const rem = length - result.length - 1; + if (rem <= 0) { + return result; + } + result += ":" + formatParam(obj[key], rem); + return result; + }; + } + + let message = ' ['; + let i; + for (i = 0; remaining > 0 && i < params.length; i++) { + if (i > 0) { + message += ','; + remaining--; + } + + const paramString = paramStringifier(i, Math.min(maxParameterLength, remaining)); + remaining -= paramString.length; + message += paramString; + } + + if (i < params.length) { + message += '...'; + } + + message += ']'; + return message; +} + +function formatParam(value, maxLength) { + if (value === undefined) { + return 'undefined'; + } + + if (value === null) { + return 'null'; + } + + return value.toString().substr(0, maxLength); +} + +function 
getPayloadSizes(requestLength, responseLength) { + let message = 'request size ' + formatSize(requestLength); + if (responseLength !== undefined) { + message += ' / response size ' + formatSize(responseLength); + } + return message; +} + +function formatSize(length) { + return length > 1000 ? Math.round(length / 1024) + ' KB' : length + ' bytes'; +} + +module.exports = RequestLogger; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/tracker/request-tracker.js b/node_modules/cassandra-driver/lib/tracker/request-tracker.js new file mode 100644 index 0000000..3a00732 --- /dev/null +++ b/node_modules/cassandra-driver/lib/tracker/request-tracker.js @@ -0,0 +1,74 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +/** + * Tracks request execution for a {@link Client}. + *

+ * A {@link RequestTracker} can be configured in the client options. The Client will execute + * {@link RequestTracker#onSuccess} or {@link RequestTracker#onError} for every query or batch + * executed (QUERY, EXECUTE and BATCH requests). + *

+ * @interface + * @alias module:tracker~RequestTracker + */ +class RequestTracker { + + /** + * Invoked each time a query or batch request succeeds. + * @param {Host} host The node that acted as coordinator of the request. + * @param {String|Array} query In the case of prepared or unprepared query executions, the provided + * query string. For batch requests, an Array containing the queries and parameters provided. + * @param {Array|Object|null} parameters In the case of prepared or unprepared query executions, the provided + * parameters. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Number} requestLength Length of the body of the request. + * @param {Number} responseLength Length of the body of the response. + * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the + * remaining part of the real time that can't be represented in second precision (see process.hrtime()). + */ + onSuccess(host, query, parameters, executionOptions, requestLength, responseLength, latency) { + + } + + /** + * Invoked each time a query or batch request fails. + * @param {Host} host The node that acted as coordinator of the request. + * @param {String|Array} query In the case of prepared or unprepared query executions, the provided + * query string. For batch requests, an Array containing the queries and parameters provided. + * @param {Array|Object|null} parameters In the case of prepared or unprepared query executions, the provided + * parameters. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Number} requestLength Length of the body of the request. When the failure occurred before the request was + * written to the wire, the length will be 0. + * @param {Error} err The error that caused that caused the request to fail. 
+ * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the + * remaining part of the real time that can't be represented in second precision (see process.hrtime()). + */ + onError(host, query, parameters, executionOptions, requestLength, err, latency) { + + } + + /** + * Invoked when the Client is being shutdown. + */ + shutdown() { + + } +} + +module.exports = RequestTracker; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/big-decimal.js b/node_modules/cassandra-driver/lib/types/big-decimal.js new file mode 100644 index 0000000..39ec9f7 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/big-decimal.js @@ -0,0 +1,271 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const Integer = require('./integer'); +const utils = require('../utils'); + +/** @module types */ +/** + * Constructs an immutable arbitrary-precision signed decimal number. + * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} + * unscaled value and a 32-bit integer scale. If zero + * or positive, the scale is the number of digits to the right of the + * decimal point. If negative, the unscaled value of the number is + * multiplied by ten to the power of the negation of the scale. The + * value of the number represented by the BigDecimal is + * therefore (unscaledValue × 10-scale). 
+ * @class + * @classdesc The BigDecimal class provides operations for + * arithmetic, scale manipulation, rounding, comparison and + * format conversion. The {@link #toString} method provides a + * canonical representation of a BigDecimal. + * @param {Integer|Number} unscaledValue The integer part of the decimal. + * @param {Number} scale The scale of the decimal. + * @constructor + */ +function BigDecimal(unscaledValue, scale) { + if (typeof unscaledValue === 'number') { + unscaledValue = Integer.fromNumber(unscaledValue); + } + /** + * @type {Integer} + * @private + */ + this._intVal = unscaledValue; + /** + * @type {Number} + * @private + */ + this._scale = scale; +} + +/** + * Returns the BigDecimal representation of a buffer composed of the scale (int32BE) and the unsigned value (varint BE) + * @param {Buffer} buf + * @returns {BigDecimal} + */ +BigDecimal.fromBuffer = function (buf) { + const scale = buf.readInt32BE(0); + const unscaledValue = Integer.fromBuffer(buf.slice(4)); + return new BigDecimal(unscaledValue, scale); +}; + +/** + * Returns a buffer representation composed of the scale as a BE int 32 and the unsigned value as a BE varint + * @param {BigDecimal} value + * @returns {Buffer} + */ +BigDecimal.toBuffer = function (value) { + const unscaledValueBuffer = Integer.toBuffer(value._intVal); + const scaleBuffer = utils.allocBufferUnsafe(4); + scaleBuffer.writeInt32BE(value._scale, 0); + return Buffer.concat([scaleBuffer, unscaledValueBuffer], scaleBuffer.length + unscaledValueBuffer.length); +}; + +/** + * Returns a BigDecimal representation of the string + * @param {String} value + * @returns {BigDecimal} + */ +BigDecimal.fromString = function (value) { + if (!value) { + throw new TypeError('Invalid null or undefined value'); + } + value = value.trim(); + const scaleIndex = value.indexOf('.'); + let scale = 0; + if (scaleIndex >= 0) { + scale = value.length - 1 - scaleIndex; + value = value.substr(0, scaleIndex) + value.substr(scaleIndex + 1); + } 
+ return new BigDecimal(Integer.fromString(value), scale); +}; + +/** + * Returns a BigDecimal representation of the Number + * @param {Number} value + * @returns {BigDecimal} + */ +BigDecimal.fromNumber = function (value) { + if (isNaN(value)) { + return new BigDecimal(Integer.ZERO, 0); + } + let textValue = value.toString(); + if (textValue.indexOf('e') >= 0) { + //get until scale 20 + textValue = value.toFixed(20); + } + return BigDecimal.fromString(textValue); +}; + +/** + * Returns true if the value of the BigDecimal instance and other are the same + * @param {BigDecimal} other + * @returns {Boolean} + */ +BigDecimal.prototype.equals = function (other) { + return ((other instanceof BigDecimal) && this.compare(other) === 0); +}; + +BigDecimal.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * @param {BigDecimal} other + * @returns {boolean} + */ +BigDecimal.prototype.notEquals = function (other) { + return !this.equals(other); +}; + +/** + * Compares this BigDecimal with the given one. + * @param {BigDecimal} other Integer to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ +BigDecimal.prototype.compare = function (other) { + const diff = this.subtract(other); + if (diff.isNegative()) { + return -1; + } + if (diff.isZero()) { + return 0; + } + return +1; +}; + +/** + * Returns the difference of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to subtract from this. + * @return {!BigDecimal} The BigDecimal result. 
+ */ +BigDecimal.prototype.subtract = function (other) { + const first = this; + if (first._scale === other._scale) { + return new BigDecimal(first._intVal.subtract(other._intVal), first._scale); + } + let diffScale; + let unscaledValue; + if (first._scale < other._scale) { + //The scale of this is lower + diffScale = other._scale - first._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .multiply(Integer.fromNumber(Math.pow(10, diffScale))) + .subtract(other._intVal); + return new BigDecimal(unscaledValue, other._scale); + } + //The scale of this is higher + diffScale = first._scale - other._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .subtract( + other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); + return new BigDecimal(unscaledValue, first._scale); +}; + +/** + * Returns the sum of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to sum to this. + * @return {!BigDecimal} The BigDecimal result. 
+ */ +BigDecimal.prototype.add = function (other) { + const first = this; + if (first._scale === other._scale) { + return new BigDecimal(first._intVal.add(other._intVal), first._scale); + } + let diffScale; + let unscaledValue; + if (first._scale < other._scale) { + //The scale of this is lower + diffScale = other._scale - first._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .multiply(Integer.fromNumber(Math.pow(10, diffScale))) + .add(other._intVal); + return new BigDecimal(unscaledValue, other._scale); + } + //The scale of this is higher + diffScale = first._scale - other._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .add( + other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); + return new BigDecimal(unscaledValue, first._scale); +}; + +/** + * Returns true if the current instance is greater than the other + * @param {BigDecimal} other + * @returns {boolean} + */ +BigDecimal.prototype.greaterThan = function (other) { + return this.compare(other) === 1; +}; + +/** @return {boolean} Whether this value is negative. */ +BigDecimal.prototype.isNegative = function () { + return this._intVal.isNegative(); +}; + +/** @return {boolean} Whether this value is zero. 
*/ +BigDecimal.prototype.isZero = function () { + return this._intVal.isZero(); +}; + +/** + * Returns the string representation of this BigDecimal + * @returns {string} + */ +BigDecimal.prototype.toString = function () { + let intString = this._intVal.toString(); + if (this._scale === 0) { + return intString; + } + let signSymbol = ''; + if (intString.charAt(0) === '-') { + signSymbol = '-'; + intString = intString.substr(1); + } + let separatorIndex = intString.length - this._scale; + if (separatorIndex <= 0) { + //add zeros at the beginning, plus an additional zero + intString = utils.stringRepeat('0', (-separatorIndex) + 1) + intString; + separatorIndex = intString.length - this._scale; + } + return signSymbol + intString.substr(0, separatorIndex) + '.' + intString.substr(separatorIndex); +}; + +/** + * Returns a Number representation of this BigDecimal. + * @returns {Number} + */ +BigDecimal.prototype.toNumber = function () { + return parseFloat(this.toString()); +}; + +/** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ +BigDecimal.prototype.toJSON = function () { + return this.toString(); +}; + + +module.exports = BigDecimal; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/duration.js b/node_modules/cassandra-driver/lib/types/duration.js new file mode 100644 index 0000000..aa1b419 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/duration.js @@ -0,0 +1,714 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const Long = require('long'); +const util = require('util'); +const utils = require('../utils'); + +/** @module types */ + +// Reuse the same buffers that should perform slightly better than built-in buffer pool +const reusableBuffers = { + months: utils.allocBuffer(9), + days: utils.allocBuffer(9), + nanoseconds: utils.allocBuffer(9) +}; + +const maxInt32 = 0x7FFFFFFF; +const longOneThousand = Long.fromInt(1000); +const nanosPerMicro = longOneThousand; +const nanosPerMilli = longOneThousand.multiply(nanosPerMicro); +const nanosPerSecond = longOneThousand.multiply(nanosPerMilli); +const nanosPerMinute = Long.fromInt(60).multiply(nanosPerSecond); +const nanosPerHour = Long.fromInt(60).multiply(nanosPerMinute); +const daysPerWeek = 7; +const monthsPerYear = 12; +const standardRegex = /(\d+)(y|mo|w|d|h|s|ms|us|µs|ns|m)/gi; +const iso8601Regex = /P((\d+)Y)?((\d+)M)?((\d+)D)?(T((\d+)H)?((\d+)M)?((\d+)S)?)?/; +const iso8601WeekRegex = /P(\d+)W/; +const iso8601AlternateRegex = /P(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})/; + +/** + * Creates a new instance of {@link Duration}. + * @classdesc + * Represents a duration. A duration stores separately months, days, and seconds due to the fact that the number of + * days in a month varies, and a day can have 23 or 25 hours if a daylight saving is involved. + * @param {Number} months The number of months. + * @param {Number} days The number of days. + * @param {Number|Long} nanoseconds The number of nanoseconds. + * @constructor + */ +function Duration(months, days, nanoseconds) { + /** + * Gets the number of months. + * @type {Number} + */ + this.months = months; + /** + * Gets the number of days. + * @type {Number} + */ + this.days = days; + /** + * Gets the number of nanoseconds represented as a int64. + * @type {Long} + */ + this.nanoseconds = typeof nanoseconds === 'number' ? 
Long.fromNumber(nanoseconds) : nanoseconds; +} + +Duration.prototype.equals = function (other) { + if (!(other instanceof Duration)) { + return false; + } + return this.months === other.months && + this.days === other.days && + this.nanoseconds.equals(other.nanoseconds); +}; + +/** + * Serializes the duration and returns the representation of the value in bytes. + * @returns {Buffer} + */ +Duration.prototype.toBuffer = function () { + const lengthMonths = VIntCoding.writeVInt(Long.fromNumber(this.months), reusableBuffers.months); + const lengthDays = VIntCoding.writeVInt(Long.fromNumber(this.days), reusableBuffers.days); + const lengthNanoseconds = VIntCoding.writeVInt(this.nanoseconds, reusableBuffers.nanoseconds); + const buffer = utils.allocBufferUnsafe(lengthMonths + lengthDays + lengthNanoseconds); + reusableBuffers.months.copy(buffer, 0, 0, lengthMonths); + let offset = lengthMonths; + reusableBuffers.days.copy(buffer, offset, 0, lengthDays); + offset += lengthDays; + reusableBuffers.nanoseconds.copy(buffer, offset, 0, lengthNanoseconds); + return buffer; +}; + +/** + * Returns the string representation of the value. 
+ * @return {string} + */ +Duration.prototype.toString = function () { + let value = ''; + function append(dividend, divisor, unit) { + if (dividend === 0 || dividend < divisor) { + return dividend; + } + // string concatenation is supposed to be fasted than join() + value += (dividend / divisor).toFixed(0) + unit; + return dividend % divisor; + } + function append64(dividend, divisor, unit) { + if (dividend.equals(Long.ZERO) || dividend.lessThan(divisor)) { + return dividend; + } + // string concatenation is supposed to be fasted than join() + value += dividend.divide(divisor).toString() + unit; + return dividend.modulo(divisor); + } + if (this.months < 0 || this.days < 0 || this.nanoseconds.isNegative()) { + value = '-'; + } + let remainder = append(Math.abs(this.months), monthsPerYear, "y"); + append(remainder, 1, "mo"); + append(Math.abs(this.days), 1, "d"); + + if (!this.nanoseconds.equals(Long.ZERO)) { + const nanos = this.nanoseconds.isNegative() ? this.nanoseconds.negate() : this.nanoseconds; + remainder = append64(nanos, nanosPerHour, "h"); + remainder = append64(remainder, nanosPerMinute, "m"); + remainder = append64(remainder, nanosPerSecond, "s"); + remainder = append64(remainder, nanosPerMilli, "ms"); + remainder = append64(remainder, nanosPerMicro, "us"); + append64(remainder, Long.ONE, "ns"); + } + return value; +}; + +/** + * Creates a new {@link Duration} instance from the binary representation of the value. + * @param {Buffer} buffer + * @returns {Duration} + */ +Duration.fromBuffer = function (buffer) { + const offset = { value: 0 }; + const months = VIntCoding.readVInt(buffer, offset).toNumber(); + const days = VIntCoding.readVInt(buffer, offset).toNumber(); + const nanoseconds = VIntCoding.readVInt(buffer, offset); + return new Duration(months, days, nanoseconds); +}; + +/** + * Creates a new {@link Duration} instance from the string representation of the value. + *

+ * Accepted formats: + *

+ *
    + *
  • multiple digits followed by a time unit like: 12h30m where the time unit can be: + *
      + *
    • {@code y}: years
    • + *
    • {@code m}: months
    • + *
    • {@code w}: weeks
    • + *
    • {@code d}: days
    • + *
    • {@code h}: hours
    • + *
    • {@code m}: minutes
    • + *
    • {@code s}: seconds
    • + *
    • {@code ms}: milliseconds
    • + *
    • {@code us} or {@code µs}: microseconds
    • + *
    • {@code ns}: nanoseconds
    • + *
    + *
  • + *
  • ISO 8601 format: P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W
  • + *
  • ISO 8601 alternative format: P[YYYY]-[MM]-[DD]T[hh]:[mm]:[ss]
  • + *
+ * @param {String} input + * @returns {Duration} + */ +Duration.fromString = function (input) { + const isNegative = input.charAt(0) === '-'; + const source = isNegative ? input.substr(1) : input; + if (source.charAt(0) === 'P') { + if (source.charAt(source.length - 1) === 'W') { + return parseIso8601WeekFormat(isNegative, source); + } + if (source.indexOf('-') > 0) { + return parseIso8601AlternativeFormat(isNegative, source); + } + return parseIso8601Format(isNegative, source); + } + return parseStandardFormat(isNegative, source); +}; + +/** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ +function parseStandardFormat(isNegative, source) { + const builder = new Builder(isNegative); + standardRegex.lastIndex = 0; + let matches; + while ((matches = standardRegex.exec(source)) && matches.length <= 3) { + builder.add(matches[1], matches[2]); + } + return builder.build(); +} + +/** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ +function parseIso8601Format(isNegative, source) { + const matches = iso8601Regex.exec(source); + if (!matches || matches[0] !== source) { + throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + const builder = new Builder(isNegative); + if (matches[1]) { + builder.addYears(matches[2]); + } + if (matches[3]) { + builder.addMonths(matches[4]); + } + if (matches[5]) { + builder.addDays(matches[6]); + } + if (matches[7]) { + if (matches[8]) { + builder.addHours(matches[9]); + } + if (matches[10]) { + builder.addMinutes(matches[11]); + } + if (matches[12]) { + builder.addSeconds(matches[13]); + } + } + return builder.build(); +} + +/** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ +function parseIso8601WeekFormat(isNegative, source) { + const matches = iso8601WeekRegex.exec(source); + if (!matches || matches[0] !== source) { + throw new 
TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + return new Builder(isNegative) + .addWeeks(matches[1]) + .build(); +} + +/** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ +function parseIso8601AlternativeFormat(isNegative, source) { + const matches = iso8601AlternateRegex.exec(source); + if (!matches || matches[0] !== source) { + throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + return new Builder(isNegative).addYears(matches[1]) + .addMonths(matches[2]) + .addDays(matches[3]) + .addHours(matches[4]) + .addMinutes(matches[5]) + .addSeconds(matches[6]) + .build(); +} + +/** + * @param {Boolean} isNegative + * @private + * @constructor + */ +function Builder(isNegative) { + this._isNegative = isNegative; + this._unitIndex = 0; + this._months = 0; + this._days = 0; + this._nanoseconds = Long.ZERO; + this._addMethods = { + 'y': this.addYears, + 'mo': this.addMonths, + 'w': this.addWeeks, + 'd': this.addDays, + 'h': this.addHours, + 'm': this.addMinutes, + 's': this.addSeconds, + 'ms': this.addMillis, + // µs + '\u00B5s': this.addMicros, + 'us': this.addMicros, + 'ns': this.addNanos + }; + this._unitByIndex = [ + null, 'years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'milliseconds', 'microseconds', + 'nanoseconds' + ]; +} + +Builder.prototype._validateOrder = function (unitIndex) { + if (unitIndex === this._unitIndex) { + throw new TypeError(util.format("Invalid duration. The %s are specified multiple times", this._getUnitName(unitIndex))); + } + + if (unitIndex <= this._unitIndex) { + throw new TypeError(util.format("Invalid duration. 
The %s should be after %s", + this._getUnitName(this._unitIndex), + this._getUnitName(unitIndex))); + } + this._unitIndex = unitIndex; +}; + +/** + * @param {Number} units + * @param {Number} monthsPerUnit + */ +Builder.prototype._validateMonths = function(units, monthsPerUnit) { + this._validate32(units, (maxInt32 - this._months) / monthsPerUnit, "months"); +}; + +/** + * @param {Number} units + * @param {Number} daysPerUnit + */ +Builder.prototype._validateDays = function(units, daysPerUnit) { + this._validate32(units, (maxInt32 - this._days) / daysPerUnit, "days"); +}; + +/** + * @param {Long} units + * @param {Long} nanosPerUnit + */ +Builder.prototype._validateNanos = function(units, nanosPerUnit) { + this._validate64(units, Long.MAX_VALUE.subtract(this._nanoseconds).divide(nanosPerUnit), "nanoseconds"); +}; + +/** + * @param {Number} units + * @param {Number} limit + * @param {String} unitName + */ +Builder.prototype._validate32 = function(units, limit, unitName) { + if (units > limit) { + throw new TypeError(util.format('Invalid duration. The total number of %s must be less or equal to %s', + unitName, + maxInt32)); + } +}; + +/** + * @param {Long} units + * @param {Long} limit + * @param {String} unitName + */ +Builder.prototype._validate64 = function(units, limit, unitName) { + if (units.greaterThan(limit)) { + throw new TypeError(util.format('Invalid duration. 
The total number of %s must be less or equal to %s', + unitName, + Long.MAX_VALUE.toString())); + } +}; + +Builder.prototype._getUnitName = function(unitIndex) { + const name = this._unitByIndex[+unitIndex]; + if (!name) { + throw new Error('unknown unit index: ' + unitIndex); + } + return name; +}; + +Builder.prototype.add = function (textValue, symbol) { + const addMethod = this._addMethods[symbol.toLowerCase()]; + if (!addMethod) { + throw new TypeError(util.format("Unknown duration symbol '%s'", symbol)); + } + return addMethod.call(this, textValue); +}; + +/** + * @param {String|Number} years + * @return {Builder} + */ +Builder.prototype.addYears = function (years) { + const value = +years; + this._validateOrder(1); + this._validateMonths(value, monthsPerYear); + this._months += value * monthsPerYear; + return this; +}; + +/** + * @param {String|Number} months + * @return {Builder} + */ +Builder.prototype.addMonths = function(months) { + const value = +months; + this._validateOrder(2); + this._validateMonths(value, 1); + this._months += value; + return this; +}; + +/** + * @param {String|Number} weeks + * @return {Builder} + */ +Builder.prototype.addWeeks = function(weeks) { + const value = +weeks; + this._validateOrder(3); + this._validateDays(value, daysPerWeek); + this._days += value * daysPerWeek; + return this; +}; + +/** + * @param {String|Number} days + * @return {Builder} + */ +Builder.prototype.addDays = function(days) { + const value = +days; + this._validateOrder(4); + this._validateDays(value, 1); + this._days += value; + return this; +}; + +/** + * @param {String|Long} hours + * @return {Builder} + */ +Builder.prototype.addHours = function(hours) { + const value = typeof hours === 'string' ? 
Long.fromString(hours) : hours; + this._validateOrder(5); + this._validateNanos(value, nanosPerHour); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerHour)); + return this; +}; + +/** + * @param {String|Long} minutes + * @return {Builder} + */ +Builder.prototype.addMinutes = function(minutes) { + const value = typeof minutes === 'string' ? Long.fromString(minutes) : minutes; + this._validateOrder(6); + this._validateNanos(value, nanosPerMinute); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMinute)); + return this; +}; + +/** + * @param {String|Long} seconds + * @return {Builder} + */ +Builder.prototype.addSeconds = function(seconds) { + const value = typeof seconds === 'string' ? Long.fromString(seconds) : seconds; + this._validateOrder(7); + this._validateNanos(value, nanosPerSecond); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerSecond)); + return this; +}; + +/** + * @param {String|Long} millis + * @return {Builder} + */ +Builder.prototype.addMillis = function(millis) { + const value = typeof millis === 'string' ? Long.fromString(millis) : millis; + this._validateOrder(8); + this._validateNanos(value, nanosPerMilli); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMilli)); + return this; +}; + +/** + * @param {String|Long} micros + * @return {Builder} + */ +Builder.prototype.addMicros = function(micros) { + const value = typeof micros === 'string' ? Long.fromString(micros) : micros; + this._validateOrder(9); + this._validateNanos(value, nanosPerMicro); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMicro)); + return this; +}; + +/** + * @param {String|Long} nanos + * @return {Builder} + */ +Builder.prototype.addNanos = function(nanos) { + const value = typeof nanos === 'string' ? 
Long.fromString(nanos) : nanos; + this._validateOrder(10); + this._validateNanos(value, Long.ONE); + this._nanoseconds = this._nanoseconds.add(value); + return this; +}; + +/** @return {Duration} */ +Builder.prototype.build = function () { + return (this._isNegative ? + new Duration(-this._months, -this._days, this._nanoseconds.negate()) : + new Duration(this._months, this._days, this._nanoseconds)); +}; + +/** + * Contains the methods for reading and writing vints into binary format. + * Exposes only 2 internal methods, the rest are hidden. + * @private + */ +const VIntCoding = (function () { + /** @param {Long} n */ + function encodeZigZag64(n) { + // (n << 1) ^ (n >> 63); + return n.toUnsigned().shiftLeft(1).xor(n.shiftRight(63)); + } + + /** @param {Long} n */ + function decodeZigZag64(n) { + // (n >>> 1) ^ -(n & 1); + return n.shiftRightUnsigned(1).xor(n.and(Long.ONE).negate()); + } + + /** + * @param {Long} value + * @param {Buffer} buffer + * @returns {Number} + */ + function writeVInt(value, buffer) { + return writeUnsignedVInt(encodeZigZag64(value), buffer); + } + + /** + * @param {Long} value + * @param {Buffer} buffer + * @returns {number} + */ + function writeUnsignedVInt(value, buffer) { + const size = computeUnsignedVIntSize(value); + if (size === 1) { + buffer[0] = value.getLowBits(); + return 1; + } + encodeVInt(value, size, buffer); + return size; + } + + /** + * @param {Long} value + * @returns {number} + */ + function computeUnsignedVIntSize(value) { + const magnitude = numberOfLeadingZeros(value.or(Long.ONE)); + return (639 - magnitude * 9) >> 6; + } + + /** + * @param {Long} value + * @param {Number} size + * @param {Buffer} buffer + */ + function encodeVInt(value, size, buffer) { + const extraBytes = size - 1; + let intValue = value.getLowBits(); + let i; + let intBytes = 4; + for (i = extraBytes; i >= 0 && (intBytes--) > 0; i--) { + buffer[i] = 0xFF & intValue; + intValue >>= 8; + } + intValue = value.getHighBits(); + for (; i >= 0; i--) { + 
buffer[i] = 0xFF & intValue; + intValue >>= 8; + } + buffer[0] |= encodeExtraBytesToRead(extraBytes); + } + /** + * Returns the number of zero bits preceding the highest-order one-bit in the binary representation of the value. + * @param {Long} value + * @returns {Number} + */ + function numberOfLeadingZeros(value) { + if (value.equals(Long.ZERO)) { + return 64; + } + let n = 1; + let x = value.getHighBits(); + if (x === 0) { + n += 32; + x = value.getLowBits(); + } + if (x >>> 16 === 0) { + n += 16; + x <<= 16; + } + if (x >>> 24 === 0) { + n += 8; + x <<= 8; + } + if (x >>> 28 === 0) { + n += 4; + x <<= 4; + } + if (x >>> 30 === 0) { + n += 2; + x <<= 2; + } + n -= x >>> 31; + return n; + } + + + function encodeExtraBytesToRead(extraBytesToRead) { + return ~(0xff >> extraBytesToRead); + } + + /** + * @param {Buffer} buffer + * @param {{value: number}} offset + * @returns {Long} + */ + function readVInt(buffer, offset) { + return decodeZigZag64(readUnsignedVInt(buffer, offset)); + } + + /** + * @param {Buffer} input + * @param {{ value: number}} offset + * @returns {Long} + */ + function readUnsignedVInt(input, offset) { + const firstByte = input[offset.value++]; + if ((firstByte & 0x80) === 0) { + return Long.fromInt(firstByte); + } + const sByteInt = fromSignedByteToInt(firstByte); + const size = numberOfExtraBytesToRead(sByteInt); + let result = Long.fromInt(sByteInt & firstByteValueMask(size)); + for (let ii = 0; ii < size; ii++) { + const b = Long.fromInt(input[offset.value++]); + // (result << 8) | b + result = result.shiftLeft(8).or(b); + } + return result; + } + + function fromSignedByteToInt(value) { + if (value > 0x7f) { + return value - 0x0100; + } + return value; + } + + function numberOfLeadingZerosInt32(i) { + if (i === 0) { + return 32; + } + let n = 1; + if (i >>> 16 === 0) { + n += 16; + i <<= 16; + } + if (i >>> 24 === 0) { + n += 8; + i <<= 8; + } + if (i >>> 28 === 0) { + n += 4; + i <<= 4; + } + if (i >>> 30 === 0) { + n += 2; + i <<= 2; + } + 
n -= i >>> 31; + return n; + } + + /** + * @param {Number} firstByte + * @returns {Number} + */ + function numberOfExtraBytesToRead(firstByte) { + // Instead of counting 1s of the byte, we negate and count 0 of the byte + return numberOfLeadingZerosInt32(~firstByte) - 24; + } + + /** + * @param {Number} extraBytesToRead + * @returns {Number} + */ + function firstByteValueMask(extraBytesToRead) { + return 0xff >> extraBytesToRead; + } + + return { + readVInt: readVInt, + writeVInt: writeVInt + }; +})(); + +module.exports = Duration; diff --git a/node_modules/cassandra-driver/lib/types/index.d.ts b/node_modules/cassandra-driver/lib/types/index.d.ts new file mode 100644 index 0000000..b03186c --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/index.d.ts @@ -0,0 +1,427 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import _Long = require('long'); +import * as stream from 'stream'; +import { ValueCallback } from '../../'; + +export namespace types { + class Long extends _Long { + + } + + enum consistencies { + any = 0x00, + one = 0x01, + two = 0x02, + three = 0x03, + quorum = 0x04, + all = 0x05, + localQuorum = 0x06, + eachQuorum = 0x07, + serial = 0x08, + localSerial = 0x09, + localOne = 0x0a + } + + enum dataTypes { + custom = 0x0000, + ascii = 0x0001, + bigint = 0x0002, + blob = 0x0003, + boolean = 0x0004, + counter = 0x0005, + decimal = 0x0006, + double = 0x0007, + float = 0x0008, + int = 0x0009, + text = 0x000a, + timestamp = 0x000b, + uuid = 0x000c, + varchar = 0x000d, + varint = 0x000e, + timeuuid = 0x000f, + inet = 0x0010, + date = 0x0011, + time = 0x0012, + smallint = 0x0013, + tinyint = 0x0014, + duration = 0x0015, + list = 0x0020, + map = 0x0021, + set = 0x0022, + udt = 0x0030, + tuple = 0x0031, + } + + enum distance { + local = 0, + remote, + ignored + } + + enum responseErrorCodes { + serverError = 0x0000, + protocolError = 0x000A, + badCredentials = 0x0100, + unavailableException = 0x1000, + overloaded = 0x1001, + isBootstrapping = 0x1002, + truncateError = 0x1003, + writeTimeout = 0x1100, + readTimeout = 0x1200, + readFailure = 0x1300, + functionFailure = 0x1400, + writeFailure = 0x1500, + syntaxError = 0x2000, + unauthorized = 0x2100, + invalid = 0x2200, + configError = 0x2300, + alreadyExists = 0x2400, + unprepared = 0x2500, + clientWriteFailure = 0x8000 + } + + enum protocolVersion { + v1 = 0x01, + v2 = 0x02, + v3 = 0x03, + v4 = 0x04, + v5 = 0x05, + v6 = 0x06, + dseV1 = 0x41, + dseV2 = 0x42, + maxSupported = dseV2, + minSupported = v1 + } + + namespace protocolVersion { + function isSupported(version: protocolVersion): boolean; + } + + const unset: object; + + class BigDecimal { + constructor(unscaledValue: number, scale: number); + + static fromBuffer(buf: Buffer): BigDecimal; + + static fromString(value: string): BigDecimal; + + static 
toBuffer(value: BigDecimal): Buffer; + + static fromNumber(value: number): BigDecimal; + + add(other: BigDecimal): BigDecimal; + + compare(other: BigDecimal): number; + + equals(other: BigDecimal): boolean; + + greaterThan(other: BigDecimal): boolean; + + isNegative(): boolean; + + isZero(): boolean; + + notEquals(other: BigDecimal): boolean; + + subtract(other: BigDecimal): BigDecimal; + + toNumber(): number; + + toString(): string; + + toJSON(): string; + } + + class Duration { + constructor(month: number, days: number, nanoseconds: number | Long); + + static fromBuffer(buffer: Buffer): Duration; + + static fromString(input: string): Duration; + + equals(other: Duration): boolean; + + toBuffer(): Buffer; + + toString(): string; + } + + class InetAddress { + length: number; + + version: number; + + constructor(buffer: Buffer); + + static fromString(value: string): InetAddress; + + equals(other: InetAddress): boolean; + + getBuffer(): Buffer; + + toString(): string; + + toJSON(): string; + } + + class Integer { + static ONE: Integer; + static ZERO: Integer; + + constructor(bits: Array, sign: number); + + static fromBits(bits: Array): Integer; + + static fromBuffer(bits: Buffer): Integer; + + static fromInt(value: number): Integer; + + static fromNumber(value: number): Integer; + + static fromString(str: string, opt_radix?: number): Integer; + + static toBuffer(value: Integer): Buffer; + + abs(): Integer; + + add(other: Integer): Integer; + + compare(other: Integer): number; + + divide(other: Integer): Integer; + + equals(other: Integer): boolean; + + getBits(index: number): number; + + getBitsUnsigned(index: number): number; + + getSign(): number; + + greaterThan(other: Integer): boolean; + + greaterThanOrEqual(other: Integer): boolean; + + isNegative(): boolean; + + isOdd(): boolean; + + isZero(): boolean; + + lessThan(other: Integer): boolean; + + lessThanOrEqual(other: Integer): boolean; + + modulo(other: Integer): Integer; + + multiply(other: Integer): Integer; 
+ + negate(): Integer; + + not(): Integer; + + notEquals(other: Integer): boolean; + + or(other: Integer): Integer; + + shiftLeft(numBits: number): Integer; + + shiftRight(numBits: number): Integer; + + shorten(numBits: number): Integer; + + subtract(other: Integer): Integer; + + toInt(): number; + + toJSON(): string; + + toNumber(): number; + + toString(opt_radix?: number): string; + + xor(other: Integer): Integer; + } + + class LocalDate { + year: number; + month: number; + day: number; + + constructor(year: number, month: number, day: number); + + static fromDate(date: Date): LocalDate; + + static fromString(value: string): LocalDate; + + static fromBuffer(buffer: Buffer): LocalDate; + + static now(): LocalDate; + + static utcNow(): LocalDate; + + equals(other: LocalDate): boolean; + + inspect(): string; + + toBuffer(): Buffer; + + toJSON(): string; + + toString(): string; + } + + class LocalTime { + hour: number; + minute: number; + nanosecond: number; + second: number; + + constructor(totalNanoseconds: Long); + + static fromBuffer(value: Buffer): LocalTime; + + static fromDate(date: Date, nanoseconds: number): LocalTime; + + static fromMilliseconds(milliseconds: number, nanoseconds?: number): LocalTime; + + static fromString(value: string): LocalTime; + + static now(nanoseconds?: number): LocalTime; + + compare(other: LocalTime): boolean; + + equals(other: LocalTime): boolean; + + getTotalNanoseconds(): Long; + + inspect(): string; + + toBuffer(): Buffer; + + toJSON(): string; + + toString(): string; + } + + interface ResultSet extends Iterable, AsyncIterable { + info: { + queriedHost: string, + triedHosts: { [key: string]: any; }, + speculativeExecutions: number, + achievedConsistency: consistencies, + traceId: Uuid, + warnings: string[], + customPayload: any + }; + + columns: Array<{ name: string, type: { code: dataTypes, info: any } }>; + nextPage: (() => void) | null; + pageState: string; + rowLength: number; + rows: Row[]; + + first(): Row; + + 
wasApplied(): boolean; + } + + interface ResultStream extends stream.Readable { + buffer: Buffer; + paused: boolean; + + add(chunk: Buffer): void; + } + + interface Row { + get(columnName: string | number): any; + + keys(): string[]; + + forEach(callback: (row: Row) => void): void; + + values(): any[]; + + [key: string]: any; + } + + class TimeUuid extends Uuid { + static now(): TimeUuid; + + static now(nodeId: string | Buffer, clockId?: string | Buffer): TimeUuid; + + static now(nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + + static now(callback: ValueCallback): void; + + static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer): TimeUuid; + + static fromDate( + date: Date, + ticks: number, + nodeId: string | Buffer, + clockId: string | Buffer, + callback: ValueCallback): void; + + static fromString(value: string): TimeUuid; + + static max(date: Date, ticks: number): TimeUuid; + + static min(date: Date, ticks: number): TimeUuid; + + getDatePrecision(): { date: Date, ticks: number }; + + getDate(): Date; + } + + class Tuple { + elements: any[]; + length: number; + + constructor(...args: any[]); + + static fromArray(elements: any[]): Tuple; + + get(index: number): any; + + toString(): string; + + toJSON(): string; + + values(): any[]; + } + + class Uuid { + constructor(buffer: Buffer); + + static fromString(value: string): Uuid; + + static random(callback: ValueCallback): void; + + static random(): Uuid; + + equals(other: Uuid): boolean; + + getBuffer(): Buffer; + + toString(): string; + + toJSON(): string; + } +} \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/index.js b/node_modules/cassandra-driver/lib/types/index.js new file mode 100644 index 0000000..dae3089 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/index.js @@ -0,0 +1,630 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); + +const errors = require('../errors'); +const TimeUuid = require('./time-uuid'); +const Uuid = require('./uuid'); +const protocolVersion = require('./protocol-version'); +const utils = require('../utils'); + +/** @module types */ +/** + * Long constructor, wrapper of the internal library used: {@link https://github.com/dcodeIO/long.js Long.js}. + * @constructor + */ +const Long = require('long'); + + +/** + * Consistency levels + * @type {Object} + * @property {Number} any Writing: A write must be written to at least one node. If all replica nodes for the given row key are down, the write can still succeed after a hinted handoff has been written. If all replica nodes are down at write time, an ANY write is not readable until the replica nodes for that row have recovered. + * @property {Number} one Returns a response from the closest replica, as determined by the snitch. + * @property {Number} two Returns the most recent data from two of the closest replicas. + * @property {Number} three Returns the most recent data from three of the closest replicas. + * @property {Number} quorum Reading: Returns the record with the most recent timestamp after a quorum of replicas has responded regardless of data center. Writing: A write must be written to the commit log and memory table on a quorum of replica nodes. 
+ * @property {Number} all Reading: Returns the record with the most recent timestamp after all replicas have responded. The read operation will fail if a replica does not respond. Writing: A write must be written to the commit log and memory table on all replica nodes in the cluster for that row. + * @property {Number} localQuorum Reading: Returns the record with the most recent timestamp once a quorum of replicas in the current data center as the coordinator node has reported. Writing: A write must be written to the commit log and memory table on a quorum of replica nodes in the same data center as the coordinator node. Avoids latency of inter-data center communication. + * @property {Number} eachQuorum Reading: Returns the record once a quorum of replicas in each data center of the cluster has responded. Writing: Strong consistency. A write must be written to the commit log and memtable on a quorum of replica nodes in all data centers. + * @property {Number} serial Achieves linearizable consistency for lightweight transactions by preventing unconditional updates. + * @property {Number} localSerial Same as serial but confined to the data center. A write must be written conditionally to the commit log and memtable on a quorum of replica nodes in the same data center. + * @property {Number} localOne Similar to One but only within the DC the coordinator is in. + */ +const consistencies = { + any: 0x00, + one: 0x01, + two: 0x02, + three: 0x03, + quorum: 0x04, + all: 0x05, + localQuorum: 0x06, + eachQuorum: 0x07, + serial: 0x08, + localSerial: 0x09, + localOne: 0x0a +}; + +/** + * Mapping of consistency level codes to their string representation. 
+ * @type {Object} + */ +const consistencyToString = {}; +consistencyToString[consistencies.any] = 'ANY'; +consistencyToString[consistencies.one] = 'ONE'; +consistencyToString[consistencies.two] = 'TWO'; +consistencyToString[consistencies.three] = 'THREE'; +consistencyToString[consistencies.quorum] = 'QUORUM'; +consistencyToString[consistencies.all] = 'ALL'; +consistencyToString[consistencies.localQuorum] = 'LOCAL_QUORUM'; +consistencyToString[consistencies.eachQuorum] = 'EACH_QUORUM'; +consistencyToString[consistencies.serial] = 'SERIAL'; +consistencyToString[consistencies.localSerial] = 'LOCAL_SERIAL'; +consistencyToString[consistencies.localOne] = 'LOCAL_ONE'; + +/** + * CQL data types + * @type {Object} + * @property {Number} custom A custom type. + * @property {Number} ascii ASCII character string. + * @property {Number} bigint 64-bit signed long. + * @property {Number} blob Arbitrary bytes (no validation). + * @property {Number} boolean true or false. + * @property {Number} counter Counter column (64-bit signed value). + * @property {Number} decimal Variable-precision decimal. + * @property {Number} double 64-bit IEEE-754 floating point. + * @property {Number} float 32-bit IEEE-754 floating point. + * @property {Number} int 32-bit signed integer. + * @property {Number} text UTF8 encoded string. + * @property {Number} timestamp A timestamp. + * @property {Number} uuid Type 1 or type 4 UUID. + * @property {Number} varchar UTF8 encoded string. + * @property {Number} varint Arbitrary-precision integer. + * @property {Number} timeuuid Type 1 UUID. + * @property {Number} inet An IP address. It can be either 4 bytes long (IPv4) or 16 bytes long (IPv6). + * @property {Number} date A date without a time-zone in the ISO-8601 calendar system. + * @property {Number} time A value representing the time portion of the day. + * @property {Number} smallint 16-bit two's complement integer. + * @property {Number} tinyint 8-bit two's complement integer. 
+ * @property {Number} list A collection of elements. + * @property {Number} map Key/value pairs. + * @property {Number} set A collection that contains no duplicate elements. + * @property {Number} udt User-defined type. + * @property {Number} tuple A sequence of values. + */ +const dataTypes = { + custom: 0x0000, + ascii: 0x0001, + bigint: 0x0002, + blob: 0x0003, + boolean: 0x0004, + counter: 0x0005, + decimal: 0x0006, + double: 0x0007, + float: 0x0008, + int: 0x0009, + text: 0x000a, + timestamp: 0x000b, + uuid: 0x000c, + varchar: 0x000d, + varint: 0x000e, + timeuuid: 0x000f, + inet: 0x0010, + date: 0x0011, + time: 0x0012, + smallint: 0x0013, + tinyint: 0x0014, + duration: 0x0015, + list: 0x0020, + map: 0x0021, + set: 0x0022, + udt: 0x0030, + tuple: 0x0031, + /** + * Returns the typeInfo of a given type name + * @param name + * @returns {{code: number, info: *|Object}} + */ + getByName: function(name) { + name = name.toLowerCase(); + if (name.indexOf('<') > 0) { + const listMatches = /^(list|set)<(.+)>$/.exec(name); + if (listMatches) { + return { code: this[listMatches[1]], info: this.getByName(listMatches[2])}; + } + const mapMatches = /^(map)< *(.+) *, *(.+)>$/.exec(name); + if (mapMatches) { + return { code: this[mapMatches[1]], info: [this.getByName(mapMatches[2]), this.getByName(mapMatches[3])]}; + } + const udtMatches = /^(udt)<(.+)>$/.exec(name); + if (udtMatches) { + //udt name as raw string + return { code: this[udtMatches[1]], info: udtMatches[2]}; + } + const tupleMatches = /^(tuple)<(.+)>$/.exec(name); + if (tupleMatches) { + //tuple info as an array of types + return { code: this[tupleMatches[1]], info: tupleMatches[2].split(',').map(function (x) { + return this.getByName(x.trim()); + }, this)}; + } + } + const typeInfo = { code: this[name], info: null}; + if (typeof typeInfo.code !== 'number') { + throw new TypeError('Data type with name ' + name + ' not valid'); + } + return typeInfo; + } +}; + +/** + * Map of Data types by code + * @internal + * 
@private + */ +const _dataTypesByCode = (function () { + const result = {}; + for (const key in dataTypes) { + if (!dataTypes.hasOwnProperty(key)) { + continue; + } + const val = dataTypes[key]; + if (typeof val !== 'number') { + continue; + } + result[val] = key; + } + return result; +})(); + +/** + * Represents the distance of Cassandra node as assigned by a LoadBalancingPolicy relatively to the driver instance. + * @type {Object} + * @property {Number} local A local node. + * @property {Number} remote A remote node. + * @property {Number} ignored A node that is meant to be ignored. + */ +const distance = { + local: 0, + remote: 1, + ignored: 2 +}; + +/** + * An integer byte that distinguish the actual message from and to Cassandra + * @internal + * @ignore + */ +const opcodes = { + error: 0x00, + startup: 0x01, + ready: 0x02, + authenticate: 0x03, + credentials: 0x04, + options: 0x05, + supported: 0x06, + query: 0x07, + result: 0x08, + prepare: 0x09, + execute: 0x0a, + register: 0x0b, + event: 0x0c, + batch: 0x0d, + authChallenge: 0x0e, + authResponse: 0x0f, + authSuccess: 0x10, + cancel: 0xff, + + /** + * Determines if the code is a valid opcode + */ + isInRange: function (code) { + return code > this.error && code > this.event; + } +}; + +/** + * Event types from Cassandra + * @type {{topologyChange: string, statusChange: string, schemaChange: string}} + * @internal + * @ignore + */ +const protocolEvents = { + topologyChange: 'TOPOLOGY_CHANGE', + statusChange: 'STATUS_CHANGE', + schemaChange: 'SCHEMA_CHANGE' +}; + +/** + * Server error codes returned by Cassandra + * @type {Object} + * @property {Number} serverError Something unexpected happened. + * @property {Number} protocolError Some client message triggered a protocol violation. + * @property {Number} badCredentials Authentication was required and failed. 
+ * @property {Number} unavailableException Raised when coordinator knows there is not enough replicas alive to perform a query with the requested consistency level. + * @property {Number} overloaded The request cannot be processed because the coordinator is overloaded. + * @property {Number} isBootstrapping The request was a read request but the coordinator node is bootstrapping. + * @property {Number} truncateError Error encountered during a truncate request. + * @property {Number} writeTimeout Timeout encountered on write query on coordinator waiting for response(s) from replicas. + * @property {Number} readTimeout Timeout encountered on read query on coordinator waitign for response(s) from replicas. + * @property {Number} readFailure A non-timeout error encountered during a read request. + * @property {Number} functionFailure A (user defined) function encountered during execution. + * @property {Number} writeFailure A non-timeout error encountered during a write request. + * @property {Number} syntaxError The submitted query has a syntax error. + * @property {Number} unauthorized The logged user doesn't have the right to perform the query. + * @property {Number} invalid The query is syntactically correct but invalid. + * @property {Number} configError The query is invalid because of some configuration issue. + * @property {Number} alreadyExists The query attempted to create a schema element (i.e. keyspace, table) that already exists. + * @property {Number} unprepared Can be thrown while a prepared statement tries to be executed if the provided statement is not known by the coordinator. 
+ */ +const responseErrorCodes = { + serverError: 0x0000, + protocolError: 0x000A, + badCredentials: 0x0100, + unavailableException: 0x1000, + overloaded: 0x1001, + isBootstrapping: 0x1002, + truncateError: 0x1003, + writeTimeout: 0x1100, + readTimeout: 0x1200, + readFailure: 0x1300, + functionFailure: 0x1400, + writeFailure: 0x1500, + syntaxError: 0x2000, + unauthorized: 0x2100, + invalid: 0x2200, + configError: 0x2300, + alreadyExists: 0x2400, + unprepared: 0x2500, + clientWriteFailure: 0x8000, +}; + +/** + * Type of result included in a response + * @internal + * @ignore + */ +const resultKind = { + voidResult: 0x0001, + rows: 0x0002, + setKeyspace: 0x0003, + prepared: 0x0004, + schemaChange: 0x0005 +}; + +/** + * Message frame flags + * @internal + * @ignore + */ +const frameFlags = { + compression: 0x01, + tracing: 0x02, + customPayload: 0x04, + warning: 0x08 +}; + +/** + * Unset representation. + *

+ * Use this field if you want to set a parameter to unset. Valid for Cassandra 2.2 and above. + *

+ */ +const unset = Object.freeze({'unset': true}); + +/** + * A long representing the value 1000 + * @const + * @private + */ +const _longOneThousand = Long.fromInt(1000); + +/** + * Counter used to generate up to 1000 different timestamp values with the same Date + * @private + */ +let _timestampTicks = 0; + +/** + *

Backward compatibility only, use [TimeUuid]{@link module:types~TimeUuid} instead.

+ * Generates and returns a RFC4122 v1 (timestamp based) UUID in a string representation. + * @param {{msecs, node, clockseq, nsecs}} [options] + * @param {Buffer} [buffer] + * @param {Number} [offset] + * @deprecated Use [TimeUuid]{@link module:types~TimeUuid} instead + */ +function timeuuid(options, buffer, offset) { + let date; + let ticks; + let nodeId; + let clockId; + if (options) { + if (typeof options.msecs === 'number') { + date = new Date(options.msecs); + } + if (options.msecs instanceof Date) { + date = options.msecs; + } + if (Array.isArray(options.node)) { + nodeId = utils.allocBufferFromArray(options.node); + } + if (typeof options.clockseq === 'number') { + clockId = utils.allocBufferUnsafe(2); + clockId.writeUInt16BE(options.clockseq, 0); + } + if (typeof options.nsecs === 'number') { + ticks = options.nsecs; + } + } + const uuid = new TimeUuid(date, ticks, nodeId, clockId); + if (buffer instanceof Buffer) { + //copy the values into the buffer + uuid.getBuffer().copy(buffer, offset || 0); + return buffer; + } + return uuid.toString(); +} + +/** + *

Backward compatibility only, use [Uuid]{@link module:types~Uuid} class instead.

+ * Generate and return a RFC4122 v4 UUID in a string representation. + * @deprecated Use [Uuid]{@link module:types~Uuid} class instead + */ +function uuid(options, buffer, offset) { + let uuid; + if (options) { + if (Array.isArray(options.random)) { + uuid = new Uuid(utils.allocBufferFromArray(options.random)); + } + } + if (!uuid) { + uuid = Uuid.random(); + } + if (buffer instanceof Buffer) { + //copy the values into the buffer + uuid.getBuffer().copy(buffer, offset || 0); + return buffer; + } + return uuid.toString(); +} + +/** + * Gets the data type name for a given type definition + * @internal + * @ignore + * @throws {ArgumentError} + */ +function getDataTypeNameByCode(item) { + if (!item || typeof item.code !== 'number') { + throw new errors.ArgumentError('Invalid signature type definition'); + } + const typeName = _dataTypesByCode[item.code]; + if (!typeName) { + throw new errors.ArgumentError(util.format('Type with code %d not found', item.code)); + } + if (!item.info) { + return typeName; + } + if (Array.isArray(item.info)) { + return (typeName + + '<' + + item.info.map(function (t) { + return getDataTypeNameByCode(t); + }).join(', ') + + '>'); + } + if (typeof item.info.code === 'number') { + return typeName + '<' + getDataTypeNameByCode(item.info) + '>'; + } + return typeName; +} + +//classes + +/** + * Represents a frame header that could be used to read from a Buffer or to write to a Buffer + * @ignore + * @param {Number} version Protocol version + * @param {Number} flags + * @param {Number} streamId + * @param {Number} opcode + * @param {Number} bodyLength + * @constructor + */ +function FrameHeader(version, flags, streamId, opcode, bodyLength) { + this.version = version; + this.flags = flags; + this.streamId = streamId; + this.opcode = opcode; + this.bodyLength = bodyLength; +} + +/** + * The length of the header of the frame based on the protocol version + * @returns {Number} + */ +FrameHeader.size = function (version) { + if 
(protocolVersion.uses2BytesStreamIds(version)) { + return 9; + } + return 8; +}; + +/** + * Gets the protocol version based on the first byte of the header + * @param {Buffer} buffer + * @returns {Number} + */ +FrameHeader.getProtocolVersion = function (buffer) { + return buffer[0] & 0x7F; +}; + +/** + * @param {Buffer} buf + * @param {Number} [offset] + * @returns {FrameHeader} + */ +FrameHeader.fromBuffer = function (buf, offset) { + let streamId = 0; + if (!offset) { + offset = 0; + } + const version = buf[offset++] & 0x7F; + const flags = buf.readUInt8(offset++); + if (!protocolVersion.uses2BytesStreamIds(version)) { + streamId = buf.readInt8(offset++); + } + else { + streamId = buf.readInt16BE(offset); + offset += 2; + } + return new FrameHeader(version, flags, streamId, buf.readUInt8(offset++), buf.readUInt32BE(offset)); +}; + +/** @returns {Buffer} */ +FrameHeader.prototype.toBuffer = function () { + const buf = utils.allocBufferUnsafe(FrameHeader.size(this.version)); + buf.writeUInt8(this.version, 0); + buf.writeUInt8(this.flags, 1); + let offset = 3; + if (!protocolVersion.uses2BytesStreamIds(this.version)) { + buf.writeInt8(this.streamId, 2); + } + else { + buf.writeInt16BE(this.streamId, 2); + offset = 4; + } + buf.writeUInt8(this.opcode, offset++); + buf.writeUInt32BE(this.bodyLength, offset); + return buf; +}; +/** + * Returns a long representation. 
+ * Used internally for deserialization + */ +Long.fromBuffer = function (value) { + if (!(value instanceof Buffer)) { + throw new TypeError('Expected Buffer, obtained ' + util.inspect(value)); + } + return new Long(value.readInt32BE(4), value.readInt32BE(0)); +}; + +/** + * Returns a big-endian buffer representation of the Long instance + * @param {Long} value + */ +Long.toBuffer = function (value) { + if (!(value instanceof Long)) { + throw new TypeError('Expected Long, obtained ' + util.inspect(value)); + } + const buffer = utils.allocBufferUnsafe(8); + buffer.writeUInt32BE(value.getHighBitsUnsigned(), 0); + buffer.writeUInt32BE(value.getLowBitsUnsigned(), 4); + return buffer; +}; + +/** + * Provide the name of the constructor and the string representation + * @returns {string} + */ +Long.prototype.inspect = function () { + return 'Long: ' + this.toString(); +}; + +/** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance + */ +Long.prototype.toJSON = function () { + return this.toString(); +}; + +/** + * Generates a value representing the timestamp for the query in microseconds based on the date and the microseconds provided + * @param {Date} [date] The date to generate the value, if not provided it will use the current date. + * @param {Number} [microseconds] A number from 0 to 999 used to build the microseconds part of the date. 
+ * @returns {Long} + */ +function generateTimestamp(date, microseconds) { + if (!date) { + date = new Date(); + } + let longMicro = Long.ZERO; + if (typeof microseconds === 'number' && microseconds >= 0 && microseconds < 1000) { + longMicro = Long.fromInt(microseconds); + } + else { + if (_timestampTicks > 999) { + _timestampTicks = 0; + } + longMicro = Long.fromInt(_timestampTicks); + _timestampTicks++; + } + return Long + .fromNumber(date.getTime()) + .multiply(_longOneThousand) + .add(longMicro); +} + +//error classes + +/** @private */ +function QueryParserError(e) { + QueryParserError.super_.call(this, e.message, this.constructor); + this.internalError = e; +} +util.inherits(QueryParserError, errors.DriverError); + +/** @private */ +function TimeoutError (message) { + TimeoutError.super_.call(this, message, this.constructor); + this.info = 'Represents an error that happens when the maximum amount of time for an operation passed.'; +} +util.inherits(TimeoutError, errors.DriverError); + +exports.opcodes = opcodes; +exports.consistencies = consistencies; +exports.consistencyToString = consistencyToString; +exports.dataTypes = dataTypes; +exports.getDataTypeNameByCode = getDataTypeNameByCode; +exports.distance = distance; +exports.frameFlags = frameFlags; +exports.protocolEvents = protocolEvents; +exports.protocolVersion = protocolVersion; +exports.responseErrorCodes = responseErrorCodes; +exports.resultKind = resultKind; +exports.timeuuid = timeuuid; +exports.uuid = uuid; +exports.BigDecimal = require('./big-decimal'); +exports.Duration = require('./duration'); +exports.FrameHeader = FrameHeader; +exports.InetAddress = require('./inet-address'); +exports.Integer = require('./integer'); +exports.LocalDate = require('./local-date'); +exports.LocalTime = require('./local-time'); +exports.Long = Long; +exports.ResultSet = require('./result-set'); +exports.ResultStream = require('./result-stream'); +exports.Row = require('./row'); +//export DriverError for 
backward-compatibility +exports.DriverError = errors.DriverError; +exports.TimeoutError = TimeoutError; +exports.TimeUuid = TimeUuid; +exports.Tuple = require('./tuple'); +exports.Uuid = Uuid; +exports.unset = unset; +exports.generateTimestamp = generateTimestamp; diff --git a/node_modules/cassandra-driver/lib/types/inet-address.js b/node_modules/cassandra-driver/lib/types/inet-address.js new file mode 100644 index 0000000..1647dea --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/inet-address.js @@ -0,0 +1,248 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const utils = require('../utils'); + +/** @module types */ +/** + * Creates a new instance of InetAddress + * @class + * @classdesc Represents an v4 or v6 Internet Protocol (IP) address. + * @param {Buffer} buffer + * @constructor + */ +function InetAddress(buffer) { + if (!(buffer instanceof Buffer) || (buffer.length !== 4 && buffer.length !== 16)) { + throw new TypeError('The ip address must contain 4 or 16 bytes'); + } + + /** + * Immutable buffer that represents the IP address + * @type Array + */ + this.buffer = buffer; + + /** + * Returns the length of the underlying buffer + * @type Number + */ + this.length = buffer.length; + + /** + * Returns the Ip version (4 or 6) + * @type Number + */ + this.version = buffer.length === 4 ? 
4 : 6; +} + +/** + * Parses the string representation and returns an Ip address + * @param {String} value + */ +InetAddress.fromString = function (value) { + if (!value) { + return new InetAddress(utils.allocBufferFromArray([0, 0, 0, 0])); + } + const ipv4Pattern = /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/; + const ipv6Pattern = /^[\da-f:.]+$/i; + let parts; + if (ipv4Pattern.test(value)) { + parts = value.split('.'); + return new InetAddress(utils.allocBufferFromArray(parts)); + } + if (!ipv6Pattern.test(value)) { + throw new TypeError('Value could not be parsed as InetAddress: ' + value); + } + parts = value.split(':'); + if (parts.length < 3) { + throw new TypeError('Value could not be parsed as InetAddress: ' + value); + } + const buffer = utils.allocBufferUnsafe(16); + let filling = 8 - parts.length + 1; + let applied = false; + let offset = 0; + const embeddedIp4 = ipv4Pattern.test(parts[parts.length - 1]); + if (embeddedIp4) { + // Its IPv6 address with an embedded IPv4 address: + // subtract 1 from the potential empty filling as ip4 contains 4 bytes instead of 2 of a ipv6 section + filling -= 1; + } + function writeItem(uIntValue) { + buffer.writeUInt8(+uIntValue, offset++); + } + for (let i = 0; i < parts.length; i++) { + const item = parts[i]; + if (item) { + if (embeddedIp4 && i === parts.length - 1) { + item.split('.').forEach(writeItem); + break; + } + buffer.writeUInt16BE(parseInt(item, 16), offset); + offset = offset + 2; + continue; + } + //its an empty string + if (applied) { + //there could be 2 occurrences of empty string + filling = 1; + } + applied = true; + for (let j = 0; j < filling; j++) { + buffer[offset++] = 0; + buffer[offset++] = 0; + } + } + if (embeddedIp4 && !isValidIPv4Mapped(buffer)) { + throw new TypeError('Only IPv4-Mapped IPv6 addresses are allowed as IPv6 address with embedded IPv4 address'); + } + return new InetAddress(buffer); +}; + +/** + * Compares 2 addresses and returns true if the underlying bytes are the same + * @param 
{InetAddress} other + * @returns {Boolean} + */ +InetAddress.prototype.equals = function (other) { + if (!(other instanceof InetAddress)) { + return false; + } + return (this.buffer.length === other.buffer.length && + this.buffer.toString('hex') === other.buffer.toString('hex')); +}; + +/** + * Returns the underlying buffer + * @returns {Buffer} + */ +InetAddress.prototype.getBuffer = function () { + return this.buffer; +}; + +/** + * Provide the name of the constructor and the string representation + * @returns {string} + */ +InetAddress.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * Returns the string representation of the IP address. + *

For v4 IP addresses, a string in the form of d.d.d.d is returned.

+ *

+ * For v6 IP addresses, a string in the form of x:x:x:x:x:x:x:x is returned, where the 'x's are the hexadecimal + * values of the eight 16-bit pieces of the address, according to rfc5952. + * In cases where there is more than one field of only zeros, it can be shortened. For example, 2001:0db8:0:0:0:1:0:1 + * will be expressed as 2001:0db8::1:0:1. + *

+ * @param {String} [encoding] + * @returns {String} + */ +InetAddress.prototype.toString = function (encoding) { + if (encoding === 'hex') { + //backward compatibility: behave in the same way as the buffer + return this.buffer.toString('hex'); + } + if (this.buffer.length === 4) { + return ( + this.buffer[0] + '.' + + this.buffer[1] + '.' + + this.buffer[2] + '.' + + this.buffer[3] + ); + } + let start = -1; + const longest = { length: 0, start: -1}; + function checkLongest (i) { + if (start >= 0) { + //close the group + const length = i - start; + if (length > longest.length) { + longest.length = length; + longest.start = start; + start = -1; + } + } + } + //get the longest 16-bit group of zeros + for (let i = 0; i < this.buffer.length; i = i + 2) { + if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) { + //its a group of zeros + if (start < 0) { + start = i; + } + + // at the end of the buffer, make a final call to checkLongest. + if(i === this.buffer.length - 2) { + checkLongest(i+2); + } + continue; + } + //its a group of non-zeros + checkLongest(i); + } + + let address = ''; + for (let h = 0; h < this.buffer.length; h = h + 2) { + if (h === longest.start) { + address += ':'; + continue; + } + if (h < (longest.start + longest.length) && h > longest.start) { + //its a group of zeros + continue; + } + if (address.length > 0) { + address += ':'; + } + address += ((this.buffer[h] << 8) | this.buffer[h+1]).toString(16); + } + if (address.charAt(address.length-1) === ':') { + address += ':'; + } + return address; +}; + +/** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. 
+ */ +InetAddress.prototype.toJSON = function () { + return this.toString(); +}; + +/** + * Validates for a IPv4-Mapped IPv6 according to https://tools.ietf.org/html/rfc4291#section-2.5.5 + * @private + * @param {Buffer} buffer + */ +function isValidIPv4Mapped(buffer) { + // check the form + // | 80 bits | 16 | 32 bits + // +----------------+----+------------- + // |0000........0000|FFFF| IPv4 address + + for (let i = 0; i < buffer.length - 6; i++) { + if (buffer[i] !== 0) { + return false; + } + } + return !(buffer[10] !== 255 || buffer[11] !== 255); +} + +module.exports = InetAddress; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/integer.js b/node_modules/cassandra-driver/lib/types/integer.js new file mode 100644 index 0000000..d70d789 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/integer.js @@ -0,0 +1,855 @@ +// Copyright 2009 The Closure Library Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS-IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** @module types */ + +var utils = require('../utils'); + +/** + * Constructs a two's-complement integer an array containing bits of the + * integer in 32-bit (signed) pieces, given in little-endian order (i.e., + * lowest-order bits in the first piece), and the sign of -1 or 0. + * + * See the from* functions below for other convenient ways of constructing + * Integers. 
+ * + * The internal representation of an integer is an array of 32-bit signed + * pieces, along with a sign (0 or -1) that indicates the contents of all the + * other 32-bit pieces out to infinity. We use 32-bit pieces because these are + * the size of integers on which Javascript performs bit-operations. For + * operations like addition and multiplication, we split each number into 16-bit + * pieces, which can easily be multiplied within Javascript's floating-point + * representation without overflow or change in sign. + * + * @constructor + * @param {Array.} bits Array containing the bits of the number. + * @param {number} sign The sign of the number: -1 for negative and 0 positive. + * @final + */ +function Integer (bits, sign) { + /** + * @type {!Array.} + * @private + */ + this.bits_ = []; + + /** + * @type {number} + * @private + */ + this.sign_ = sign; + + // Copy the 32-bit signed integer values passed in. We prune out those at the + // top that equal the sign since they are redundant. + var top = true; + for (var i = bits.length - 1; i >= 0; i--) { + var val = bits[i] | 0; + if (!top || val != sign) { + this.bits_[i] = val; + top = false; + } + } +} + + +// NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the +// from* methods on which they depend. + + +/** + * A cache of the Integer representations of small integer values. + * @type {!Object} + * @private + */ +Integer.IntCache_ = {}; + + +/** + * Returns an Integer representing the given (32-bit) integer value. + * @param {number} value A 32-bit integer value. + * @return {!Integer} The corresponding Integer value. + */ +Integer.fromInt = function(value) { + if (-128 <= value && value < 128) { + var cachedObj = Integer.IntCache_[value]; + if (cachedObj) { + return cachedObj; + } + } + + var obj = new Integer([value | 0], value < 0 ? 
-1 : 0); + if (-128 <= value && value < 128) { + Integer.IntCache_[value] = obj; + } + return obj; +}; + + +/** + * Returns an Integer representing the given value, provided that it is a finite + * number. Otherwise, zero is returned. + * @param {number} value The value in question. + * @return {!Integer} The corresponding Integer value. + */ +Integer.fromNumber = function(value) { + if (isNaN(value) || !isFinite(value)) { + return Integer.ZERO; + } else if (value < 0) { + return Integer.fromNumber(-value).negate(); + } else { + var bits = []; + var pow = 1; + for (var i = 0; value >= pow; i++) { + bits[i] = (value / pow) | 0; + pow *= Integer.TWO_PWR_32_DBL_; + } + return new Integer(bits, 0); + } +}; + + +/** + * Returns a Integer representing the value that comes by concatenating the + * given entries, each is assumed to be 32 signed bits, given in little-endian + * order (lowest order bits in the lowest index), and sign-extending the highest + * order 32-bit value. + * @param {Array.} bits The bits of the number, in 32-bit signed pieces, + * in little-endian order. + * @return {!Integer} The corresponding Integer value. + */ +Integer.fromBits = function(bits) { + var high = bits[bits.length - 1]; + //noinspection JSBitwiseOperatorUsage + return new Integer(bits, high & (1 << 31) ? -1 : 0); +}; + + +/** + * Returns an Integer representation of the given string, written using the + * given radix. + * @param {string} str The textual representation of the Integer. + * @param {number=} opt_radix The radix in which the text is written. + * @return {!Integer} The corresponding Integer value. 
+ */ +Integer.fromString = function(str, opt_radix) { + if (str.length == 0) { + throw TypeError('number format error: empty string'); + } + + var radix = opt_radix || 10; + if (radix < 2 || 36 < radix) { + throw Error('radix out of range: ' + radix); + } + + if (str.charAt(0) == '-') { + return Integer.fromString(str.substring(1), radix).negate(); + } else if (str.indexOf('-') >= 0) { + throw TypeError('number format error: interior "-" character'); + } + + // Do several (8) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. + var radixToPower = Integer.fromNumber(Math.pow(radix, 8)); + + var result = Integer.ZERO; + for (var i = 0; i < str.length; i += 8) { + var size = Math.min(8, str.length - i); + var value = parseInt(str.substring(i, i + size), radix); + if (size < 8) { + var power = Integer.fromNumber(Math.pow(radix, size)); + result = result.multiply(power).add(Integer.fromNumber(value)); + } else { + result = result.multiply(radixToPower); + result = result.add(Integer.fromNumber(value)); + } + } + return result; +}; + +/** + * Returns an Integer representation of a given big endian Buffer. + * The internal representation of bits contains bytes in groups of 4 + * @param {Buffer} buf + * @returns {Integer} + */ +Integer.fromBuffer = function (buf) { + var bits = new Array(Math.ceil(buf.length / 4)); + //noinspection JSBitwiseOperatorUsage + var sign = buf[0] & (1 << 7) ? 
-1 : 0; + for (var i = 0; i < bits.length; i++) { + var offset = buf.length - ((i + 1) * 4); + var value; + if (offset < 0) { + //The buffer length is not multiple of 4 + offset = offset + 4; + value = 0; + for (var j = 0; j < offset; j++) { + var byte = buf[j]; + if (sign === -1) { + //invert the bits + byte = ~byte & 0xff; + } + value = value | (byte << (offset - j - 1) * 8); + } + if (sign === -1) { + //invert all the bits + value = ~value; + } + } + else { + value = buf.readInt32BE(offset); + } + bits[i] = value; + } + return new Integer(bits, sign); +}; + +/** + * Returns a big endian buffer representation of an Integer. + * Internally the bits are represented using 4 bytes groups (numbers), + * in the Buffer representation there might be the case where we need less than the 4 bytes. + * For example: 0x00000001 -> '01', 0xFFFFFFFF -> 'FF', 0xFFFFFF01 -> 'FF01' + * @param {Integer} value + * @returns {Buffer} +*/ +Integer.toBuffer = function (value) { + var sign = value.sign_; + var bits = value.bits_; + if (bits.length === 0) { + //[0] or [0xffffffff] + return utils.allocBufferFromArray([value.sign_]); + } + //the high bits might need to be represented in less than 4 bytes + var highBits = bits[bits.length-1]; + if (sign === -1) { + highBits = ~highBits; + } + var high = []; + if (highBits >>> 24 > 0) { + high.push((highBits >> 24) & 0xff); + } + if (highBits >>> 16 > 0) { + high.push((highBits >> 16) & 0xff); + } + if (highBits >>> 8 > 0) { + high.push((highBits >> 8) & 0xff); + } + high.push(highBits & 0xff); + if (sign === -1) { + //The byte containing the sign bit got removed + if (high[0] >> 7 !== 0) { + //it is going to be negated + high.unshift(0); + } + } + else if (high[0] >> 7 !== 0) { + //its positive but it lost the byte containing the sign bit + high.unshift(0); + } + var buf = utils.allocBufferUnsafe(high.length + ((bits.length-1) * 4)); + for (var j = 0; j < high.length; j++) { + var b = high[j]; + if (sign === -1) { + buf[j] = ~b; + } + else { 
+ buf[j] = b; + } + } + for (var i = 0; i < bits.length - 1; i++) { + var group = bits[bits.length - 2 - i]; + var offset = high.length + i * 4; + buf.writeInt32BE(group, offset); + } + return buf; +}; + + +/** + * A number used repeatedly in calculations. This must appear before the first + * call to the from* functions below. + * @type {number} + * @private + */ +Integer.TWO_PWR_32_DBL_ = (1 << 16) * (1 << 16); + + +/** @type {!Integer} */ +Integer.ZERO = Integer.fromInt(0); + + +/** @type {!Integer} */ +Integer.ONE = Integer.fromInt(1); + + +/** + * @type {!Integer} + * @private + */ +Integer.TWO_PWR_24_ = Integer.fromInt(1 << 24); + + +/** + * Returns the value, assuming it is a 32-bit integer. + * @return {number} The corresponding int value. + */ +Integer.prototype.toInt = function() { + return this.bits_.length > 0 ? this.bits_[0] : this.sign_; +}; + + +/** @return {number} The closest floating-point representation to this value. */ +Integer.prototype.toNumber = function() { + if (this.isNegative()) { + return -this.negate().toNumber(); + } else { + var val = 0; + var pow = 1; + for (var i = 0; i < this.bits_.length; i++) { + val += this.getBitsUnsigned(i) * pow; + pow *= Integer.TWO_PWR_32_DBL_; + } + return val; + } +}; + + +/** + * @param {number=} opt_radix The radix in which the text should be written. + * @return {string} The textual representation of this value. + * @override + */ +Integer.prototype.toString = function(opt_radix) { + var radix = opt_radix || 10; + if (radix < 2 || 36 < radix) { + throw Error('radix out of range: ' + radix); + } + + if (this.isZero()) { + return '0'; + } else if (this.isNegative()) { + return '-' + this.negate().toString(radix); + } + + // Do several (6) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. 
+ var radixToPower = Integer.fromNumber(Math.pow(radix, 6)); + + var rem = this; + var result = ''; + while (true) { + var remDiv = rem.divide(radixToPower); + var intval = rem.subtract(remDiv.multiply(radixToPower)).toInt(); + var digits = intval.toString(radix); + + rem = remDiv; + if (rem.isZero()) { + return digits + result; + } else { + while (digits.length < 6) { + digits = '0' + digits; + } + result = '' + digits + result; + } + } +}; + + +/** + * Returns the index-th 32-bit (signed) piece of the Integer according to + * little-endian order (i.e., index 0 contains the smallest bits). + * @param {number} index The index in question. + * @return {number} The requested 32-bits as a signed number. + */ +Integer.prototype.getBits = function(index) { + if (index < 0) { + return 0; // Allowing this simplifies bit shifting operations below... + } else if (index < this.bits_.length) { + return this.bits_[index]; + } else { + return this.sign_; + } +}; + + +/** + * Returns the index-th 32-bit piece as an unsigned number. + * @param {number} index The index in question. + * @return {number} The requested 32-bits as an unsigned number. + */ +Integer.prototype.getBitsUnsigned = function(index) { + var val = this.getBits(index); + return val >= 0 ? val : Integer.TWO_PWR_32_DBL_ + val; +}; + + +/** @return {number} The sign bit of this number, -1 or 0. */ +Integer.prototype.getSign = function() { + return this.sign_; +}; + + +/** @return {boolean} Whether this value is zero. */ +Integer.prototype.isZero = function() { + if (this.sign_ != 0) { + return false; + } + for (var i = 0; i < this.bits_.length; i++) { + if (this.bits_[i] != 0) { + return false; + } + } + return true; +}; + + +/** @return {boolean} Whether this value is negative. */ +Integer.prototype.isNegative = function() { + return this.sign_ == -1; +}; + + +/** @return {boolean} Whether this value is odd. 
*/ +Integer.prototype.isOdd = function() { + return (this.bits_.length == 0) && (this.sign_ == -1) || + (this.bits_.length > 0) && ((this.bits_[0] & 1) != 0); +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer equals the other. + */ +Integer.prototype.equals = function(other) { + if (this.sign_ != other.sign_) { + return false; + } + var len = Math.max(this.bits_.length, other.bits_.length); + for (var i = 0; i < len; i++) { + if (this.getBits(i) != other.getBits(i)) { + return false; + } + } + return true; +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer does not equal the other. + */ +Integer.prototype.notEquals = function(other) { + return !this.equals(other); +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than the other. + */ +Integer.prototype.greaterThan = function(other) { + return this.compare(other) > 0; +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than or equal to the other. + */ +Integer.prototype.greaterThanOrEqual = function(other) { + return this.compare(other) >= 0; +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than the other. + */ +Integer.prototype.lessThan = function(other) { + return this.compare(other) < 0; +}; + + +/** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than or equal to the other. + */ +Integer.prototype.lessThanOrEqual = function(other) { + return this.compare(other) <= 0; +}; + + +/** + * Compares this Integer with the given one. + * @param {Integer} other Integer to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. 
+ */ +Integer.prototype.compare = function(other) { + var diff = this.subtract(other); + if (diff.isNegative()) { + return -1; + } else if (diff.isZero()) { + return 0; + } else { + return +1; + } +}; + + +/** + * Returns an integer with only the first numBits bits of this value, sign + * extended from the final bit. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} The shorted integer value. + */ +Integer.prototype.shorten = function(numBits) { + var arr_index = (numBits - 1) >> 5; + var bit_index = (numBits - 1) % 32; + var bits = []; + for (var i = 0; i < arr_index; i++) { + bits[i] = this.getBits(i); + } + var sigBits = bit_index == 31 ? 0xFFFFFFFF : (1 << (bit_index + 1)) - 1; + var val = this.getBits(arr_index) & sigBits; + //noinspection JSBitwiseOperatorUsage + if (val & (1 << bit_index)) { + val |= 0xFFFFFFFF - sigBits; + bits[arr_index] = val; + return new Integer(bits, -1); + } else { + bits[arr_index] = val; + return new Integer(bits, 0); + } +}; + + +/** @return {!Integer} The negation of this value. */ +Integer.prototype.negate = function() { + return this.not().add(Integer.ONE); +}; + + +/** + * Returns the sum of this and the given Integer. + * @param {Integer} other The Integer to add to this. + * @return {!Integer} The Integer result. + */ +Integer.prototype.add = function(other) { + var len = Math.max(this.bits_.length, other.bits_.length); + var arr = []; + var carry = 0; + + for (var i = 0; i <= len; i++) { + var a1 = this.getBits(i) >>> 16; + var a0 = this.getBits(i) & 0xFFFF; + + var b1 = other.getBits(i) >>> 16; + var b0 = other.getBits(i) & 0xFFFF; + + var c0 = carry + a0 + b0; + var c1 = (c0 >>> 16) + a1 + b1; + carry = c1 >>> 16; + c0 &= 0xFFFF; + c1 &= 0xFFFF; + arr[i] = (c1 << 16) | c0; + } + return Integer.fromBits(arr); +}; + + +/** + * Returns the difference of this and the given Integer. + * @param {Integer} other The Integer to subtract from this. + * @return {!Integer} The Integer result. 
+ */ +Integer.prototype.subtract = function(other) { + return this.add(other.negate()); +}; + + +/** + * Returns the product of this and the given Integer. + * @param {Integer} other The Integer to multiply against this. + * @return {!Integer} The product of this and the other. + */ +Integer.prototype.multiply = function(other) { + if (this.isZero()) { + return Integer.ZERO; + } else if (other.isZero()) { + return Integer.ZERO; + } + + if (this.isNegative()) { + if (other.isNegative()) { + return this.negate().multiply(other.negate()); + } else { + return this.negate().multiply(other).negate(); + } + } else if (other.isNegative()) { + return this.multiply(other.negate()).negate(); + } + + // If both numbers are small, use float multiplication + if (this.lessThan(Integer.TWO_PWR_24_) && + other.lessThan(Integer.TWO_PWR_24_)) { + return Integer.fromNumber(this.toNumber() * other.toNumber()); + } + + // Fill in an array of 16-bit products. + var len = this.bits_.length + other.bits_.length; + var arr = []; + for (var i = 0; i < 2 * len; i++) { + arr[i] = 0; + } + for (var i = 0; i < this.bits_.length; i++) { + for (var j = 0; j < other.bits_.length; j++) { + var a1 = this.getBits(i) >>> 16; + var a0 = this.getBits(i) & 0xFFFF; + + var b1 = other.getBits(j) >>> 16; + var b0 = other.getBits(j) & 0xFFFF; + + arr[2 * i + 2 * j] += a0 * b0; + Integer.carry16_(arr, 2 * i + 2 * j); + arr[2 * i + 2 * j + 1] += a1 * b0; + Integer.carry16_(arr, 2 * i + 2 * j + 1); + arr[2 * i + 2 * j + 1] += a0 * b1; + Integer.carry16_(arr, 2 * i + 2 * j + 1); + arr[2 * i + 2 * j + 2] += a1 * b1; + Integer.carry16_(arr, 2 * i + 2 * j + 2); + } + } + + // Combine the 16-bit values into 32-bit values. + for (var i = 0; i < len; i++) { + arr[i] = (arr[2 * i + 1] << 16) | arr[2 * i]; + } + for (var i = len; i < 2 * len; i++) { + arr[i] = 0; + } + return new Integer(arr, 0); +}; + + +/** + * Carries any overflow from the given index into later entries. 
+ * @param {Array.} bits Array of 16-bit values in little-endian order. + * @param {number} index The index in question. + * @private + */ +Integer.carry16_ = function(bits, index) { + while ((bits[index] & 0xFFFF) != bits[index]) { + bits[index + 1] += bits[index] >>> 16; + bits[index] &= 0xFFFF; + } +}; + + +/** + * Returns this Integer divided by the given one. + * @param {Integer} other Th Integer to divide this by. + * @return {!Integer} This value divided by the given one. + */ +Integer.prototype.divide = function(other) { + if (other.isZero()) { + throw Error('division by zero'); + } else if (this.isZero()) { + return Integer.ZERO; + } + + if (this.isNegative()) { + if (other.isNegative()) { + return this.negate().divide(other.negate()); + } else { + return this.negate().divide(other).negate(); + } + } else if (other.isNegative()) { + return this.divide(other.negate()).negate(); + } + + // Repeat the following until the remainder is less than other: find a + // floating-point that approximates remainder / other *from below*, add this + // into the result, and subtract it from the remainder. It is critical that + // the approximate value is less than or equal to the real value so that the + // remainder never becomes negative. + var res = Integer.ZERO; + var rem = this; + while (rem.greaterThanOrEqual(other)) { + // Approximate the result of division. This may be a little greater or + // smaller than the actual value. + var approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber())); + + // We will tweak the approximate result by changing it in the 48-th digit or + // the smallest non-fractional digit, whichever is larger. + var log2 = Math.ceil(Math.log(approx) / Math.LN2); + var delta = (log2 <= 48) ? 1 : Math.pow(2, log2 - 48); + + // Decrease the approximation until it is smaller than the remainder. Note + // that if it is too large, the product overflows and is negative. 
+ var approxRes = Integer.fromNumber(approx); + var approxRem = approxRes.multiply(other); + while (approxRem.isNegative() || approxRem.greaterThan(rem)) { + approx -= delta; + approxRes = Integer.fromNumber(approx); + approxRem = approxRes.multiply(other); + } + + // We know the answer can't be zero... and actually, zero would cause + // infinite recursion since we would make no progress. + if (approxRes.isZero()) { + approxRes = Integer.ONE; + } + + res = res.add(approxRes); + rem = rem.subtract(approxRem); + } + return res; +}; + + +/** + * Returns this Integer modulo the given one. + * @param {Integer} other The Integer by which to mod. + * @return {!Integer} This value modulo the given one. + */ +Integer.prototype.modulo = function(other) { + return this.subtract(this.divide(other).multiply(other)); +}; + + +/** @return {!Integer} The bitwise-NOT of this value. */ +Integer.prototype.not = function() { + var len = this.bits_.length; + var arr = []; + for (var i = 0; i < len; i++) { + arr[i] = ~this.bits_[i]; + } + return new Integer(arr, ~this.sign_); +}; + + +/** + * Returns the bitwise-AND of this Integer and the given one. + * @param {Integer} other The Integer to AND with this. + * @return {!Integer} The bitwise-AND of this and the other. + */ +Integer.prototype.and = function(other) { + var len = Math.max(this.bits_.length, other.bits_.length); + var arr = []; + for (var i = 0; i < len; i++) { + arr[i] = this.getBits(i) & other.getBits(i); + } + return new Integer(arr, this.sign_ & other.sign_); +}; + + +/** + * Returns the bitwise-OR of this Integer and the given one. + * @param {Integer} other The Integer to OR with this. + * @return {!Integer} The bitwise-OR of this and the other. 
+ */ +Integer.prototype.or = function(other) { + var len = Math.max(this.bits_.length, other.bits_.length); + var arr = []; + for (var i = 0; i < len; i++) { + arr[i] = this.getBits(i) | other.getBits(i); + } + return new Integer(arr, this.sign_ | other.sign_); +}; + + +/** + * Returns the bitwise-XOR of this Integer and the given one. + * @param {Integer} other The Integer to XOR with this. + * @return {!Integer} The bitwise-XOR of this and the other. + */ +Integer.prototype.xor = function(other) { + var len = Math.max(this.bits_.length, other.bits_.length); + var arr = []; + for (var i = 0; i < len; i++) { + arr[i] = this.getBits(i) ^ other.getBits(i); + } + return new Integer(arr, this.sign_ ^ other.sign_); +}; + + +/** + * Returns this value with bits shifted to the left by the given amount. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the left by the given amount. + */ +Integer.prototype.shiftLeft = function(numBits) { + var arr_delta = numBits >> 5; + var bit_delta = numBits % 32; + var len = this.bits_.length + arr_delta + (bit_delta > 0 ? 1 : 0); + var arr = []; + for (var i = 0; i < len; i++) { + if (bit_delta > 0) { + arr[i] = (this.getBits(i - arr_delta) << bit_delta) | + (this.getBits(i - arr_delta - 1) >>> (32 - bit_delta)); + } else { + arr[i] = this.getBits(i - arr_delta); + } + } + return new Integer(arr, this.sign_); +}; + + +/** + * Returns this value with bits shifted to the right by the given amount. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the right by the given amount. 
+ */ +Integer.prototype.shiftRight = function(numBits) { + var arr_delta = numBits >> 5; + var bit_delta = numBits % 32; + var len = this.bits_.length - arr_delta; + var arr = []; + for (var i = 0; i < len; i++) { + if (bit_delta > 0) { + arr[i] = (this.getBits(i + arr_delta) >>> bit_delta) | + (this.getBits(i + arr_delta + 1) << (32 - bit_delta)); + } else { + arr[i] = this.getBits(i + arr_delta); + } + } + return new Integer(arr, this.sign_); +}; + +/** + * Provide the name of the constructor and the string representation + * @returns {string} + */ +Integer.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * Returns a Integer whose value is the absolute value of this + * @returns {Integer} + */ +Integer.prototype.abs = function () { + return this.sign_ === 0 ? this : this.negate(); +}; + +/** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ +Integer.prototype.toJSON = function () { + return this.toString(); +}; + +module.exports = Integer; diff --git a/node_modules/cassandra-driver/lib/types/local-date.js b/node_modules/cassandra-driver/lib/types/local-date.js new file mode 100644 index 0000000..9651d73 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/local-date.js @@ -0,0 +1,252 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const util = require('util'); + +const utils = require('../utils'); +/** @module types */ + +/** + * @private + * @const + */ +const millisecondsPerDay = 86400000; +/** + * @private + */ +const dateCenter = Math.pow(2,31); +/** + * + * Creates a new instance of LocalDate. + * @class + * @classdesc A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. + *

+ * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate. + *

+ *

+ * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone. + *

+ *

+ * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13]. + * In the event that year, month, day parameters do not fall within the ES5 date range an Error will be thrown. If you wish to represent a date outside of this range, pass a single + * parameter indicating the days since epoch. For example, -1 represents 1969-12-31. + *

+ * @param {Number} year The year or days since epoch. If days since epoch, month and day should not be provided. + * @param {Number} month Between 1 and 12 inclusive. + * @param {Number} day Between 1 and the number of days in the given month of the given year. + * + * @property {Date} date The date representation if falls within a range of an ES5 data type, otherwise an invalid date. + * + * @constructor + */ +function LocalDate(year, month, day) { + //implementation detail: internally uses a UTC based date + if (typeof year === 'number' && typeof month === 'number' && typeof day === 'number') { + // Use setUTCFullYear as if there is a 2 digit year, Date.UTC() assumes + // that is the 20th century. + this.date = new Date(); + this.date.setUTCHours(0, 0, 0, 0); + this.date.setUTCFullYear(year, month-1, day); + if(isNaN(this.date.getTime())) { + throw new Error(util.format('%d-%d-%d does not form a valid ES5 date!', + year, month, day)); + } + } + else if (typeof month === 'undefined' && typeof day === 'undefined') { + if (typeof year === 'number') { + //in days since epoch. + if(year < -2147483648 || year > 2147483647) { + throw new Error('You must provide a valid value for days since epoch (-2147483648 <= value <= 2147483647).'); + } + this.date = new Date(year * millisecondsPerDay); + } + } + + if (typeof this.date === 'undefined') { + throw new Error('You must provide a valid year, month and day'); + } + + /** + * If date cannot be represented yet given a valid days since epoch, track + * it internally. + */ + this._value = isNaN(this.date.getTime()) ? year : null; + + /** + * A number representing the year. May return NaN if cannot be represented as + * a Date. + * @type Number + */ + this.year = this.date.getUTCFullYear(); + /** + * A number between 1 and 12 inclusive representing the month. May return + * NaN if cannot be represented as a Date. 
+ * @type Number + */ + this.month = this.date.getUTCMonth() + 1; + /** + * A number between 1 and the number of days in the given month of the given year (28, 29, 30, 31). + * May return NaN if cannot be represented as a Date. + * @type Number + */ + this.day = this.date.getUTCDate(); +} + +/** + * Creates a new instance of LocalDate using the current year, month and day from the system clock in the default time-zone. + */ +LocalDate.now = function () { + return LocalDate.fromDate(new Date()); +}; + +/** + * Creates a new instance of LocalDate using the current date from the system clock at UTC. + */ +LocalDate.utcNow = function () { + return new LocalDate(Date.now()); +}; + + +/** + * Creates a new instance of LocalDate using the year, month and day from the provided local date time. + * @param {Date} date + */ +LocalDate.fromDate = function (date) { + if (isNaN(date.getTime())) { + throw new TypeError('Invalid date: ' + date); + } + return new LocalDate(date.getFullYear(), date.getMonth() + 1, date.getDate()); +}; + +/** + * Creates a new instance of LocalDate using the year, month and day provided in the form: yyyy-mm-dd or + * days since epoch (i.e. -1 for Dec 31, 1969). + * @param {String} value + */ +LocalDate.fromString = function (value) { + const dashCount = (value.match(/-/g) || []).length; + if(dashCount >= 2) { + let multiplier = 1; + if (value[0] === '-') { + value = value.substring(1); + multiplier = -1; + } + const parts = value.split('-'); + return new LocalDate(multiplier * parseInt(parts[0], 10), parseInt(parts[1], 10), parseInt(parts[2], 10)); + } + if(value.match(/^-?\d+$/)) { + // Parse as days since epoch. + return new LocalDate(parseInt(value, 10)); + } + throw new Error("Invalid input '" + value + "'."); +}; + +/** + * Creates a new instance of LocalDate using the bytes representation. + * @param {Buffer} buffer + */ +LocalDate.fromBuffer = function (buffer) { + //move to unix epoch: 0. 
+ return new LocalDate((buffer.readUInt32BE(0) - dateCenter)); +}; + +/** + * Compares this LocalDate with the given one. + * @param {LocalDate} other date to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ +LocalDate.prototype.compare = function (other) { + const thisValue = isNaN(this.date.getTime()) ? this._value * millisecondsPerDay : this.date.getTime(); + const otherValue = isNaN(other.date.getTime()) ? other._value * millisecondsPerDay : other.date.getTime(); + const diff = thisValue - otherValue; + if (diff < 0) { + return -1; + } + if (diff > 0) { + return 1; + } + return 0; +}; + +/** + * Returns true if the value of the LocalDate instance and other are the same + * @param {LocalDate} other + * @returns {Boolean} + */ +LocalDate.prototype.equals = function (other) { + return ((other instanceof LocalDate)) && this.compare(other) === 0; +}; + +LocalDate.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * Gets the bytes representation of the instance. + * @returns {Buffer} + */ +LocalDate.prototype.toBuffer = function () { + //days since unix epoch + const daysSinceEpoch = isNaN(this.date.getTime()) ? this._value : Math.floor(this.date.getTime() / millisecondsPerDay); + const value = daysSinceEpoch + dateCenter; + const buf = utils.allocBufferUnsafe(4); + buf.writeUInt32BE(value, 0); + return buf; +}; + +/** + * Gets the string representation of the instance in the form: yyyy-mm-dd if + * the value can be parsed as a Date, otherwise days since epoch. + * @returns {String} + */ +LocalDate.prototype.toString = function () { + let result; + //if cannot be parsed as date, return days since epoch representation. 
+ if (isNaN(this.date.getTime())) { + return this._value.toString(); + } + if (this.year < 0) { + result = '-' + fillZeros((this.year * -1).toString(), 4); + } + else { + result = fillZeros(this.year.toString(), 4); + } + result += '-' + fillZeros(this.month.toString(), 2) + '-' + fillZeros(this.day.toString(), 2); + return result; +}; + +/** + * Gets the string representation of the instance in the form: yyyy-mm-dd, valid for JSON. + * @returns {String} + */ +LocalDate.prototype.toJSON = function () { + return this.toString(); +}; + +/** + * @param {String} value + * @param {Number} amount + * @private + */ +function fillZeros(value, amount) { + if (value.length >= amount) { + return value; + } + return utils.stringRepeat('0', amount - value.length) + value; +} + +module.exports = LocalDate; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/local-time.js b/node_modules/cassandra-driver/lib/types/local-time.js new file mode 100644 index 0000000..2b726cf --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/local-time.js @@ -0,0 +1,295 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const Long = require('long'); +const util = require('util'); +const utils = require('../utils'); +/** @module types */ + +/** + * @const + * @private + * */ +const maxNanos = Long.fromString('86399999999999'); +/** + * Nanoseconds in a second + * @const + * @private + * */ +const nanoSecInSec = Long.fromNumber(1000000000); +/** + * Nanoseconds in a millisecond + * @const + * @private + * */ +const nanoSecInMillis = Long.fromNumber(1000000); +/** + * Milliseconds in day + * @const + * @private + * */ +const millisInDay = 86400000; +/** + * + * Creates a new instance of LocalTime. + * @class + * @classdesc A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. + *

+ * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45.30.123456789" can be stored in a LocalTime. + *

+ * @param {Long} totalNanoseconds Total nanoseconds since midnight. + * @constructor + */ +function LocalTime(totalNanoseconds) { + if (!(totalNanoseconds instanceof Long)) { + throw new Error('You must specify a Long value as totalNanoseconds'); + } + if (totalNanoseconds.lessThan(Long.ZERO) || totalNanoseconds.greaterThan(maxNanos)) { + throw new Error('Total nanoseconds out of range'); + } + this.value = totalNanoseconds; + + /** + * Gets the hour component of the time represented by the current instance, a number from 0 to 23. + * @type Number + */ + this.hour = this._getParts()[0]; + /** + * Gets the minute component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + this.minute = this._getParts()[1]; + /** + * Gets the second component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + this.second = this._getParts()[2]; + /** + * Gets the nanoseconds component of the time represented by the current instance, a number from 0 to 999999999. + * @type Number + */ + this.nanosecond = this._getParts()[3]; +} + +/** + * Parses an string representation and returns a new LocalDate. 
+ * @param {String} value + * @returns {LocalTime} + */ +LocalTime.fromString = function (value) { + if (typeof value !== 'string') { + throw new Error('Argument type invalid: ' + util.inspect(value)); + } + const parts = value.split(':'); + let millis = parseInt(parts[0], 10) * 3600000 + parseInt(parts[1], 10) * 60000; + let nanos; + if (parts.length === 3) { + const secParts = parts[2].split('.'); + millis += parseInt(secParts[0], 10) * 1000; + if (secParts.length === 2) { + nanos = secParts[1]; + //add zeros at the end + nanos = nanos + utils.stringRepeat('0', 9 - nanos.length); + } + } + return LocalTime.fromMilliseconds(millis, parseInt(nanos, 10) || 0); +}; + +/** + * Uses the current local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. + * @returns {LocalTime} + */ +LocalTime.now = function (nanoseconds) { + return LocalTime.fromDate(new Date(), nanoseconds); +}; + +/** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Date} date Local date portion to extract the time passed since midnight. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the nanosecond time portion. + * @returns {LocalTime} + */ +LocalTime.fromDate = function (date, nanoseconds) { + if (!util.isDate(date)) { + throw new Error('Not a valid date'); + } + //Use the local representation, only the milliseconds portion + const millis = (date.getTime() + date.getTimezoneOffset() * -60000) % millisInDay; + return LocalTime.fromMilliseconds(millis, nanoseconds); +}; + +/** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} milliseconds A Number from 0 to 86,399,999. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. 
+ * @returns {LocalTime} + */ +LocalTime.fromMilliseconds = function (milliseconds, nanoseconds) { + if (typeof nanoseconds !== 'number') { + nanoseconds = 0; + } + return new LocalTime(Long + .fromNumber(milliseconds) + .multiply(nanoSecInMillis) + .add(Long.fromNumber(nanoseconds))); +}; + +/** + * Creates a new instance of LocalTime from the bytes representation. + * @param {Buffer} value + * @returns {LocalTime} + */ +LocalTime.fromBuffer = function (value) { + if (!(value instanceof Buffer)) { + throw new TypeError('Expected Buffer, obtained ' + util.inspect(value)); + } + return new LocalTime(new Long(value.readInt32BE(4), value.readInt32BE(0))); +}; + +/** + * Compares this LocalTime with the given one. + * @param {LocalTime} other time to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ +LocalTime.prototype.compare = function (other) { + return this.value.compare(other.value); +}; + +/** + * Returns true if the value of the LocalTime instance and other are the same + * @param {LocalTime} other + * @returns {Boolean} + */ +LocalTime.prototype.equals = function (other) { + return ((other instanceof LocalTime)) && this.compare(other) === 0; +}; + +/** + * Gets the total amount of nanoseconds since midnight for this instance. 
+ * @returns {Long} + */ +LocalTime.prototype.getTotalNanoseconds = function () { + return this.value; +}; + +LocalTime.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * Returns a big-endian bytes representation of the instance + * @returns {Buffer} + */ +LocalTime.prototype.toBuffer = function () { + const buffer = utils.allocBufferUnsafe(8); + buffer.writeUInt32BE(this.value.getHighBitsUnsigned(), 0); + buffer.writeUInt32BE(this.value.getLowBitsUnsigned(), 4); + return buffer; +}; + +/** + * Returns the string representation of the instance in the form of hh:MM:ss.ns + * @returns {String} + */ +LocalTime.prototype.toString = function () { + return formatTime(this._getParts()); +}; + +/** + * Gets the string representation of the instance in the form: hh:MM:ss.ns + * @returns {String} + */ +LocalTime.prototype.toJSON = function () { + return this.toString(); +}; + +/** + * @returns {Array.} + * @ignore + */ +LocalTime.prototype._getParts = function () { + if (!this._partsCache) { + //hours, minutes, seconds and nanos + const parts = [0, 0, 0, 0]; + const secs = this.value.div(nanoSecInSec); + //faster modulo + //total nanos + parts[3] = this.value.subtract(secs.multiply(nanoSecInSec)).toNumber(); + //seconds + parts[2] = secs.toNumber(); + if (parts[2] >= 60) { + //minutes + parts[1] = Math.floor(parts[2] / 60); + parts[2] = parts[2] % 60; + } + if (parts[1] >= 60) { + //hours + parts[0] = Math.floor(parts[1] / 60); + parts[1] = parts[1] % 60; + } + this._partsCache = parts; + } + return this._partsCache; +}; + +/** + * @param {Array.} values + * @private + */ +function formatTime(values) { + let result; + if (values[0] < 10) { + result = '0' + values[0] + ':'; + } + else { + result = values[0] + ':'; + } + if (values[1] < 10) { + result += '0' + values[1] + ':'; + } + else { + result += values[1] + ':'; + } + if (values[2] < 10) { + result += '0' + values[2]; + } + else { + result += values[2]; + } + if 
(values[3] > 0) { + let nanos = values[3].toString(); + //nine digits + if (nanos.length < 9) { + nanos = utils.stringRepeat('0', 9 - nanos.length) + nanos; + } + let lastPosition; + for (let i = nanos.length - 1; i > 0; i--) { + if (nanos[i] !== '0') { + break; + } + lastPosition = i; + } + if (lastPosition) { + nanos = nanos.substring(0, lastPosition); + } + result += '.' + nanos; + } + return result; +} + +module.exports = LocalTime; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/mutable-long.js b/node_modules/cassandra-driver/lib/types/mutable-long.js new file mode 100644 index 0000000..43992d5 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/mutable-long.js @@ -0,0 +1,329 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +"use strict"; + +const Long = require('long'); + +const TWO_PWR_16_DBL = 1 << 16; +const TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; + +/** + * Constructs a signed int64 representation. + * @ignore + */ +class MutableLong { + constructor(b00, b16, b32, b48) { + // Use an array of uint16 + this._arr = [b00 & 0xffff, b16 & 0xffff, b32 & 0xffff, b48 & 0xffff]; + } + + toString() { + return this.toImmutable().toString(); + } + + /** + * Compares this value with the provided value. 
+ * @param {MutableLong} other + * @return {number} + */ + compare(other) { + const thisNeg = this.isNegative(); + const otherNeg = other.isNegative(); + if (thisNeg && !otherNeg) { + return -1; + } + if (!thisNeg && otherNeg) { + return 1; + } + // At this point the sign bits are the same + return this._compareBits(other); + } + + _compareBits(other) { + for (let i = 3; i >= 0; i--) { + if (this._arr[i] > other._arr[i]) { + return 1; + } + if (this._arr[i] < other._arr[i]) { + return -1; + } + } + return 0; + } + + getUint16(index) { + return this._arr[index]; + } + + getLowBitsUnsigned() { + return (this._arr[0] | ((this._arr[1] & 0xffff) << 16)) >>> 0; + } + + getHighBitsUnsigned() { + return (this._arr[2] | (this._arr[3] << 16)) >>> 0; + } + + toNumber() { + return (this._arr[3] << 16 | this._arr[2]) * TWO_PWR_32_DBL + ((this._arr[1] << 16 | this._arr[0]) >>> 0); + } + + /** + * Performs the bitwise NOT of this value. + * @return {MutableLong} + */ + not() { + this._arr[0] = ~this._arr[0] & 0xffff; + this._arr[1] = ~this._arr[1] & 0xffff; + this._arr[2] = ~this._arr[2] & 0xffff; + this._arr[3] = ~this._arr[3] & 0xffff; + return this; + } + + add(addend) { + let c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += this._arr[0] + addend._arr[0]; + this._arr[0] = c00 & 0xffff; + c16 += c00 >>> 16; + c16 += this._arr[1] + addend._arr[1]; + this._arr[1] = c16 & 0xffff; + c32 += c16 >>> 16; + c32 += this._arr[2] + addend._arr[2]; + this._arr[2] = c32 & 0xffff; + c48 += c32 >>> 16; + c48 += this._arr[3] + addend._arr[3]; + this._arr[3] = c48 & 0xffff; + return this; + } + + shiftLeft(numBits) { + if (numBits === 0) { + return this; + } + if (numBits >= 64) { + return this.toZero(); + } + const remainingBits = numBits % 16; + const pos = Math.floor(numBits / 16); + if (pos > 0) { + this._arr[3] = this._arr[3 - pos]; + this._arr[2] = pos > 2 ? 0 : this._arr[2 - pos]; + this._arr[1] = pos > 1 ? 
0 : this._arr[0]; + this._arr[0] = 0; + } + if (remainingBits > 0) { + // shift left within the int16 and the next one + this._arr[3] = ((this._arr[3] << remainingBits) | (this._arr[2] >>> (16 - remainingBits))) & 0xffff; + this._arr[2] = ((this._arr[2] << remainingBits) | (this._arr[1] >>> (16 - remainingBits))) & 0xffff; + this._arr[1] = ((this._arr[1] << remainingBits) | (this._arr[0] >>> (16 - remainingBits))) & 0xffff; + this._arr[0] = (this._arr[0] << remainingBits) & 0xffff; + } + return this; + } + + shiftRightUnsigned(numBits) { + if (numBits === 0) { + return this; + } + if (numBits >= 64) { + return this.toZero(); + } + const remainingBits = numBits % 16; + const pos = Math.floor(numBits / 16); + if (pos > 0) { + this._arr[0] = this._arr[pos]; + this._arr[1] = pos > 2 ? 0 : this._arr[1 + pos]; + this._arr[2] = pos > 1 ? 0 : this._arr[3]; + this._arr[3] = 0; + } + if (remainingBits > 0) { + this._arr[0] = (this._arr[0] >>> remainingBits) | ((this._arr[1] << (16 - remainingBits)) & 0xffff); + this._arr[1] = (this._arr[1] >>> remainingBits) | ((this._arr[2] << (16 - remainingBits)) & 0xffff); + this._arr[2] = (this._arr[2] >>> remainingBits) | ((this._arr[3] << (16 - remainingBits)) & 0xffff); + this._arr[3] = this._arr[3] >>> remainingBits; + } + return this; + } + + or(other) { + this._arr[0] |= other._arr[0]; + this._arr[1] |= other._arr[1]; + this._arr[2] |= other._arr[2]; + this._arr[3] |= other._arr[3]; + return this; + } + + /** + * Returns the bitwise XOR of this Long and the given one. + * @param {MutableLong} other + * @returns {MutableLong} this instance. + */ + xor(other) { + this._arr[0] ^= other._arr[0]; + this._arr[1] ^= other._arr[1]; + this._arr[2] ^= other._arr[2]; + this._arr[3] ^= other._arr[3]; + return this; + } + + clone() { + return new MutableLong(this._arr[0], this._arr[1], this._arr[2], this._arr[3]); + } + + /** + * Performs the product of this and the specified Long. 
+ * @param {MutableLong} multiplier + * @returns {MutableLong} this instance. + */ + multiply(multiplier) { + let negate = false; + if (this.isZero() || multiplier.isZero()) { + return this.toZero(); + } + if (this.isNegative()) { + this.negate(); + negate = !negate; + } + if (multiplier.isNegative()) { + multiplier = multiplier.clone().negate(); + negate = !negate; + } + // We can skip products that would overflow. + let c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += this._arr[0] * multiplier._arr[0]; + c16 += c00 >>> 16; + c16 += this._arr[1] * multiplier._arr[0]; + c32 += c16 >>> 16; + c16 &= 0xFFFF; + c16 += this._arr[0] * multiplier._arr[1]; + c32 += c16 >>> 16; + c32 += this._arr[2] * multiplier._arr[0]; + c48 += c32 >>> 16; + c32 &= 0xFFFF; + c32 += this._arr[1] * multiplier._arr[1]; + c48 += c32 >>> 16; + c32 &= 0xFFFF; + c32 += this._arr[0] * multiplier._arr[2]; + c48 += c32 >>> 16; + c48 += this._arr[3] * multiplier._arr[0] + this._arr[2] * multiplier._arr[1] + + this._arr[1] * multiplier._arr[2] + this._arr[0] * multiplier._arr[3]; + this._arr[0] = c00 & 0xffff; + this._arr[1] = c16 & 0xffff; + this._arr[2] = c32 & 0xffff; + this._arr[3] = c48 & 0xffff; + if (negate) { + this.negate(); + } + return this; + } + + toZero() { + this._arr[3] = this._arr[2] = this._arr[1] = this._arr[0] = 0; + return this; + } + + isZero() { + return (this._arr[3] === 0 && this._arr[2] === 0 && this._arr[1] === 0 && this._arr[0] === 0); + } + + isNegative() { + // most significant bit turned on + return (this._arr[3] & 0x8000) > 0; + } + + /** + * Negates this value. 
+ * @return {MutableLong} + */ + negate() { + return this.not().add(MutableLong.one); + } + + equals(other) { + if (!(other instanceof MutableLong)) { + return false; + } + return (this._arr[0] === other._arr[0] && this._arr[1] === other._arr[1] && + this._arr[2] === other._arr[2] && this._arr[3] === other._arr[3]); + } + + toImmutable() { + return Long.fromBits(this.getLowBitsUnsigned(), this.getHighBitsUnsigned(), false); + } + + static fromNumber(value) { + if (isNaN(value) || !isFinite(value)) { + return new MutableLong(); + } + if (value < 0) { + return MutableLong.fromNumber(-value).negate(); + } + const low32Bits = value % TWO_PWR_32_DBL; + const high32Bits = value / TWO_PWR_32_DBL; + return MutableLong.fromBits(low32Bits, high32Bits); + } + + static fromBits(low32Bits, high32Bits) { + return new MutableLong(low32Bits, low32Bits >>> 16, high32Bits, high32Bits >>> 16); + } + + /** + * Returns a Long representation of the given string, written using the specified radix. + * @param {String} str + * @param {Number} [radix] + * @return {MutableLong} + */ + static fromString(str, radix) { + if (typeof str !== 'string') { + throw new Error('String format is not valid: ' + str); + } + if (str.length === 0) { + throw Error('number format error: empty string'); + } + if (str === "NaN" || str === "Infinity" || str === "+Infinity" || str === "-Infinity") { + return new MutableLong(); + } + radix = radix || 10; + if (radix < 2 || radix > 36) { + throw Error('radix out of range: ' + radix); + } + let p; + if ((p = str.indexOf('-')) > 0) { + throw Error('number format error: interior "-" character: ' + str); + } + if (p === 0) { + return MutableLong.fromString(str.substring(1), radix).negate(); + } + // Do several (8) digits each time through the loop + const radixToPower = MutableLong.fromNumber(Math.pow(radix, 8)); + const result = new MutableLong(); + for (let i = 0; i < str.length; i += 8) { + const size = Math.min(8, str.length - i); + const value = 
parseInt(str.substring(i, i + size), radix); + if (size < 8) { + const power = MutableLong.fromNumber(Math.pow(radix, size)); + result.multiply(power).add(MutableLong.fromNumber(value)); + break; + } + result.multiply(radixToPower); + result.add(MutableLong.fromNumber(value)); + } + return result; + } +} + +MutableLong.one = new MutableLong(1, 0, 0, 0); + +module.exports = MutableLong; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/protocol-version.js b/node_modules/cassandra-driver/lib/types/protocol-version.js new file mode 100644 index 0000000..4accf2b --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/protocol-version.js @@ -0,0 +1,349 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const utils = require('../utils'); +const VersionNumber = require('./version-number'); +const v200 = VersionNumber.parse('2.0.0'); +const v210 = VersionNumber.parse('2.1.0'); +const v220 = VersionNumber.parse('2.2.0'); +const v300 = VersionNumber.parse('3.0.0'); +const v510 = VersionNumber.parse('5.1.0'); +const v600 = VersionNumber.parse('6.0.0'); + +/** + * Contains information for the different protocol versions supported by the driver. + * @type {Object} + * @property {Number} v1 Cassandra protocol v1, supported in Apache Cassandra 1.2-->2.2. + * @property {Number} v2 Cassandra protocol v2, supported in Apache Cassandra 2.0-->2.2. 
+ * @property {Number} v3 Cassandra protocol v3, supported in Apache Cassandra 2.1-->3.x. + * @property {Number} v4 Cassandra protocol v4, supported in Apache Cassandra 2.2-->3.x. + * @property {Number} v5 Cassandra protocol v5, in beta from Apache Cassandra 3.x+. Currently not supported by the + * driver. + * @property {Number} dseV1 DataStax Enterprise protocol v1, DSE 5.1+ + * @property {Number} dseV2 DataStax Enterprise protocol v2, DSE 6.0+ + * @property {Number} maxSupported Returns the higher protocol version that is supported by this driver. + * @property {Number} minSupported Returns the lower protocol version that is supported by this driver. + * @property {Function} isSupported A function that returns a boolean determining whether a given protocol version + * is supported. + * @alias module:types~protocolVersion + */ +const protocolVersion = { + // Strict equality operators to compare versions are allowed, other comparison operators are discouraged. Instead, + // use a function that checks if a functionality is present on a certain version, for maintainability purposes. + v1: 0x01, + v2: 0x02, + v3: 0x03, + v4: 0x04, + v5: 0x05, + v6: 0x06, + dseV1: 0x41, + dseV2: 0x42, + maxSupported: 0x42, + minSupported: 0x01, + + /** + * Determines whether the protocol version is a DSE-specific protocol version. + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + isDse: function(version) { + return ((version >= this.dseV1 && version <= this.dseV2)); + }, + /** + * Returns true if the protocol version represents a version of Cassandra + * supported by this driver, false otherwise + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + isSupportedCassandra: function(version) { + return (version <= 0x04 && version >= 0x01); + }, + /** + * Determines whether the protocol version is supported by this driver. 
+ * @param {Number} version + * @returns {Boolean} + * @ignore + */ + isSupported: function (version) { + return (this.isDse(version) || this.isSupportedCassandra(version)); + }, + + /** + * Determines whether the protocol includes flags for PREPARE messages. + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + supportsPrepareFlags: function (version) { + return (version === this.dseV2); + }, + /** + * Determines whether the protocol supports sending the keyspace as part of PREPARE, QUERY, EXECUTE, and BATCH. + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + supportsKeyspaceInRequest: function (version) { + return (version === this.dseV2); + }, + /** + * Determines whether the protocol supports result_metadata_id on `prepared` response and + * and `execute` request. + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + supportsResultMetadataId: function (version) { + return (version === this.dseV2); + }, + /** + * Determines whether the protocol supports partition key indexes in the `prepared` RESULT responses. + * @param {Number} version + * @returns {Boolean} + * @ignore + */ + supportsPreparedPartitionKey: function (version) { + return (version >= this.v4); + }, + /** + * Determines whether the protocol supports up to 4 strings (ie: change_type, target, keyspace and table) in the + * schema change responses. + * @param version + * @return {boolean} + * @ignore + */ + supportsSchemaChangeFullMetadata: function (version) { + return (version >= this.v3); + }, + /** + * Determines whether the protocol supports continuous paging. + * @param version + * @return {boolean} + * @ignore + */ + supportsContinuousPaging: function (version) { + return (this.isDse(version)); + }, + /** + * Determines whether the protocol supports paging state and serial consistency parameters in QUERY and EXECUTE + * requests. 
+ * @param version + * @return {boolean} + * @ignore + */ + supportsPaging: function (version) { + return (version >= this.v2); + }, + /** + * Determines whether the protocol supports timestamps parameters in BATCH, QUERY and EXECUTE requests. + * @param {Number} version + * @return {boolean} + * @ignore + */ + supportsTimestamp: function (version) { + return (version >= this.v3); + }, + /** + * Determines whether the protocol supports named parameters in QUERY and EXECUTE requests. + * @param {Number} version + * @return {boolean} + * @ignore + */ + supportsNamedParameters: function (version) { + return (version >= this.v3); + }, + /** + * Determines whether the protocol supports unset parameters. + * @param {Number} version + * @return {boolean} + * @ignore + */ + supportsUnset: function (version) { + return (version >= this.v4); + }, + /** + * Determines whether the protocol provides a reason map for read and write failure errors. + * @param version + * @return {boolean} + * @ignore + */ + supportsFailureReasonMap: function (version) { + return (version >= this.v5); + }, + /** + * Determines whether the protocol supports timestamp and serial consistency parameters in BATCH requests. + * @param {Number} version + * @return {boolean} + * @ignore + */ + uses2BytesStreamIds: function (version) { + return (version >= this.v3); + }, + /** + * Determines whether the collection length is encoded using 32 bits. + * @param {Number} version + * @return {boolean} + * @ignore + */ + uses4BytesCollectionLength: function (version) { + return (version >= this.v3); + }, + /** + * Determines whether the QUERY, EXECUTE and BATCH flags are encoded using 32 bits. + * @param {Number} version + * @return {boolean} + * @ignore + */ + uses4BytesQueryFlags: function (version) { + return (this.isDse(version)); + }, + /** + * Startup responses using protocol v4+ can be a SERVER_ERROR wrapping a ProtocolException, this method returns true + * when is possible to receive such error. 
+ * @param {Number} version + * @return {boolean} + * @ignore + */ + canStartupResponseErrorBeWrapped: function (version) { + return (version >= this.v4); + }, + /** + * Gets the first version number that is supported, lower than the one provided. + * Returns zero when there isn't a lower supported version. + * @param {Number} version + * @return {Number} + * @ignore + */ + getLowerSupported: function (version) { + if (version >= this.v5) { + return this.v4; + } + if (version <= this.v1) { + return 0; + } + return version - 1; + }, + + /** + * Computes the highest supported protocol version collectively by the given hosts. + * + * Considers the cassandra_version of the input hosts to determine what protocol versions + * are supported and uses the highest common protocol version among them. + * + * If hosts >= C* 3.0 are detected, any hosts older than C* 2.1 will not be considered + * as those cannot be connected to. In general this will not be a problem as C* does + * not support clusters with nodes that have versions that are more than one major + * version away from each other. + * @param {Connection} connection Connection hosts were discovered from. + * @param {Array.} hosts The hosts to determine highest protocol version from. + * @return {Number} Highest supported protocol version among hosts. + */ + getHighestCommon: function(connection, hosts) { + const log = connection.log ? connection.log.bind(connection) : utils.noop; + let maxVersion = connection.protocolVersion; + // whether or not protocol v3 is required (nodes detected that don't support < 3). + let v3Requirement = false; + // track the common protocol version >= v3 in case we encounter older versions. + let maxVersionWith3OrMore = maxVersion; + hosts.forEach(h => { + let dseVersion = null; + if (h.dseVersion) { + // As of DSE 5.1, DSE has it's own specific protocol versions. If we detect 5.1+ + // consider those protocol versions. 
+ dseVersion = VersionNumber.parse(h.dseVersion); + log('verbose', `Encountered host ${h.address} with dse version ${dseVersion}`); + if (dseVersion.compare(v510) >= 0) { + v3Requirement = true; + if (dseVersion.compare(v600) >= 0) { + maxVersion = Math.min(this.dseV2, maxVersion); + } else { + maxVersion = Math.min(this.dseV1, maxVersion); + } + maxVersionWith3OrMore = maxVersion; + return; + } + // If DSE < 5.1, we fall back on the cassandra protocol logic. + } + + if (!h.cassandraVersion || h.cassandraVersion.length === 0) { + log('warning', 'Encountered host ' + h.address + ' with no cassandra version,' + + ' skipping as part of protocol version evaluation'); + return; + } + + try { + const cassandraVersion = VersionNumber.parse(h.cassandraVersion); + if (!dseVersion) { + log('verbose', 'Encountered host ' + h.address + ' with cassandra version ' + cassandraVersion); + } + if (cassandraVersion.compare(v300) >= 0) { + // Anything 3.0.0+ has a max protocol version of V4 and requires at least V3. + v3Requirement = true; + maxVersion = Math.min(this.v4, maxVersion); + maxVersionWith3OrMore = maxVersion; + } else if (cassandraVersion.compare(v220) >= 0) { + // Cassandra 2.2.x has a max protocol version of V4. + maxVersion = Math.min(this.v4, maxVersion); + maxVersionWith3OrMore = maxVersion; + } else if (cassandraVersion.compare(v210) >= 0) { + // Cassandra 2.1.x has a max protocol version of V3. + maxVersion = Math.min(this.v3, maxVersion); + maxVersionWith3OrMore = maxVersion; + } else if (cassandraVersion.compare(v200) >= 0) { + // Cassandra 2.0.x has a max protocol version of V2. + maxVersion = Math.min(this.v2, maxVersion); + } else { + // Anything else is < 2.x and requires protocol version V1. 
+ maxVersion = this.v1; + } + } catch (e) { + log('warning', 'Encountered host ' + h.address + ' with unparseable cassandra version ' + h.cassandraVersion + + ' skipping as part of protocol version evaluation'); + } + }); + + if (v3Requirement && maxVersion < this.v3) { + const addendum = '. This should not be possible as nodes within a cluster can\'t be separated by more than one major version'; + if (maxVersionWith3OrMore < this.v3) { + log('error', 'Detected hosts that require at least protocol version 0x3, but currently connected to ' + + connection.address + ':' + connection.port + ' using protocol version 0x' + maxVersionWith3OrMore + + '. Will not be able to connect to these hosts' + addendum); + } else { + log('error', 'Detected hosts with maximum protocol version of 0x' + maxVersion.toString(16) + + ' but there are some hosts that require at least version 0x3. Will not be able to connect to these older hosts' + + addendum); + } + maxVersion = maxVersionWith3OrMore; + } + + log('verbose', 'Resolved protocol version 0x' + maxVersion.toString(16) + ' as the highest common protocol version among hosts'); + return maxVersion; + }, + + /** + * Determines if the protocol is a BETA version of the protocol. + * @param {Number} version + * @return {Number} + */ + isBeta: function (version) { + return version === this.v5; + } +}; + +module.exports = protocolVersion; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/result-set.js b/node_modules/cassandra-driver/lib/types/result-set.js new file mode 100644 index 0000000..4137ddb --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/result-set.js @@ -0,0 +1,275 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const utils = require('../utils'); +const errors = require('../errors'); + +const asyncIteratorSymbol = Symbol.asyncIterator || '@@asyncIterator'; + +/** @module types */ + +/** + * Creates a new instance of ResultSet. + * @class + * @classdesc Represents the result of a query. + * @param {Object} response + * @param {String} host + * @param {Object} triedHosts + * @param {Number} speculativeExecutions + * @param {Number} consistency + * @param {Boolean} isSchemaInAgreement + * @constructor + */ +function ResultSet(response, host, triedHosts, speculativeExecutions, consistency, isSchemaInAgreement) { + // if no execution was made at all, set to 0. + if (speculativeExecutions === -1) { + speculativeExecutions = 0; + } + /** + * Information on the execution of a successful query: + * @member {Object} + * @property {Number} achievedConsistency The consistency level that has been actually achieved by the query. + * @property {String} queriedHost The Cassandra host that coordinated this query. + * @property {Object} triedHosts Gets the associative array of host that were queried before getting a valid response, + * being the last host the one that replied correctly. + * @property {Object} speculativeExecutions The number of speculative executions (not including the first) executed before + * getting a valid response. + * @property {Uuid} traceId Identifier of the trace session. + * @property {Array.} warnings Warning messages generated by the server when executing the query. 
+ * @property {Boolean} isSchemaInAgreement Whether the cluster had reached schema agreement after the execution of + * this query. + *

+ * After a successful schema-altering query (ex: creating a table), the driver will check if + * the cluster's nodes agree on the new schema version. If not, it will keep retrying for a given + * delay (see protocolOptions.maxSchemaAgreementWaitSeconds). + *

+ *

+ * Note that the schema agreement check is only performed for schema-altering queries For other + * query types, this method will always return true. If this method returns false, + * clients can call [Metadata.checkSchemaAgreement()]{@link module:metadata~Metadata#checkSchemaAgreement} later to + * perform the check manually. + *

+ */ + this.info = { + queriedHost: host, + triedHosts: triedHosts, + speculativeExecutions: speculativeExecutions, + achievedConsistency: consistency, + traceId: null, + warnings: null, + customPayload: null, + isSchemaInAgreement + }; + + if (response.flags) { + this.info.traceId = response.flags.traceId; + this.info.warnings = response.flags.warnings; + this.info.customPayload = response.flags.customPayload; + } + + /** + * Gets an array rows returned by the query. + * When the result set represents a response from a write query, this property will be undefined. + * When the read query result contains more rows than the fetch size (5000), this property will only contain the + * first rows up to fetch size. To obtain all the rows, you can use the built-in async iterator that will retrieve the + * following pages of results. + * @type {Array|undefined} + */ + this.rows = response.rows; + + /** + * Gets the row length of the result, regardless if the result has been buffered or not + * @type {Number|undefined} + */ + this.rowLength = this.rows ? this.rows.length : response.rowLength; + + /** + * Gets the columns returned in this ResultSet. + * @type {Array.<{name, type}>} + * @default null + */ + this.columns = null; + + /** + * A string token representing the current page state of query. It can be used in the following executions to + * continue paging and retrieve the remained of the result for the query. + * @type {String|null} + * @default null + */ + this.pageState = null; + + /** + * Method used to manually fetch the next page of results. + * This method is only exposed when using the {@link Client#eachRow} method and there are more rows available in + * following pages. + * @type Function + */ + this.nextPage = undefined; + + /** + * Method used internally to fetch the next page of results using promises. 
+ * @internal + * @ignore + * @type {Function} + */ + this.nextPageAsync = undefined; + + const meta = response.meta; + + if (meta) { + this.columns = meta.columns; + + if (meta.pageState) { + this.pageState = meta.pageState.toString('hex'); + + // Expose rawPageState internally + Object.defineProperty(this, 'rawPageState', { value: meta.pageState, enumerable: false }); + } + } +} + +/** + * Returns the first row or null if the result rows are empty. + */ +ResultSet.prototype.first = function () { + if (this.rows && this.rows.length) { + return this.rows[0]; + } + return null; +}; + +ResultSet.prototype.getPageState = function () { + // backward-compatibility + return this.pageState; +}; + +ResultSet.prototype.getColumns = function () { + // backward-compatibility + return this.columns; +}; + +/** + * When this instance is the result of a conditional update query, it returns whether it was successful. + * Otherwise, it returns true. + *

+ * For consistency, this method always returns true for non-conditional queries (although there is + * no reason to call the method in that case). This is also the case for conditional DDL statements + * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return + * information whether it was applied or not. + *

+ */ +ResultSet.prototype.wasApplied = function () { + if (!this.rows || this.rows.length === 0) { + return true; + } + const firstRow = this.rows[0]; + const applied = firstRow['[applied]']; + return typeof applied === 'boolean' ? applied : true; +}; + +/** + * Gets the iterator function. + *

+ * Retrieves the iterator of the underlying fetched rows, without causing the driver to fetch the following + * result pages. For more information on result paging, + * [visit the documentation]{@link http://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. + *

+ * @alias module:types~ResultSet#@@iterator + * @see {@link module:types~ResultSet#@@asyncIterator} + * @example Using for...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for (const row of result) { + * console.log(row['email']); + * } + * @returns {Iterator.} + */ +ResultSet.prototype[Symbol.iterator] = function getIterator() { + if (!this.rows) { + return utils.emptyArray[Symbol.iterator](); + } + return this.rows[Symbol.iterator](); +}; + +/** + * Gets the async iterator function. + *

+ * Retrieves the async iterator representing the entire query result, the driver will fetch the following result + * pages. + *

+ *

Use the async iterator when the query result might contain more rows than the fetchSize.

+ *

+ * Note that using the async iterator will not affect the internal state of the ResultSet instance. + * You should avoid using both rows property that contains the row instances of the first page of + * results, and the async iterator, that will yield all the rows in the result regardless on the number of pages. + *

+ *

Multiple concurrent async iterations are not supported.

+ * @alias module:types~ResultSet#@@asyncIterator + * @example Using for await...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for await (const row of result) { + * console.log(row['email']); + * } + * @returns {AsyncIterator} + */ +ResultSet.prototype[asyncIteratorSymbol] = function getAsyncGenerator() { + let index = 0; + let pageState = this.rawPageState; + let rows = this.rows; + + if (!rows || rows.length === 0) { + return { next: () => Promise.resolve({ done: true }) }; + } + + const self = this; + + // Async generators are not present in Node.js 8, implement it manually + return { + async next() { + if (index >= rows.length && pageState) { + if (!self.nextPageAsync) { + throw new errors.DriverInternalError('Property nextPageAsync should be set when pageState is defined'); + } + + const rs = await self.nextPageAsync(pageState); + rows = rs.rows; + index = 0; + pageState = rs.rawPageState; + } + + if (index < rows.length) { + return { done: false, value: rows[index++] }; + } + + return { done: true }; + } + }; +}; + +/** + * Determines whether there are more pages of results. + * If so, the driver will initially retrieve and contain only the first page of results. + * To obtain all the rows, use the [AsyncIterator]{@linkcode module:types~ResultSet#@@asyncIterator}. + * @returns {boolean} + */ +ResultSet.prototype.isPaged = function() { + return !!this.rawPageState; +}; + +module.exports = ResultSet; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/result-stream.js b/node_modules/cassandra-driver/lib/types/result-stream.js new file mode 100644 index 0000000..90cb64c --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/result-stream.js @@ -0,0 +1,148 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const { Readable } = require('stream'); +const utils = require('../utils'); +const errors = require('../errors'); +const clientOptions = require('../client-options'); + +/** @module types */ +/** + * Readable stream using to yield data from a result or a field + */ +class ResultStream extends Readable { + constructor(opt) { + super(opt); + this.buffer = []; + this.paused = true; + this._cancelAllowed = false; + this._handlersObject = null; + this._highWaterMarkRows = 0; + } + + _read() { + this.paused = false; + if (this.buffer.length === 0) { + this._readableState.reading = false; + } + while (!this.paused && this.buffer.length > 0) { + this.paused = !this.push(this.buffer.shift()); + } + this._checkBelowHighWaterMark(); + if (!this.paused && !this.buffer.length && this._readNext) { + this._readNext(); + this._readNext = null; + } + } + + /** + * Allows for throttling, helping nodejs keep the internal buffers reasonably sized. 
+ * @param {function} readNext function that triggers reading the next result chunk + * @ignore + */ + _valve(readNext) { + this._readNext = null; + if (!readNext) { + return; + } + if (this.paused || this.buffer.length) { + this._readNext = readNext; + } + else { + readNext(); + } + } + + add(chunk) { + const length = this.buffer.push(chunk); + this.read(0); + this._checkAboveHighWaterMark(); + return length; + } + + _checkAboveHighWaterMark() { + if (!this._handlersObject || !this._handlersObject.resumeReadingHandler) { + return; + } + if (this._highWaterMarkRows === 0 || this.buffer.length !== this._highWaterMarkRows) { + return; + } + this._handlersObject.resumeReadingHandler(false); + } + + _checkBelowHighWaterMark() { + if (!this._handlersObject || !this._handlersObject.resumeReadingHandler) { + return; + } + if (this._highWaterMarkRows === 0 || this.buffer.length >= this._highWaterMarkRows) { + return; + } + // The consumer has dequeued below the watermark + this._handlersObject.resumeReadingHandler(true); + } + + /** + * When continuous paging is enabled, allows the client to notify to the server to stop pushing further pages. + *

Note: This is not part of the public API yet.

+ * @param {Function} [callback] The cancel method accepts an optional callback. + * @example Cancelling a continuous paging execution + * const stream = client.stream(query, params, { prepare: true, continuousPaging: true }); + * // ... + * // Ask the server to stop pushing rows. + * stream.cancel(); + * @ignore + */ + cancel(callback) { + if (!this._cancelAllowed) { + const err = new Error('You can only cancel streaming executions when continuous paging is enabled'); + if (!callback) { + throw err; + } + return callback(err); + } + if (!this._handlersObject) { + throw new errors.DriverInternalError('ResultStream cancel is allowed but the cancel options were not set'); + } + callback = callback || utils.noop; + if (!this._handlersObject.cancelHandler) { + // The handler is not yet set + // Set the callback as a flag to identify that the cancel handler must be invoked when set + this._handlersObject.cancelHandler = callback; + return; + } + this._handlersObject.cancelHandler(callback); + } + + /** + * Sets the pointer to the handler to be used to cancel the continuous page execution. + * @param options + * @internal + * @ignore + */ + setHandlers(options) { + if (!options.continuousPaging) { + return; + } + this._cancelAllowed = true; + this._handlersObject = options; + this._highWaterMarkRows = + options.continuousPaging.highWaterMarkRows || clientOptions.continuousPageDefaultHighWaterMark; + } +} + +module.exports = ResultStream; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/row.js b/node_modules/cassandra-driver/lib/types/row.js new file mode 100644 index 0000000..fca7132 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/row.js @@ -0,0 +1,80 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +/** @module types */ +/** + * Represents a result row + * @param {Array} columns + * @constructor + */ +function Row(columns) { + if (!columns) { + throw new Error('Columns not defined'); + } + //Private non-enumerable properties, with double underscore to avoid interfering with column names + Object.defineProperty(this, '__columns', { value: columns, enumerable: false, writable: false}); +} + +/** + * Returns the cell value. + * @param {String|Number} columnName Name or index of the column + */ +Row.prototype.get = function (columnName) { + if (typeof columnName === 'number') { + //its an index + return this[this.__columns[columnName].name]; + } + return this[columnName]; +}; + +/** + * Returns an array of the values of the row + * @returns {Array} + */ +Row.prototype.values = function () { + const valuesArray = []; + this.forEach(function (val) { + valuesArray.push(val); + }); + return valuesArray; +}; + +/** + * Returns an array of the column names of the row + * @returns {Array} + */ +Row.prototype.keys = function () { + const keysArray = []; + this.forEach(function (val, key) { + keysArray.push(key); + }); + return keysArray; +}; + +/** + * Executes the callback for each field in the row, containing the value as first parameter followed by the columnName + * @param {Function} callback + */ +Row.prototype.forEach = function (callback) { + for (const columnName in this) { + if (!this.hasOwnProperty(columnName)) { + continue; + } + callback(this[columnName], columnName); + } +}; + +module.exports = Row; \ No newline at end of 
file diff --git a/node_modules/cassandra-driver/lib/types/time-uuid.js b/node_modules/cassandra-driver/lib/types/time-uuid.js new file mode 100644 index 0000000..d4caae6 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/time-uuid.js @@ -0,0 +1,410 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; +const util = require('util'); +const crypto = require('crypto'); +const Long = require('long'); + +const Uuid = require('./uuid'); +const utils = require('../utils'); + +/** @module types */ +/** + * Oct 15, 1582 in milliseconds since unix epoch + * @const + * @private + */ +const _unixToGregorian = 12219292800000; +/** + * 10,000 ticks in a millisecond + * @const + * @private + */ +const _ticksInMs = 10000; + +const minNodeId = utils.allocBufferFromString('808080808080', 'hex'); +const minClockId = utils.allocBufferFromString('8080', 'hex'); +const maxNodeId = utils.allocBufferFromString('7f7f7f7f7f7f', 'hex'); +const maxClockId = utils.allocBufferFromString('7f7f', 'hex'); + +/** + * Counter used to generate up to 10000 different timeuuid values with the same Date + * @private + * @type {number} + */ +let _ticks = 0; +/** + * Counter used to generate ticks for the current time + * @private + * @type {number} + */ +let _ticksForCurrentTime = 0; +/** + * Remember the last time when a ticks for the current time so that it can be reset + * @private + * @type {number} + */ +let _lastTimestamp = 0; + +/** 
+ * Creates a new instance of Uuid based on the parameters provided according to rfc4122. + * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current + * date. + *

+ * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of the static methods fromDate() or + * now() in that case. + *

+ * @class + * @classdesc Represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. + *

Usage: TimeUuid.now()

+ * @extends module:types~Uuid + * @param {Date} [value] The datetime for the instance, if not provided, it will use the current Date. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as Ecmascript Dates have only milliseconds precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * @constructor + */ +function TimeUuid(value, ticks, nodeId, clockId) { + let buffer; + if (value instanceof Buffer) { + if (value.length !== 16) { + throw new Error('Buffer for v1 uuid not valid'); + } + buffer = value; + } + else { + buffer = generateBuffer(value, ticks, nodeId, clockId); + } + Uuid.call(this, buffer); +} + +util.inherits(TimeUuid, Uuid); + +/** + * Generates a TimeUuid instance based on the Date provided using random node and clock values. + * @param {Date} date Date to generate the v1 uuid. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as Ecmascript Dates have only milliseconds precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * If not provided a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid from a ECMAScript Date + * const timeuuid = TimeUuid.fromDate(new Date()); + * @example Generate a TimeUuid from a Date with ticks portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203); + * @example Generate a TimeUuid from a Date without any random portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203, 'host01', '02'); + * @example Generate a TimeUuid from a Date with random node and clock identifiers + * TimeUuid.fromDate(new Date(), 1203, function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + */ +TimeUuid.fromDate = function (date, ticks, nodeId, clockId, callback) { + if (typeof ticks === 'function') { + callback = ticks; + ticks = nodeId = clockId = null; + } else if (typeof nodeId === 'function') { + callback = nodeId; + nodeId = clockId = null; + } else if (typeof clockId === 'function') { + callback = clockId; + clockId = null; + } + + if (!callback) { + return new TimeUuid(date, ticks, nodeId, clockId); + } + + utils.parallel([ + next => getOrGenerateRandom(nodeId, 6, (err, buffer) => next(err, nodeId = buffer)), + next => getOrGenerateRandom(clockId, 2, (err, buffer) => next(err, clockId = buffer)), + ], (err) => { + if (err) { + return callback(err); + } + + let timeUuid; + try { + timeUuid = new TimeUuid(date, ticks, nodeId, clockId); + } + catch (e) { + return callback(e); + } + + callback(null, timeUuid); + }); +}; + +/** + * Parses a string representation of a TimeUuid + * @param {String} value + * @returns {TimeUuid} + */ +TimeUuid.fromString = function (value) { + return new TimeUuid(Uuid.fromString(value).getBuffer()); +}; + +/** + * Returns the smaller possible type 1 uuid with the provided Date. + */ +TimeUuid.min = function (date, ticks) { + return new TimeUuid(date, ticks, minNodeId, minClockId); +}; + +/** + * Returns the biggest possible type 1 uuid with the provided Date. 
+ */ +TimeUuid.max = function (date, ticks) { + return new TimeUuid(date, ticks, maxNodeId, maxClockId); +}; + +/** + * Generates a TimeUuid instance based on the current date using random node and clock values. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * If not provided a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid from a Date without any random portion + * const timeuuid = TimeUuid.now('host01', '02'); + * @example Generate a TimeUuid with random node and clock identifiers + * TimeUuid.now(function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + * @example Generate a TimeUuid based on the current date (might block) + * const timeuuid = TimeUuid.now(); + */ +TimeUuid.now = function (nodeId, clockId, callback) { + return TimeUuid.fromDate(null, null, nodeId, clockId, callback); +}; + + +/** + * Gets the Date and 100-nanoseconds units representation of this instance. + * @returns {{date: Date, ticks: Number}} + */ +TimeUuid.prototype.getDatePrecision = function () { + const timeLow = this.buffer.readUInt32BE(0); + + let timeHigh = 0; + timeHigh |= ( this.buffer[4] & 0xff ) << 8; + timeHigh |= this.buffer[5] & 0xff; + timeHigh |= ( this.buffer[6] & 0x0f ) << 24; + timeHigh |= ( this.buffer[7] & 0xff ) << 16; + + const val = Long.fromBits(timeLow, timeHigh); + const ticksInMsLong = Long.fromNumber(_ticksInMs); + const ticks = val.modulo(ticksInMsLong); + const time = val + .div(ticksInMsLong) + .subtract(Long.fromNumber(_unixToGregorian)); + return { date: new Date(time.toNumber()), ticks: ticks.toNumber()}; +}; + +/** + * Gets the Date representation of this instance. + * @returns {Date} + */ +TimeUuid.prototype.getDate = function () { + return this.getDatePrecision().date; +}; + +/** + * Returns the node id this instance + * @returns {Buffer} + */ +TimeUuid.prototype.getNodeId = function () { + return this.buffer.slice(10); +}; + +/** + * Returns the clock id this instance, with the variant applied (first 2 msb being 1 and 0). 
+ * @returns {Buffer} + */ +TimeUuid.prototype.getClockId = function () { + return this.buffer.slice(8, 10); +}; + +/** + * Returns the node id this instance as an ascii string + * @returns {String} + */ +TimeUuid.prototype.getNodeIdString = function () { + return this.buffer.slice(10).toString('ascii'); +}; + +function writeTime(buffer, time, ticks) { + //value time expressed in ticks precision + const val = Long + .fromNumber(time + _unixToGregorian) + .multiply(Long.fromNumber(10000)) + .add(Long.fromNumber(ticks)); + const timeHigh = val.getHighBitsUnsigned(); + buffer.writeUInt32BE(val.getLowBitsUnsigned(), 0); + buffer.writeUInt16BE(timeHigh & 0xffff, 4); + buffer.writeUInt16BE(timeHigh >>> 16 & 0xffff, 6); +} + +/** + * Returns a buffer of length 2 representing the clock identifier + * @param {String|Buffer} clockId + * @returns {Buffer} + * @private + */ +function getClockId(clockId) { + let buffer = clockId; + if (typeof clockId === 'string') { + buffer = utils.allocBufferFromString(clockId, 'ascii'); + } + if (!(buffer instanceof Buffer)) { + //Generate + buffer = getRandomBytes(2); + } + else if (buffer.length !== 2) { + throw new Error('Clock identifier must have 2 bytes'); + } + return buffer; +} + +/** + * Returns a buffer of length 6 representing the clock identifier + * @param {String|Buffer} nodeId + * @returns {Buffer} + * @private + */ +function getNodeId(nodeId) { + let buffer = nodeId; + if (typeof nodeId === 'string') { + buffer = utils.allocBufferFromString(nodeId, 'ascii'); + } + if (!(buffer instanceof Buffer)) { + //Generate + buffer = getRandomBytes(6); + } + else if (buffer.length !== 6) { + throw new Error('Node identifier must have 6 bytes'); + } + return buffer; +} + +/** + * Returns the ticks portion of a timestamp. If the ticks are not provided an internal counter is used that gets reset at 10000. 
+ * @private + * @param {Number} [ticks] + * @returns {Number} + */ +function getTicks(ticks) { + if (typeof ticks !== 'number'|| ticks >= _ticksInMs) { + _ticks++; + if (_ticks >= _ticksInMs) { + _ticks = 0; + } + ticks = _ticks; + } + return ticks; +} + +/** + * Returns an object with the time representation of the date expressed in milliseconds since unix epoch + * and a ticks property for the 100-nanoseconds precision. + * @private + * @returns {{time: Number, ticks: Number}} + */ +function getTimeWithTicks(date, ticks) { + if (!(date instanceof Date) || isNaN(date.getTime())) { + // time with ticks for the current time + date = new Date(); + const time = date.getTime(); + _ticksForCurrentTime++; + if(_ticksForCurrentTime > _ticksInMs || time > _lastTimestamp) { + _ticksForCurrentTime = 0; + _lastTimestamp = time; + } + ticks = _ticksForCurrentTime; + } + return { + time: date.getTime(), + ticks: getTicks(ticks) + }; +} + +function getRandomBytes(length) { + return crypto.randomBytes(length); +} + +function getOrGenerateRandom(id, length, callback) { + if (id) { + return callback(null, id); + } + crypto.randomBytes(length, callback); +} + +/** + * Generates a 16-length Buffer instance + * @private + * @param {Date} date + * @param {Number} ticks + * @param {String|Buffer} nodeId + * @param {String|Buffer} clockId + * @returns {Buffer} + */ +function generateBuffer(date, ticks, nodeId, clockId) { + const timeWithTicks = getTimeWithTicks(date, ticks); + nodeId = getNodeId(nodeId); + clockId = getClockId(clockId); + const buffer = utils.allocBufferUnsafe(16); + //Positions 0-7 Timestamp + writeTime(buffer, timeWithTicks.time, timeWithTicks.ticks); + //Position 8-9 Clock + clockId.copy(buffer, 8, 0); + //Positions 10-15 Node + nodeId.copy(buffer, 10, 0); + //Version Byte: Time based + //0001xxxx + //turn off first 4 bits + buffer[6] = buffer[6] & 0x0f; + //turn on fifth bit + buffer[6] = buffer[6] | 0x10; + + //IETF Variant Byte: 1.0.x + //10xxxxxx + //turn off 
first 2 bits + buffer[8] = buffer[8] & 0x3f; + //turn on first bit + buffer[8] = buffer[8] | 0x80; + return buffer; +} + +module.exports = TimeUuid; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/tuple.js b/node_modules/cassandra-driver/lib/types/tuple.js new file mode 100644 index 0000000..f8989cc --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/tuple.js @@ -0,0 +1,102 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +/** @module types */ + +/** + * Creates a new sequence of immutable objects with the parameters provided. + * @class + * @classdesc A tuple is a sequence of immutable objects. + * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. + *

+ * As tuples can be used as a Map keys, the {@link Tuple#toString toString()} method calls toString of each element, + * to try to get a unique string key. + *

+ * @param args The sequence elements as arguments. + * @constructor + */ +function Tuple(...args) { + + /** + * Immutable elements of Tuple object. + * @type Array + */ + this.elements = args; + + if (this.elements.length === 0) { + throw new TypeError('Tuple must contain at least one value'); + } + + /** + * Returns the number of the elements. + * @type Number + */ + this.length = this.elements.length; +} + +/** + * Creates a new instance of a tuple based on the Array + * @param {Array} elements + * @returns {Tuple} + */ +Tuple.fromArray = function (elements) { + // Apply the elements Array as parameters + return new Tuple(...elements); +}; + +/** + * Returns the value located at the index. + * @param {Number} index Element index + */ +Tuple.prototype.get = function (index) { + return this.elements[index || 0]; +}; + +/** + * Returns the string representation of the sequence surrounded by parenthesis, ie: (1, 2). + *

+ * The returned value attempts to be a unique string representation of its values. + *

+ * @returns {string} + */ +Tuple.prototype.toString = function () { + return ('(' + + this.elements.reduce(function (prev, x, i) { + return prev + (i > 0 ? ',' : '') + x.toString(); + }, '') + + ')'); +}; + +/** + * Returns the Array representation of the sequence. + * @returns {Array} + */ +Tuple.prototype.toJSON = function () { + return this.elements; +}; + +/** + * Gets the elements as an array + * @returns {Array} + */ +Tuple.prototype.values = function () { + // Clone the elements + return this.elements.slice(0); +}; + +module.exports = Tuple; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/types/uuid.js b/node_modules/cassandra-driver/lib/types/uuid.js new file mode 100644 index 0000000..56281d5 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/uuid.js @@ -0,0 +1,153 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const crypto = require('crypto'); +const utils = require('../utils'); + +/** @module types */ + +/** + * Creates a new instance of Uuid based on a Buffer + * @class + * @classdesc Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. + * @param {Buffer} buffer The 16-length buffer. 
+ * @constructor + */ +function Uuid(buffer) { + if (!buffer || buffer.length !== 16) { + throw new Error('You must provide a buffer containing 16 bytes'); + } + this.buffer = buffer; +} + +/** + * Parses a string representation of a Uuid + * @param {String} value + * @returns {Uuid} + */ +Uuid.fromString = function (value) { + //36 chars: 32 + 4 hyphens + if (typeof value !== 'string' || value.length !== 36) { + throw new Error('Invalid string representation of Uuid, it should be in the 00000000-0000-0000-0000-000000000000'); + } + return new Uuid(utils.allocBufferFromString(value.replace(/-/g, ''), 'hex')); +}; + +/** + * Creates a new random (version 4) Uuid. + * @param {function} [callback] Optional callback to be invoked with the error as first parameter and the created Uuid as + * second parameter. + * @returns {Uuid} + */ +Uuid.random = function (callback) { + if (callback) { + getRandomBytes(function(err, buffer) { + if (err) { + return callback(err); + } + return callback(null, createUuidFromBuffer(buffer)); + }); + } else { + const buffer = getRandomBytes(); + return createUuidFromBuffer(buffer); + } +}; + +/** + * Gets the bytes representation of a Uuid + * @returns {Buffer} + */ +Uuid.prototype.getBuffer = function () { + return this.buffer; +}; +/** + * Compares this object to the specified object. + * The result is true if and only if the argument is not null, is a UUID object, and contains the same value, bit for bit, as this UUID. + * @param {Uuid} other The other value to test for equality. + */ +Uuid.prototype.equals = function (other) { + return other instanceof Uuid && this.buffer.equals(other.buffer); +}; + +/** + * Returns a string representation of the value of this Uuid instance. + * 32 hex separated by hyphens, in the form of 00000000-0000-0000-0000-000000000000. 
+ * @returns {String} + */ +Uuid.prototype.toString = function () { + //32 hex representation of the Buffer + const hexValue = getHex(this); + return ( + hexValue.substr(0, 8) + '-' + + hexValue.substr(8, 4) + '-' + + hexValue.substr(12, 4) + '-' + + hexValue.substr(16, 4) + '-' + + hexValue.substr(20, 12)); +}; + +/** + * Provide the name of the constructor and the string representation + * @returns {string} + */ +Uuid.prototype.inspect = function () { + return this.constructor.name + ': ' + this.toString(); +}; + +/** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ +Uuid.prototype.toJSON = function () { + return this.toString(); +}; + + +/** + * Returns new Uuid + * @private + * @returns {Uuid} + */ +function createUuidFromBuffer (buffer) { + //clear the version + buffer[6] &= 0x0f; + //set the version 4 + buffer[6] |= 0x40; + //clear the variant + buffer[8] &= 0x3f; + //set the IETF variant + buffer[8] |= 0x80; + return new Uuid(buffer); +} + +/** + * @private + * @returns {String} 32 hex representation of the instance, without separators + */ +function getHex (uuid) { + return uuid.buffer.toString('hex'); +} + +/** + * Gets a crypto generated 16 bytes + * @private + * @returns {Buffer} + */ +function getRandomBytes (cb) { + return crypto.randomBytes(16, cb); +} + +module.exports = Uuid; diff --git a/node_modules/cassandra-driver/lib/types/version-number.js b/node_modules/cassandra-driver/lib/types/version-number.js new file mode 100644 index 0000000..7f5b743 --- /dev/null +++ b/node_modules/cassandra-driver/lib/types/version-number.js @@ -0,0 +1,144 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const _versionPattern = /(\d+)\.(\d+)(?:\.(\d+))?(?:\.(\d+)?)?(?:[-~]([\w+]*(?:-\w[.\w]*)*))?(?:\+([.\w]+))?/; + +/** + * Represents a version number in the form of X.Y.Z with optional pre-release and build metadata. + * + * Version numbers compare the usual way, the major version number (X) is compared first, then + * the minor one (Y) and then the patch level one (Z). If pre-release or other build metadata + * is present for a version, that version is considered less than an otherwise equivalent version + * that doesn't have these labels, otherwise they are considered equal. + * + * As of initial implementation versions are only compared against those with at most patch versions + * more refined comparisons are not needed. + * + * @property {Number} major The major version, X of X.Y.Z. + * @property {Number} minor The minor version, Y of X.Y.Z. + * @property {Number} patch The patch version, Z of X.Y.Z. + * @property {Number} dsePatch The dsePatch version, A of X.Y.Z.A or undefined if not present. + * @property {String[]} preReleases Prerelease indicators if present, i.e. SNAPSHOT of X.Y.Z-SNAPSHOT. + * @property {String} build Build string if present, i.e. build1 of X.Y.Z+build1. + * + * @ignore + */ +class VersionNumber { + constructor(major, minor, patch, dsePatch, preReleases, build) { + this.major = major; + this.minor = minor; + this.patch = patch; + this.dsePatch = dsePatch; + this.preReleases = preReleases; + this.build = build; + } + + /** + * @return {String} String representation of this version. 
+ */ + toString() { + let str = this.major + '.' + this.minor; + if (this.patch !== undefined) { + str += '.' + this.patch; + } + if (this.dsePatch !== undefined) { + str += '.' + this.dsePatch; + } + if (this.preReleases !== undefined) { + this.preReleases.forEach((preRelease) => { + str += '-' + preRelease; + }); + } + if (this.build) { + str += '+' + this.build; + } + return str; + } + + /** + * Compares this version with the provided version. + * @param {VersionNumber} other + * @return {Number} -1 if less than other, 0 if equal, 1 if greater than. + */ + compare(other) { + if (this.major < other.major) { + return -1; + } else if (this.major > other.major) { + return 1; + } else if (this.minor < other.minor) { + return -1; + } else if (this.minor > other.minor) { + return 1; + } + + // sanitize patch by setting to 0 if undefined. + const thisPatch = this.patch || 0; + const otherPatch = other.patch || 0; + if (thisPatch < otherPatch) { + return -1; + } else if (thisPatch > otherPatch) { + return 1; + } + + // if dsePatch is set in one case, but not other, consider the one where it is set as greater. + if (this.dsePatch === undefined) { + if (other.dsePatch !== undefined) { + return -1; + } + } else if (other.dsePatch === undefined) { + return 1; + } else { + if (this.dsePatch < other.dsePatch) { + return -1; + } else if (this.dsePatch > other.dsePatch) { + return 1; + } + } + + // If prereleases are present, consider less than those that don't have any. + if (this.preReleases === undefined) { + if (other.preReleases !== undefined) { + return 1; + } + } else if (other.preReleases === undefined) { + return -1; + } + + // Don't consider build. + return 0; + } + + static parse(version) { + if (!version) { + return null; + } + + const match = version.match(_versionPattern); + if (match) { + const major = parseInt(match[1], 10); + const minor = parseInt(match[2], 10); + const patch = match[3] ? parseInt(match[3], 10) : undefined; + const dsePatch = match[4] ? 
parseInt(match[4], 10) : undefined; + const preReleases = match[5] ? match[5].split('-') : undefined; + const build = match[6]; + return new VersionNumber(major, minor, patch, dsePatch, preReleases, build); + } + throw new TypeError('Could not extract version from \'' + version + '\''); + } +} + +module.exports = VersionNumber; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/utils.js b/node_modules/cassandra-driver/lib/utils.js new file mode 100644 index 0000000..1ce7c4b --- /dev/null +++ b/node_modules/cassandra-driver/lib/utils.js @@ -0,0 +1,1087 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +'use strict'; + +const util = require('util'); +const net = require('net'); +const { EventEmitter } = require('events'); + +const errors = require('./errors'); +const promiseUtils = require('./promise-utils'); + +/** + * Max int that can be accurately represented with 64-bit Number (2^53) + * @type {number} + * @const + */ +const maxInt = 9007199254740992; + +const maxInt32 = 0x7fffffff; + +const emptyObject = Object.freeze({}); + +const emptyArray = Object.freeze([]); + +function noop() {} + +/** + * Forward-compatible allocation of buffer, filled with zeros. + * @type {Function} + */ +const allocBuffer = Buffer.alloc || allocBufferFillDeprecated; + +/** + * Forward-compatible unsafe allocation of buffer. 
+ * @type {Function} + */ +const allocBufferUnsafe = Buffer.allocUnsafe || allocBufferDeprecated; + +/** + * Forward-compatible allocation of buffer to contain a string. + * @type {Function} + */ +const allocBufferFromString = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromStringDeprecated; + +/** + * Forward-compatible allocation of buffer from an array of bytes + * @type {Function} + */ +const allocBufferFromArray = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromArrayDeprecated; + +function allocBufferDeprecated(size) { + // eslint-disable-next-line + return new Buffer(size); +} + +function allocBufferFillDeprecated(size) { + const b = allocBufferDeprecated(size); + b.fill(0); + return b; +} + +function allocBufferFromStringDeprecated(text, encoding) { + if (typeof text !== 'string') { + throw new TypeError('Expected string, obtained ' + util.inspect(text)); + } + // eslint-disable-next-line + return new Buffer(text, encoding); +} + +function allocBufferFromArrayDeprecated(arr) { + if (!Array.isArray(arr)) { + throw new TypeError('Expected Array, obtained ' + util.inspect(arr)); + } + // eslint-disable-next-line + return new Buffer(arr); +} + +/** + * @returns {Function} Returns a wrapper function that invokes the underlying callback only once. 
+ * @param {Function} callback + */ +function callbackOnce(callback) { + let cb = callback; + + return (function wrapperCallback(err, result) { + cb(err, result); + cb = noop; + }); +} + +/** + * Creates a copy of a buffer + */ +function copyBuffer(buf) { + const targetBuffer = allocBufferUnsafe(buf.length); + buf.copy(targetBuffer); + return targetBuffer; +} + +/** + * Appends the original stack trace to the error after a tick of the event loop + */ +function fixStack(stackTrace, error) { + if (stackTrace) { + error.stack += '\n (event loop)\n' + stackTrace.substr(stackTrace.indexOf("\n") + 1); + } + return error; +} + +/** + * Uses the logEmitter to emit log events + * @param {String} type + * @param {String} info + * @param [furtherInfo] + */ +function log(type, info, furtherInfo, options) { + if (!this.logEmitter) { + const effectiveOptions = options || this.options; + if (!effectiveOptions || !effectiveOptions.logEmitter) { + throw new Error('Log emitter not defined'); + } + this.logEmitter = effectiveOptions.logEmitter; + } + this.logEmitter('log', type, this.constructor.name, info, furtherInfo || ''); +} + +/** + * Gets the sum of the length of the items of an array + */ +function totalLength (arr) { + if (arr.length === 1) { + return arr[0].length; + } + let total = 0; + arr.forEach(function (item) { + let length = item.length; + length = length ? length : 0; + total += length; + }); + return total; +} + +/** + * Merge the contents of two or more objects together into the first object. Similar to jQuery.extend / Object.assign. + * The main difference between this method is that declared properties with an undefined value are not set + * to the target. 
+ */ +function extend(target) { + const sources = Array.prototype.slice.call(arguments, 1); + sources.forEach(function (source) { + if (!source) { + return; + } + const keys = Object.keys(source); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + const value = source[key]; + if (value === undefined) { + continue; + } + target[key] = value; + } + }); + return target; +} + +/** + * Returns a new object with the property names set to lowercase. + */ +function toLowerCaseProperties(obj) { + const keys = Object.keys(obj); + const result = {}; + for (let i = 0; i < keys.length; i++) { + const k = keys[i]; + result[k.toLowerCase()] = obj[k]; + } + return result; +} + +/** + * Extends the target by the most inner props of sources + * @param {Object} target + * @returns {Object} + */ +function deepExtend(target) { + const sources = Array.prototype.slice.call(arguments, 1); + sources.forEach(function (source) { + for (const prop in source) { + // eslint-disable-next-line no-prototype-builtins + if (!source.hasOwnProperty(prop)) { + continue; + } + const targetProp = target[prop]; + const targetType = (typeof targetProp); + //target prop is + // a native single type + // or not existent + // or is not an anonymous object (not class instance) + if (!targetProp || + targetType === 'number' || + targetType === 'string' || + Array.isArray(targetProp) || + util.isDate(targetProp) || + targetProp.constructor.name !== 'Object') { + target[prop] = source[prop]; + } + else { + //inner extend + target[prop] = deepExtend({}, targetProp, source[prop]); + } + } + }); + return target; +} + +function propCompare(propName) { + return function (a, b) { + if (a[propName] > b[propName]) { + return 1; + } + if (a[propName] < b[propName]) { + return -1; + } + return 0; + }; +} + +function funcCompare(name, argArray) { + return (function (a, b) { + if (typeof a[name] === 'undefined') { + return 0; + } + const valA = a[name].apply(a, argArray); + const valB = b[name].apply(b, 
argArray); + if (valA > valB) { + return 1; + } + if (valA < valB) { + return -1; + } + return 0; + }); +} +/** + * Uses the iterator protocol to go through the items of the Array + * @param {Array} arr + * @returns {Iterator} + */ +function arrayIterator (arr) { + return arr[Symbol.iterator](); +} + +/** + * Convert the iterator values into an array + * @param iterator + * @returns {Array} + */ +function iteratorToArray(iterator) { + const values = []; + let item = iterator.next(); + while (!item.done) { + values.push(item.value); + item = iterator.next(); + } + return values; +} + +/** + * Searches the specified Array for the provided key using the binary + * search algorithm. The Array must be sorted. + * @param {Array} arr + * @param key + * @param {function} compareFunc + * @returns {number} The position of the key in the Array, if it is found. + * If it is not found, it returns a negative number which is the bitwise complement of the index of the first element that is larger than key. + */ +function binarySearch(arr, key, compareFunc) { + let low = 0; + let high = arr.length-1; + + while (low <= high) { + const mid = (low + high) >>> 1; + const midVal = arr[mid]; + const cmp = compareFunc(midVal, key); + if (cmp < 0) { + low = mid + 1; + } + else if (cmp > 0) { + high = mid - 1; + } + else + { + //The key was found in the Array + return mid; + } + } + // key not found + return ~low; +} + +/** + * Inserts the value in the position determined by its natural order determined by the compare func + * @param {Array} arr + * @param item + * @param {function} compareFunc + */ +function insertSorted(arr, item, compareFunc) { + if (arr.length === 0) { + return arr.push(item); + } + let position = binarySearch(arr, item, compareFunc); + if (position < 0) { + position = ~position; + } + arr.splice(position, 0, item); +} + +/** + * Validates the provided parameter is of type function. + * @param {Function} fn The instance to validate. 
+ * @param {String} [name] Name of the function to use in the error message. Defaults to 'callback'. + * @returns {Function} + */ +function validateFn(fn, name) { + if (typeof fn !== 'function') { + throw new errors.ArgumentError(util.format('%s is not a function', name || 'callback')); + } + return fn; +} + +/** + * Adapts the parameters based on the prepared metadata. + * If the params are passed as an associative array (Object), + * it adapts the object into an array with the same order as columns + * @param {Array|Object} params + * @param {Array} columns + * @returns {Array} Returns an array of parameters. + * @throws {Error} In case a parameter with a specific name is not defined + */ +function adaptNamedParamsPrepared(params, columns) { + if (!params || Array.isArray(params) || !columns || columns.length === 0) { + // params is an array or there aren't parameters + return params; + } + const paramsArray = new Array(columns.length); + params = toLowerCaseProperties(params); + const keys = {}; + for (let i = 0; i < columns.length; i++) { + const name = columns[i].name; + // eslint-disable-next-line no-prototype-builtins + if (!params.hasOwnProperty(name)) { + throw new errors.ArgumentError(util.format('Parameter "%s" not defined', name)); + } + paramsArray[i] = params[name]; + keys[name] = i; + } + return paramsArray; +} + +/** + * Adapts the associative-array of parameters and hints for simple statements + * into Arrays based on the (arbitrary) position of the keys. + * @param {Array|Object} params + * @param {ExecutionOptions} execOptions + * @returns {{ params: Array<{name, value}>, namedParameters: boolean, keyIndexes: object }} Returns an array of + * parameters and the keys as an associative array. 
+ */ +function adaptNamedParamsWithHints(params, execOptions) { + if (!params || Array.isArray(params)) { + //The parameters is an Array or there isn't parameter + return { params: params, namedParameters: false, keyIndexes: null }; + } + + const keys = Object.keys(params); + const paramsArray = new Array(keys.length); + const hints = new Array(keys.length); + const userHints = execOptions.getHints() || emptyObject; + const keyIndexes = {}; + + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + // As lower cased identifiers + paramsArray[i] = { name: key.toLowerCase(), value: params[key]}; + hints[i] = userHints[key]; + keyIndexes[key] = i; + } + + execOptions.setHints(hints); + + return { params: paramsArray, namedParameters: true, keyIndexes }; +} + +/** + * Returns a string with a value repeated n times + * @param {String} val + * @param {Number} times + * @returns {String} + */ +function stringRepeat(val, times) { + if (!times || times < 0) { + return null; + } + if (times === 1) { + return val; + } + return new Array(times + 1).join(val); +} + +/** + * Returns an array containing the values of the Object, similar to Object.values(). + * If obj is null or undefined, it will return an empty array. + * @param {Object} obj + * @returns {Array} + */ +function objectValues(obj) { + if (!obj) { + return emptyArray; + } + const keys = Object.keys(obj); + const values = new Array(keys.length); + for (let i = 0; i < keys.length; i++) { + values[i] = obj[keys[i]]; + } + return values; +} + +/** + * Wraps the callback-based method. When no originalCallback is not defined, it returns a Promise. 
+ * @param {ClientOptions} options + * @param {Function} originalCallback + * @param {Function} handler + * @returns {Promise|undefined} + */ +function promiseWrapper(options, originalCallback, handler) { + if (typeof originalCallback === 'function') { + // Callback-based invocation + handler.call(this, originalCallback); + return undefined; + } + const factory = options.promiseFactory || defaultPromiseFactory; + const self = this; + return factory(function handlerWrapper(callback) { + handler.call(self, callback); + }); +} + +/** + * @param {Function} handler + * @returns {Promise} + */ +function defaultPromiseFactory(handler) { + return new Promise(function executor(resolve, reject) { + handler(function handlerCallback(err, result) { + if (err) { + return reject(err); + } + resolve(result); + }); + }); +} + +/** + * Returns the first not undefined param + */ +function ifUndefined(v1, v2) { + return v1 !== undefined ? v1 : v2; +} + +/** + * Returns the first not undefined param + */ +function ifUndefined3(v1, v2, v3) { + if (v1 !== undefined) { + return v1; + } + return v2 !== undefined ? v2 : v3; +} + +/** + * Shuffles an Array in-place. + * @param {Array} arr + * @returns {Array} + * @private + */ +function shuffleArray(arr) { + // Fisher–Yates algorithm + for (let i = arr.length - 1; i > 0; i--) { + // Math.random() has an extremely short permutation cycle length but we don't care about collisions + const j = Math.floor(Math.random() * (i + 1)); + const temp = arr[i]; + arr[i] = arr[j]; + arr[j] = temp; + } + + return arr; +} + +// Classes + +/** + * Represents a unique set of values. + * @constructor + */ +function HashSet() { + this.length = 0; + this.items = {}; +} + +/** + * Adds a new item to the set. + * @param {Object} key + * @returns {boolean} Returns true if it was added to the set; false if the key is already present. 
+ */ +HashSet.prototype.add = function (key) { + if (this.contains(key)) { + return false; + } + this.items[key] = true; + this.length++; + return true; +}; + +/** + * @returns {boolean} Returns true if the key is present in the set. + */ +HashSet.prototype.contains = function (key) { + return this.length > 0 && this.items[key] === true; +}; + +/** + * Removes the item from set. + * @param key + * @return {boolean} Returns true if the key existed and was removed, otherwise it returns false. + */ +HashSet.prototype.remove = function (key) { + if (!this.contains(key)) { + return false; + } + delete this.items[key]; + this.length--; +}; + +/** + * Returns an array containing the set items. + * @returns {Array} + */ +HashSet.prototype.toArray = function () { + return Object.keys(this.items); +}; + +/** + * Utility class that resolves host names into addresses. + */ +class AddressResolver { + + /** + * Creates a new instance of the resolver. + * @param {Object} options + * @param {String} options.nameOrIp + * @param {Object} [options.dns] + */ + constructor(options) { + if (!options || !options.nameOrIp || !options.dns) { + throw new Error('nameOrIp and dns lib must be provided as part of the options'); + } + + this._resolve4 = util.promisify(options.dns.resolve4); + this._nameOrIp = options.nameOrIp; + this._isIp = net.isIP(options.nameOrIp); + this._index = 0; + this._addresses = null; + this._refreshing = null; + } + + /** + * Resolves the addresses for the host name. + */ + async init() { + if (this._isIp) { + return; + } + + await this._resolve(); + } + + /** + * Tries to resolve the addresses for the host name. 
+ */ + async refresh() { + if (this._isIp) { + return; + } + + if (this._refreshing) { + return await promiseUtils.fromEvent(this._refreshing, 'finished'); + } + + this._refreshing = new EventEmitter().setMaxListeners(0); + + try { + await this._resolve(); + } catch (err) { + // Ignore the possible resolution error + } + + this._refreshing.emit('finished'); + this._refreshing = null; + } + + async _resolve() { + const arr = await this._resolve4(this._nameOrIp); + + if (!arr || arr.length === 0) { + throw new Error(`${this._nameOrIp} could not be resolved`); + } + + this._addresses = arr; + } + + /** + * Returns resolved ips in a round-robin fashion. + */ + getIp() { + if (this._isIp) { + return this._nameOrIp; + } + + const item = this._addresses[this._index % this._addresses.length]; + this._index = (this._index !== maxInt32) ? (this._index + 1) : 0; + + return item; + } +} + +/** + * @param {Array} arr + * @param {Function} fn + * @param {Function} [callback] + */ +function each(arr, fn, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter is not an Array'); + } + callback = callback || noop; + const length = arr.length; + if (length === 0) { + return callback(); + } + let completed = 0; + for (let i = 0; i < length; i++) { + fn(arr[i], next); + } + function next(err) { + if (err) { + const cb = callback; + callback = noop; + cb(err); + return; + } + if (++completed !== length) { + return; + } + callback(); + } +} + +/** + * @param {Array} arr + * @param {Function} fn + * @param {Function} [callback] + */ +function eachSeries(arr, fn, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter is not an Array'); + } + callback = callback || noop; + const length = arr.length; + if (length === 0) { + return callback(); + } + let sync; + let index = 1; + fn(arr[0], next); + if (sync === undefined) { + sync = false; + } + + function next(err) { + if (err) { + return callback(err); + } + if (index >= length) { + return 
callback(); + } + if (sync === undefined) { + sync = true; + } + if (sync) { + return process.nextTick(function () { + fn(arr[index++], next); + }); + } + fn(arr[index++], next); + } +} + +/** + * @param {Array} arr + * @param {Function} fn + * @param {Function} [callback] + */ +function forEachOf(arr, fn, callback) { + return mapEach(arr, fn, true, callback); +} + +/** + * @param {Array} arr + * @param {Function} fn + * @param {Function} [callback] + */ +function map(arr, fn, callback) { + return mapEach(arr, fn, false, callback); +} + +function mapEach(arr, fn, useIndex, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter must be an Array'); + } + callback = callback || noop; + const length = arr.length; + if (length === 0) { + return callback(null, []); + } + const result = new Array(length); + let completed = 0; + const invoke = useIndex ? invokeWithIndex : invokeWithoutIndex; + for (let i = 0; i < length; i++) { + invoke(i); + } + + function invokeWithoutIndex(i) { + fn(arr[i], function mapItemCallback(err, transformed) { + result[i] = transformed; + next(err); + }); + } + + function invokeWithIndex(i) { + fn(arr[i], i, function mapItemCallback(err, transformed) { + result[i] = transformed; + next(err); + }); + } + + function next(err) { + if (err) { + const cb = callback; + callback = noop; + cb(err); + return; + } + if (++completed !== length) { + return; + } + callback(null, result); + } +} + +/** + * @param {Array} arr + * @param {Function} fn + * @param {Function} [callback] + */ +function mapSeries(arr, fn, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter must be an Array'); + } + callback = callback || noop; + const length = arr.length; + if (length === 0) { + return callback(null, []); + } + const result = new Array(length); + let index = 0; + let sync; + invoke(0); + if (sync === undefined) { + sync = false; + } + + function invoke(i) { + fn(arr[i], function mapItemCallback(err, transformed) { 
+ result[i] = transformed; + next(err); + }); + } + + function next(err) { + if (err) { + return callback(err); + } + if (++index === length) { + return callback(null, result); + } + if (sync === undefined) { + sync = true; + } + const i = index; + if (sync) { + return process.nextTick(function () { + invoke(i); + }); + } + invoke(index); + } +} + +/** + * @param {Array.} arr + * @param {Function} [callback] + */ +function parallel(arr, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter must be an Array'); + } + callback = callback || noop; + const length = arr.length; + let completed = 0; + for (let i = 0; i < length; i++) { + arr[i](next); + } + function next(err) { + if (err) { + const cb = callback; + callback = noop; + return cb(err); + } + if (++completed !== length) { + return; + } + callback(); + } +} + +/** + * Similar to async.series(), but instead accumulating the result in an Array, it callbacks with the result of the last + * function in the array. 
+ * @param {Array.} arr + * @param {Function} [callback] + */ +function series(arr, callback) { + if (!Array.isArray(arr)) { + throw new TypeError('First parameter must be an Array'); + } + callback = callback || noop; + let index = 0; + let sync; + next(); + function next(err, result) { + if (err) { + return callback(err); + } + if (index === arr.length) { + return callback(null, result); + } + if (sync) { + return process.nextTick(function () { + sync = true; + arr[index++](next); + sync = false; + }); + } + sync = true; + arr[index++](next); + sync = false; + } +} + +/** + * @param {Number} count + * @param {Function} iteratorFunc + * @param {Function} [callback] + */ +function times(count, iteratorFunc, callback) { + callback = callback || noop; + count = +count; + if (isNaN(count) || count === 0) { + return callback(); + } + let completed = 0; + for (let i = 0; i < count; i++) { + iteratorFunc(i, next); + } + function next(err) { + if (err) { + const cb = callback; + callback = noop; + return cb(err); + } + if (++completed !== count) { + return; + } + callback(); + } +} + +/** + * @param {Number} count + * @param {Number} limit + * @param {Function} iteratorFunc + * @param {Function} [callback] + */ +function timesLimit(count, limit, iteratorFunc, callback) { + let sync = undefined; + callback = callback || noop; + limit = Math.min(limit, count); + let index = limit - 1; + let i; + let completed = 0; + for (i = 0; i < limit; i++) { + iteratorFunc(i, next); + } + i = -1; + function next(err) { + if (err) { + const cb = callback; + callback = noop; + cb(err); + return; + } + if (++completed === count) { + return callback(); + } + index++; + if (index >= count) { + return; + } + if (sync === undefined) { + sync = (i >= 0); + } + if (sync) { + const captureIndex = index; + return process.nextTick(function () { + iteratorFunc(captureIndex, next); + }); + } + iteratorFunc(index, next); + } +} + +/** + * @param {Number} count + * @param {Function} iteratorFunction + 
* @param {Function} callback + */ +function timesSeries(count, iteratorFunction, callback) { + count = +count; + if (isNaN(count) || count < 1) { + return callback(); + } + let index = 1; + let sync; + iteratorFunction(0, next); + if (sync === undefined) { + sync = false; + } + function next(err) { + if (err) { + return callback(err); + } + if (index === count) { + return callback(); + } + if (sync === undefined) { + sync = true; + } + const i = index++; + if (sync) { + //Prevent "Maximum call stack size exceeded" + return process.nextTick(function () { + iteratorFunction(i, next); + }); + } + //do a sync call as the callback is going to call on a future tick + iteratorFunction(i, next); + } +} + +/** + * @param {Function} condition + * @param {Function} fn + * @param {Function} callback + */ +function whilst(condition, fn, callback) { + let sync = 0; + next(); + function next(err) { + if (err) { + return callback(err); + } + if (!condition()) { + return callback(); + } + if (sync === 0) { + sync = 1; + fn(function (err) { + if (sync === 1) { + //sync function + sync = 4; + } + next(err); + }); + if (sync === 1) { + //async function + sync = 2; + } + return; + } + if (sync === 4) { + //Prevent "Maximum call stack size exceeded" + return process.nextTick(function () { + fn(next); + }); + } + //do a sync call as the callback is going to call on a future tick + fn(next); + } +} + +exports.adaptNamedParamsPrepared = adaptNamedParamsPrepared; +exports.adaptNamedParamsWithHints = adaptNamedParamsWithHints; +exports.AddressResolver = AddressResolver; +exports.allocBuffer = allocBuffer; +exports.allocBufferUnsafe = allocBufferUnsafe; +exports.allocBufferFromArray = allocBufferFromArray; +exports.allocBufferFromString = allocBufferFromString; +exports.arrayIterator = arrayIterator; +exports.binarySearch = binarySearch; +exports.callbackOnce = callbackOnce; +exports.copyBuffer = copyBuffer; +exports.deepExtend = deepExtend; +exports.each = each; +exports.eachSeries = 
eachSeries; +/** @const */ +exports.emptyArray = Object.freeze([]); +/** @const */ +exports.emptyObject = emptyObject; +exports.extend = extend; +exports.fixStack = fixStack; +exports.forEachOf = forEachOf; +exports.funcCompare = funcCompare; +exports.ifUndefined = ifUndefined; +exports.ifUndefined3 = ifUndefined3; +exports.insertSorted = insertSorted; +exports.iteratorToArray = iteratorToArray; +exports.log = log; +exports.map = map; +exports.mapSeries = mapSeries; +exports.maxInt = maxInt; +exports.noop = noop; +exports.objectValues = objectValues; +exports.parallel = parallel; +exports.promiseWrapper = promiseWrapper; +exports.propCompare = propCompare; +exports.series = series; +exports.shuffleArray = shuffleArray; +exports.stringRepeat = stringRepeat; +exports.times = times; +exports.timesLimit = timesLimit; +exports.timesSeries = timesSeries; +exports.totalLength = totalLength; +exports.validateFn = validateFn; +exports.whilst = whilst; +exports.HashSet = HashSet; \ No newline at end of file diff --git a/node_modules/cassandra-driver/lib/writers.js b/node_modules/cassandra-driver/lib/writers.js new file mode 100644 index 0000000..a3d4f28 --- /dev/null +++ b/node_modules/cassandra-driver/lib/writers.js @@ -0,0 +1,310 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +'use strict'; +const events = require('events'); + +const types = require('./types'); +const utils = require('./utils.js'); +const FrameHeader = types.FrameHeader; + +/** + * Contains the logic to write all the different types to the frame. + */ +class FrameWriter { + /** + * Creates a new instance of FrameWriter. + * @param {Number} opcode + */ + constructor(opcode) { + if (!opcode) { + throw new Error('Opcode not provided'); + } + this.buffers = []; + this.opcode = opcode; + this.bodyLength = 0; + } + + add(buf) { + this.buffers.push(buf); + this.bodyLength += buf.length; + } + + writeShort(num) { + const buf = utils.allocBufferUnsafe(2); + buf.writeUInt16BE(num, 0); + this.add(buf); + } + + writeInt(num) { + const buf = utils.allocBufferUnsafe(4); + buf.writeInt32BE(num, 0); + this.add(buf); + } + + /** @param {Long} num */ + writeLong(num) { + this.add(types.Long.toBuffer(num)); + } + + /** + * Writes bytes according to Cassandra + * @param {Buffer|null|types.unset} bytes + */ + writeBytes(bytes) { + if (bytes === null) { + //Only the length buffer containing -1 + this.writeInt(-1); + return; + } + if (bytes === types.unset) { + this.writeInt(-2); + return; + } + //Add the length buffer + this.writeInt(bytes.length); + //Add the actual buffer + this.add(bytes); + } + + /** + * Writes a buffer according to Cassandra protocol: bytes.length (2) + bytes + * @param {Buffer} bytes + */ + writeShortBytes(bytes) { + if(bytes === null) { + //Only the length buffer containing -1 + this.writeShort(-1); + return; + } + //Add the length buffer + this.writeShort(bytes.length); + //Add the actual buffer + this.add(bytes); + } + + /** + * Writes a single byte + * @param {Number} num Value of the byte, a number between 0 and 255. 
+ */ + writeByte(num) { + this.add(utils.allocBufferFromArray([num])); + } + + writeString(str) { + if (typeof str === "undefined") { + throw new Error("can not write undefined"); + } + const len = Buffer.byteLength(str, 'utf8'); + const buf = utils.allocBufferUnsafe(2 + len); + buf.writeUInt16BE(len, 0); + buf.write(str, 2, buf.length-2, 'utf8'); + this.add(buf); + } + + writeLString(str) { + const len = Buffer.byteLength(str, 'utf8'); + const buf = utils.allocBufferUnsafe(4 + len); + buf.writeInt32BE(len, 0); + buf.write(str, 4, buf.length-4, 'utf8'); + this.add(buf); + } + + writeStringList(values) { + this.writeShort(values.length); + values.forEach(this.writeString, this); + } + + writeCustomPayload(payload) { + const keys = Object.keys(payload); + this.writeShort(keys.length); + keys.forEach(k => { + this.writeString(k); + this.writeBytes(payload[k]); + }); + } + + writeStringMap(map) { + const keys = []; + for (const k in map) { + if (map.hasOwnProperty(k)) { + keys.push(k); + } + } + + this.writeShort(keys.length); + + for(let i = 0; i < keys.length; i++) { + const key = keys[i]; + this.writeString(key); + this.writeString(map[key]); + } + } + + /** + * @param {Number} version + * @param {Number} streamId + * @param {Number} [flags] Header flags + * @returns {Buffer} + * @throws {TypeError} + */ + write(version, streamId, flags) { + const header = new FrameHeader(version, flags || 0, streamId, this.opcode, this.bodyLength); + const headerBuffer = header.toBuffer(); + this.buffers.unshift(headerBuffer); + return Buffer.concat(this.buffers, headerBuffer.length + this.bodyLength); + } +} + +/** + * Represents a queue that process one write at a time (FIFO). + * @extends {EventEmitter} + */ +class WriteQueue extends events.EventEmitter { + /** + * Creates a new WriteQueue instance. 
+ * @param {Socket} netClient + * @param {Encoder} encoder + * @param {ClientOptions} options + */ + constructor(netClient, encoder, options) { + super(); + this.netClient = netClient; + this.encoder = encoder; + this.isRunning = false; + /** @type {Array<{operation: OperationState, callback: Function}>} */ + this.queue = []; + this.coalescingThreshold = options.socketOptions.coalescingThreshold; + this.error = null; + this.canWrite = true; + + // Listen to drain event that is going to be fired once + // the underlying buffer is empty + netClient.on('drain', () => { + this.canWrite = true; + this.run(); + }); + } + + /** + * Enqueues a new request + * @param {OperationState} operation + * @param {Function} callback The write callback. + */ + push(operation, callback) { + const self = this; + + if (this.error) { + // There was a write error, there is no point in further trying to write to the socket. + return process.nextTick(function writePushError() { + callback(self.error); + }); + } + + this.queue.push({ operation: operation, callback: callback}); + this.run(); + } + + run() { + if (!this.isRunning && this.canWrite) { + this.isRunning = true; + // Use nextTick to allow the queue to build up on the current phase + process.nextTick(() => this.process()); + } + } + + process() { + if (this.error) { + return; + } + + const buffers = []; + const callbacks = []; + let totalLength = 0; + + while (this.queue.length > 0 && totalLength < this.coalescingThreshold) { + const writeItem = this.queue.shift(); + if (!writeItem.operation.canBeWritten()) { + // Invoke the write callback with an error that is not going to be yielded to user + // as the operation has timed out or was cancelled. 
+ writeItem.callback(new Error('The operation was already cancelled or timeout elapsed')); + continue; + } + let data; + try { + data = writeItem.operation.request.write(this.encoder, writeItem.operation.streamId); + } + catch (err) { + writeItem.callback(err); + continue; + } + totalLength += data.length; + buffers.push(data); + callbacks.push(writeItem.callback); + } + + if (totalLength === 0) { + this.isRunning = false; + return; + } + + // We have to invoke the callbacks to avoid race conditions. + // There is a performance benefit from executing all of them in a loop + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](); + } + + // Concatenate buffers and write it to the socket + // Further writes will be throttled until flushed + this.canWrite = this.netClient.write(Buffer.concat(buffers, totalLength), err => { + if (err) { + this.setWriteError(err); + return; + } + + if (this.queue.length === 0 || !this.canWrite) { + // It will start running once we get the next message or has drained + this.isRunning = false; + return; + } + + // Allow IO between writes + setImmediate(() => this.process()); + }); + } + + /** + * Emits the 'error' event and callbacks items that haven't been written and clears them from the queue. + * @param err + */ + setWriteError(err) { + err.isSocketError = true; + this.error = new types.DriverError('Socket was closed'); + this.error.isSocketError = true; + // Use an special flag for items that haven't been written + this.error.requestNotWritten = true; + this.error.innerError = err; + const q = this.queue; + // Not more items can be added to the queue. 
+ this.queue = utils.emptyArray; + for (let i = 0; i < q.length; i++) { + const item = q[i]; + // Use the error marking that it was not written + item.callback(this.error); + } + } +} + +module.exports = { FrameWriter, WriteQueue }; diff --git a/node_modules/cassandra-driver/package.json b/node_modules/cassandra-driver/package.json new file mode 100644 index 0000000..2bf06d2 --- /dev/null +++ b/node_modules/cassandra-driver/package.json @@ -0,0 +1,56 @@ +{ + "name": "cassandra-driver", + "version": "4.6.4", + "description": "DataStax Node.js Driver for Apache Cassandra", + "author": "DataStax", + "keywords": [ + "cassandra", + "cql", + "cql3", + "connection", + "gremlin", + "datastax", + "nosql", + "driver", + "database", + "dse", + "graph", + "graphdb" + ], + "license": "Apache-2.0", + "types": "./index.d.ts", + "dependencies": { + "@types/long": "^4.0.0", + "@types/node": ">=8", + "adm-zip": "^0.5.3", + "long": "^2.2.0" + }, + "devDependencies": { + "chai": "4.2.0", + "mocha": "~7.1.1", + "proxyquire": "~2.1.3", + "sinon": "~7.5.0", + "temp": ">= 0.8.3" + }, + "repository": { + "type": "git", + "url": "https://github.com/datastax/nodejs-driver.git" + }, + "bugs": { + "url": "https://groups.google.com/a/lists.datastax.com/forum/#!forum/nodejs-driver-user" + }, + "scripts": { + "test": "./node_modules/.bin/mocha test/unit -R spec -t 5000 --recursive", + "unit": "./node_modules/.bin/mocha test/unit -R spec -t 5000 --recursive", + "integration_short": "./node_modules/.bin/mocha test/integration/short -R spec -t 5000 --recursive", + "integration_long": "./node_modules/.bin/mocha test/integration/long -R spec -t 5000 --recursive", + "ci_jenkins": "./node_modules/.bin/mocha test/unit test/integration/short --recursive -R mocha-jenkins-reporter --exit", + "ci_appveyor": "./node_modules/.bin/mocha test/unit test/integration/short --recursive -R mocha-appveyor-reporter --exit", + "ci_unit_appveyor": "./node_modules/.bin/mocha test/unit --recursive -R 
mocha-appveyor-reporter --exit", + "server_api": "./node_modules/.bin/mocha test/integration/short -g '@SERVER_API' --recursive --exit", + "eslint": "eslint lib test" + }, + "engines": { + "node": ">=8" + } +} diff --git a/node_modules/chalk/index.d.ts b/node_modules/chalk/index.d.ts new file mode 100644 index 0000000..9cd88f3 --- /dev/null +++ b/node_modules/chalk/index.d.ts @@ -0,0 +1,415 @@ +/** +Basic foreground colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type ForegroundColor = + | 'black' + | 'red' + | 'green' + | 'yellow' + | 'blue' + | 'magenta' + | 'cyan' + | 'white' + | 'gray' + | 'grey' + | 'blackBright' + | 'redBright' + | 'greenBright' + | 'yellowBright' + | 'blueBright' + | 'magentaBright' + | 'cyanBright' + | 'whiteBright'; + +/** +Basic background colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type BackgroundColor = + | 'bgBlack' + | 'bgRed' + | 'bgGreen' + | 'bgYellow' + | 'bgBlue' + | 'bgMagenta' + | 'bgCyan' + | 'bgWhite' + | 'bgGray' + | 'bgGrey' + | 'bgBlackBright' + | 'bgRedBright' + | 'bgGreenBright' + | 'bgYellowBright' + | 'bgBlueBright' + | 'bgMagentaBright' + | 'bgCyanBright' + | 'bgWhiteBright'; + +/** +Basic colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type Color = ForegroundColor | BackgroundColor; + +declare type Modifiers = + | 'reset' + | 'bold' + | 'dim' + | 'italic' + | 'underline' + | 'inverse' + | 'hidden' + | 'strikethrough' + | 'visible'; + +declare namespace chalk { + /** + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + type Level = 0 | 1 | 2 | 3; + + interface Options { + /** + Specify the color support for Chalk. 
+ + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level?: Level; + } + + /** + Return a new Chalk instance. + */ + type Instance = new (options?: Options) => Chalk; + + /** + Detect whether the terminal supports color. + */ + interface ColorSupport { + /** + The color level used by Chalk. + */ + level: Level; + + /** + Return whether Chalk supports basic 16 colors. + */ + hasBasic: boolean; + + /** + Return whether Chalk supports ANSI 256 colors. + */ + has256: boolean; + + /** + Return whether Chalk supports Truecolor 16 million colors. + */ + has16m: boolean; + } + + interface ChalkFunction { + /** + Use a template string. + + @remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341)) + + @example + ``` + import chalk = require('chalk'); + + log(chalk` + CPU: {red ${cpu.totalPercent}%} + RAM: {green ${ram.used / ram.total * 100}%} + DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} + `); + ``` + + @example + ``` + import chalk = require('chalk'); + + log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`) + ``` + */ + (text: TemplateStringsArray, ...placeholders: unknown[]): string; + + (...text: unknown[]): string; + } + + interface Chalk extends ChalkFunction { + /** + Return a new Chalk instance. + */ + Instance: Instance; + + /** + The color support for Chalk. + + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level: Level; + + /** + Use HEX value to set text color. + + @param color - Hexadecimal value representing the desired color. 
+ + @example + ``` + import chalk = require('chalk'); + + chalk.hex('#DEADED'); + ``` + */ + hex(color: string): Chalk; + + /** + Use keyword color value to set text color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.keyword('orange'); + ``` + */ + keyword(color: string): Chalk; + + /** + Use RGB values to set text color. + */ + rgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set text color. + */ + hsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set text color. + */ + hsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set text color. + */ + hwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + */ + ansi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(index: number): Chalk; + + /** + Use HEX value to set background color. + + @param color - Hexadecimal value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgHex('#DEADED'); + ``` + */ + bgHex(color: string): Chalk; + + /** + Use keyword color value to set background color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgKeyword('orange'); + ``` + */ + bgKeyword(color: string): Chalk; + + /** + Use RGB values to set background color. + */ + bgRgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set background color. 
+ */ + bgHsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set background color. + */ + bgHsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set background color. + */ + bgHwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + Use the foreground code, not the background code (for example, not 41, nor 101). + */ + bgAnsi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color. + */ + bgAnsi256(index: number): Chalk; + + /** + Modifier: Resets the current color chain. + */ + readonly reset: Chalk; + + /** + Modifier: Make text bold. + */ + readonly bold: Chalk; + + /** + Modifier: Emitting only a small amount of light. + */ + readonly dim: Chalk; + + /** + Modifier: Make text italic. (Not widely supported) + */ + readonly italic: Chalk; + + /** + Modifier: Make text underline. (Not widely supported) + */ + readonly underline: Chalk; + + /** + Modifier: Inverse background and foreground colors. + */ + readonly inverse: Chalk; + + /** + Modifier: Prints the text, but makes it invisible. + */ + readonly hidden: Chalk; + + /** + Modifier: Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: Chalk; + + /** + Modifier: Prints the text only when Chalk has a color support level > 0. + Can be useful for things that are purely cosmetic. 
+ */ + readonly visible: Chalk; + + readonly black: Chalk; + readonly red: Chalk; + readonly green: Chalk; + readonly yellow: Chalk; + readonly blue: Chalk; + readonly magenta: Chalk; + readonly cyan: Chalk; + readonly white: Chalk; + + /* + Alias for `blackBright`. + */ + readonly gray: Chalk; + + /* + Alias for `blackBright`. + */ + readonly grey: Chalk; + + readonly blackBright: Chalk; + readonly redBright: Chalk; + readonly greenBright: Chalk; + readonly yellowBright: Chalk; + readonly blueBright: Chalk; + readonly magentaBright: Chalk; + readonly cyanBright: Chalk; + readonly whiteBright: Chalk; + + readonly bgBlack: Chalk; + readonly bgRed: Chalk; + readonly bgGreen: Chalk; + readonly bgYellow: Chalk; + readonly bgBlue: Chalk; + readonly bgMagenta: Chalk; + readonly bgCyan: Chalk; + readonly bgWhite: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGray: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGrey: Chalk; + + readonly bgBlackBright: Chalk; + readonly bgRedBright: Chalk; + readonly bgGreenBright: Chalk; + readonly bgYellowBright: Chalk; + readonly bgBlueBright: Chalk; + readonly bgMagentaBright: Chalk; + readonly bgCyanBright: Chalk; + readonly bgWhiteBright: Chalk; + } +} + +/** +Main Chalk object that allows to chain styles together. +Call the last one as a method with a string argument. +Order doesn't matter, and later styles take precedent in case of a conflict. +This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`. 
+*/ +declare const chalk: chalk.Chalk & chalk.ChalkFunction & { + supportsColor: chalk.ColorSupport | false; + Level: chalk.Level; + Color: Color; + ForegroundColor: ForegroundColor; + BackgroundColor: BackgroundColor; + Modifiers: Modifiers; + stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false}; +}; + +export = chalk; diff --git a/node_modules/chalk/license b/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json new file mode 100644 index 0000000..47c23f2 --- /dev/null +++ b/node_modules/chalk/package.json @@ -0,0 +1,68 @@ +{ + "name": "chalk", + "version": "4.1.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "funding": "https://github.com/chalk/chalk?sponsor=1", + "main": "source", + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd", + "bench": "matcha benchmark.js" + }, + "files": [ + "source", + "index.d.ts" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "devDependencies": { + "ava": "^2.4.0", + "coveralls": "^3.0.7", + "execa": "^4.0.0", + "import-fresh": "^3.1.0", + "matcha": "^0.7.0", + "nyc": "^15.0.0", + "resolve-from": "^5.0.0", + "tsd": "^0.7.4", + "xo": "^0.28.2" + }, + "xo": { + "rules": { + "unicorn/prefer-string-slice": "off", + "unicorn/prefer-includes": "off", + "@typescript-eslint/member-ordering": "off", + "no-redeclare": "off", + "unicorn/string-content": "off", + "unicorn/better-regex": "off" + } + } +} diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md new file mode 100644 index 0000000..a055d21 --- /dev/null +++ b/node_modules/chalk/readme.md @@ -0,0 +1,341 @@ +

+
+
+ Chalk +
+
+
+

+ +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![npm dependents](https://badgen.net/npm/dependents/chalk)](https://www.npmjs.com/package/chalk?activeTab=dependents) [![Downloads](https://badgen.net/npm/dt/chalk)](https://www.npmjs.com/package/chalk) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) ![TypeScript-ready](https://img.shields.io/npm/types/chalk.svg) [![run on repl.it](https://repl.it/badge/github/chalk/chalk)](https://repl.it/github/chalk/chalk) + + + +
+ +--- + + + +--- + +
+ +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~50,000 packages](https://www.npmjs.com/browse/depended/chalk) as of January 1, 2020 + +## Install + +```console +$ npm install chalk +``` + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. 
+log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + +## API + +### chalk.`