From c8641ef07b9a42041d1cf9112dc17dc6b9238267 Mon Sep 17 00:00:00 2001
From: Ichinose Shogo
Date: Sun, 16 May 2021 12:34:25 +0900
Subject: [PATCH] build v1.12.0

---
 dist/index.js     | 4 ++++
 dist/index.js.map | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/dist/index.js b/dist/index.js
index c1bfcd265..1700649c7 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -249,6 +249,10 @@ function splitArgs(args) {
     if (!args) {
         return [];
     }
+    args = args.trim();
+    if (args === '') {
+        return [];
+    }
     return args.split(/\s+/);
 }

diff --git a/dist/index.js.map b/dist/index.js.map
index ab63651ad..b0e03b037 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
[single-line source map contents omitted: dist/index.js.map is generated webpack output, regenerated here to match the index.js change above]
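
For context, an illustration that is not part of the patch: before this guard, String.prototype.split keeps the empty fields around leading and trailing separators, so a padded or whitespace-only install-modules-args value turned into empty-string arguments handed to the chosen installer (cpanm/cpm/carton). A quick Node.js comparison of the old and new behavior:

    // Old behavior: empty fields survive around leading/trailing whitespace.
    '   '.split(/\s+/);              // => ['', '']
    ' -v --notest '.split(/\s+/);    // => ['', '-v', '--notest', '']

    // Patched splitArgs, exactly as in the hunk above.
    function splitArgs(args) {
        if (!args) {
            return [];
        }
        args = args.trim();
        if (args === '') {
            return [];
        }
        return args.split(/\s+/);
    }

    splitArgs('   ');                // => []
    splitArgs(' -v --notest ');      // => ['-v', '--notest']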
core.debug('Awaiting all uploads');\n let offset = 0;\n try {\n yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {\n while (offset < fileSize) {\n const chunkSize = Math.min(fileSize - offset, maxChunkSize);\n const start = offset;\n const end = offset + chunkSize - 1;\n offset += maxChunkSize;\n yield uploadChunk(httpClient, resourceUrl, () => fs\n .createReadStream(archivePath, {\n fd,\n start,\n end,\n autoClose: false\n })\n .on('error', error => {\n throw new Error(`Cache upload failed because file read failed with ${error.message}`);\n }), start, end);\n }\n })));\n }\n finally {\n fs.closeSync(fd);\n }\n return;\n });\n}\nfunction commitCache(httpClient, cacheId, filesize) {\n return __awaiter(this, void 0, void 0, function* () {\n const commitCacheRequest = { size: filesize };\n return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);\n }));\n });\n}\nfunction saveCache(cacheId, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n core.debug('Upload cache');\n yield uploadFile(httpClient, cacheId, archivePath, options);\n // Commit Cache\n core.debug('Commiting cache');\n const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);\n const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);\n if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {\n throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);\n }\n core.info('Cache saved successfully');\n });\n}\nexports.saveCache = saveCache;\n//# sourceMappingURL=cacheHttpClient.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst glob = __importStar(require(\"@actions/glob\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst path = __importStar(require(\"path\"));\nconst semver = __importStar(require(\"semver\"));\nconst util = __importStar(require(\"util\"));\nconst uuid_1 = require(\"uuid\");\nconst constants_1 = require(\"./constants\");\n// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23\nfunction createTempDirectory() {\n return __awaiter(this, void 0, void 0, function* () {\n const IS_WINDOWS = process.platform === 'win32';\n let tempDirectory = process.env['RUNNER_TEMP'] || '';\n if (!tempDirectory) {\n let baseLocation;\n if (IS_WINDOWS) {\n // On Windows use the USERPROFILE env variable\n baseLocation = process.env['USERPROFILE'] || 'C:\\\\';\n }\n else {\n if (process.platform === 'darwin') {\n baseLocation = '/Users';\n }\n else {\n baseLocation = '/home';\n }\n }\n tempDirectory = path.join(baseLocation, 'actions', 'temp');\n }\n const dest = path.join(tempDirectory, uuid_1.v4());\n yield io.mkdirP(dest);\n return dest;\n });\n}\nexports.createTempDirectory = createTempDirectory;\nfunction getArchiveFileSizeIsBytes(filePath) {\n return fs.statSync(filePath).size;\n}\nexports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;\nfunction resolvePaths(patterns) {\n var e_1, _a;\n var _b;\n return __awaiter(this, void 0, void 0, function* () {\n const paths = [];\n const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd();\n const globber = yield glob.create(patterns.join('\\n'), {\n implicitDescendants: false\n });\n try {\n for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {\n const file = _d.value;\n const relativeFile = path\n .relative(workspace, file)\n .replace(new RegExp(`\\\\${path.sep}`, 'g'), '/');\n core.debug(`Matched: ${relativeFile}`);\n // Paths are made relative so the tar entries are all relative to the root of the workspace.\n paths.push(`${relativeFile}`);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return paths;\n });\n}\nexports.resolvePaths = resolvePaths;\nfunction unlinkFile(filePath) {\n return __awaiter(this, void 0, void 0, function* () {\n return util.promisify(fs.unlink)(filePath);\n });\n}\nexports.unlinkFile = unlinkFile;\nfunction getVersion(app) {\n return __awaiter(this, void 0, void 0, function* () {\n core.debug(`Checking ${app} --version`);\n let versionOutput = '';\n try {\n yield exec.exec(`${app} --version`, [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n }\n catch (err) {\n core.debug(err.message);\n }\n versionOutput = versionOutput.trim();\n core.debug(versionOutput);\n return versionOutput;\n });\n}\n// Use zstandard if possible to maximize cache performance\nfunction getCompressionMethod() {\n return __awaiter(this, void 0, void 0, function* () {\n if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {\n // Disable zstd due to bug https://github.com/actions/cache/issues/301\n return constants_1.CompressionMethod.Gzip;\n }\n const versionOutput = yield getVersion('zstd');\n const version = semver.clean(versionOutput);\n if (!versionOutput.toLowerCase().includes('zstd command line interface')) {\n // zstd is not installed\n return constants_1.CompressionMethod.Gzip;\n }\n else if (!version || semver.lt(version, 'v1.3.2')) {\n // zstd is installed but using a version earlier than v1.3.2\n // v1.3.2 is required to use the `--long` options in zstd\n return constants_1.CompressionMethod.ZstdWithoutLong;\n }\n else {\n return constants_1.CompressionMethod.Zstd;\n }\n });\n}\nexports.getCompressionMethod = getCompressionMethod;\nfunction getCacheFileName(compressionMethod) {\n return compressionMethod === constants_1.CompressionMethod.Gzip\n ? 
constants_1.CacheFilename.Gzip\n : constants_1.CacheFilename.Zstd;\n}\nexports.getCacheFileName = getCacheFileName;\nfunction isGnuTarInstalled() {\n return __awaiter(this, void 0, void 0, function* () {\n const versionOutput = yield getVersion('tar');\n return versionOutput.toLowerCase().includes('gnu tar');\n });\n}\nexports.isGnuTarInstalled = isGnuTarInstalled;\nfunction assertDefined(name, value) {\n if (value === undefined) {\n throw Error(`Expected ${name} but value was undefiend`);\n }\n return value;\n}\nexports.assertDefined = assertDefined;\n//# sourceMappingURL=cacheUtils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nvar CacheFilename;\n(function (CacheFilename) {\n CacheFilename[\"Gzip\"] = \"cache.tgz\";\n CacheFilename[\"Zstd\"] = \"cache.tzst\";\n})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));\nvar CompressionMethod;\n(function (CompressionMethod) {\n CompressionMethod[\"Gzip\"] = \"gzip\";\n // Long range mode was added to zstd in v1.3.2.\n // This enum is for earlier version of zstd that does not have --long support\n CompressionMethod[\"ZstdWithoutLong\"] = \"zstd-without-long\";\n CompressionMethod[\"Zstd\"] = \"zstd\";\n})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));\n// The default number of retry attempts.\nexports.DefaultRetryAttempts = 2;\n// The default delay in milliseconds between retry attempts.\nexports.DefaultRetryDelay = 5000;\n// Socket timeout in milliseconds during download. If no traffic is received\n// over the socket during this period, the socket is destroyed and the download\n// is aborted.\nexports.SocketTimeout = 5000;\n//# sourceMappingURL=constants.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst storage_blob_1 = require(\"@azure/storage-blob\");\nconst buffer = __importStar(require(\"buffer\"));\nconst fs = __importStar(require(\"fs\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nconst requestUtils_1 = require(\"./requestUtils\");\n/**\n * Pipes the body of a HTTP response to a stream\n *\n * @param response the HTTP response\n * @param output the writable stream\n */\nfunction pipeResponseToStream(response, output) {\n return __awaiter(this, void 0, void 0, function* () {\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(response.message, output);\n });\n}\n/**\n * Class for tracking the download state and displaying stats.\n */\nclass DownloadProgress {\n constructor(contentLength) {\n this.contentLength = contentLength;\n this.segmentIndex = 0;\n this.segmentSize = 0;\n this.segmentOffset = 0;\n this.receivedBytes = 0;\n this.displayedComplete = false;\n this.startTime = Date.now();\n }\n /**\n * Progress to the next segment. Only call this method when the previous segment\n * is complete.\n *\n * @param segmentSize the length of the next segment\n */\n nextSegment(segmentSize) {\n this.segmentOffset = this.segmentOffset + this.segmentSize;\n this.segmentIndex = this.segmentIndex + 1;\n this.segmentSize = segmentSize;\n this.receivedBytes = 0;\n core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);\n }\n /**\n * Sets the number of bytes received for the current segment.\n *\n * @param receivedBytes the number of bytes received\n */\n setReceivedBytes(receivedBytes) {\n this.receivedBytes = receivedBytes;\n }\n /**\n * Returns the total number of bytes transferred.\n */\n getTransferredBytes() {\n return this.segmentOffset + this.receivedBytes;\n }\n /**\n * Returns true if the download is complete.\n */\n isDone() {\n return this.getTransferredBytes() === this.contentLength;\n }\n /**\n * Prints the current download stats. 
Once the download completes, this will print one\n * last line and then stop.\n */\n display() {\n if (this.displayedComplete) {\n return;\n }\n const transferredBytes = this.segmentOffset + this.receivedBytes;\n const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);\n const elapsedTime = Date.now() - this.startTime;\n const downloadSpeed = (transferredBytes /\n (1024 * 1024) /\n (elapsedTime / 1000)).toFixed(1);\n core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);\n if (this.isDone()) {\n this.displayedComplete = true;\n }\n }\n /**\n * Returns a function used to handle TransferProgressEvents.\n */\n onProgress() {\n return (progress) => {\n this.setReceivedBytes(progress.loadedBytes);\n };\n }\n /**\n * Starts the timer that displays the stats.\n *\n * @param delayInMs the delay between each write\n */\n startDisplayTimer(delayInMs = 1000) {\n const displayCallback = () => {\n this.display();\n if (!this.isDone()) {\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n };\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n /**\n * Stops the timer that displays the stats. As this typically indicates the download\n * is complete, this will display one last line, unless the last line has already\n * been written.\n */\n stopDisplayTimer() {\n if (this.timeoutHandle) {\n clearTimeout(this.timeoutHandle);\n this.timeoutHandle = undefined;\n }\n this.display();\n }\n}\nexports.DownloadProgress = DownloadProgress;\n/**\n * Download the cache using the Actions toolkit http-client\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n */\nfunction downloadCacheHttpClient(archiveLocation, archivePath) {\n return __awaiter(this, void 0, void 0, function* () {\n const writeStream = fs.createWriteStream(archivePath);\n const httpClient = new http_client_1.HttpClient('actions/cache');\n const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));\n // Abort download if no traffic received over the socket.\n downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {\n downloadResponse.message.destroy();\n core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);\n });\n yield pipeResponseToStream(downloadResponse, writeStream);\n // Validate download size.\n const contentLengthHeader = downloadResponse.message.headers['content-length'];\n if (contentLengthHeader) {\n const expectedLength = parseInt(contentLengthHeader);\n const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);\n if (actualLength !== expectedLength) {\n throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);\n }\n }\n else {\n core.debug('Unable to validate download, no Content-Length header');\n }\n });\n}\nexports.downloadCacheHttpClient = downloadCacheHttpClient;\n/**\n * Download the cache using the Azure Storage SDK. 
Only call this method if the\n * URL points to an Azure Storage endpoint.\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n * @param options the download options with the defaults set\n */\nfunction downloadCacheStorageSDK(archiveLocation, archivePath, options) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {\n retryOptions: {\n // Override the timeout used when downloading each 4 MB chunk\n // The default is 2 min / MB, which is way too slow\n tryTimeoutInMs: options.timeoutInMs\n }\n });\n const properties = yield client.getProperties();\n const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;\n if (contentLength < 0) {\n // We should never hit this condition, but just in case fall back to downloading the\n // file as one large stream\n core.debug('Unable to determine content length, downloading file with http-client...');\n yield downloadCacheHttpClient(archiveLocation, archivePath);\n }\n else {\n // Use downloadToBuffer for faster downloads, since internally it splits the\n // file into 4 MB chunks which can then be parallelized and retried independently\n //\n // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB\n // on 64-bit systems), split the download into multiple segments\n const maxSegmentSize = buffer.constants.MAX_LENGTH;\n const downloadProgress = new DownloadProgress(contentLength);\n const fd = fs.openSync(archivePath, 'w');\n try {\n downloadProgress.startDisplayTimer();\n while (!downloadProgress.isDone()) {\n const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;\n const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);\n downloadProgress.nextSegment(segmentSize);\n const result = yield client.downloadToBuffer(segmentStart, segmentSize, {\n concurrency: options.downloadConcurrency,\n onProgress: downloadProgress.onProgress()\n });\n fs.writeFileSync(fd, result);\n }\n }\n finally {\n downloadProgress.stopDisplayTimer();\n fs.closeSync(fd);\n }\n }\n });\n}\nexports.downloadCacheStorageSDK = downloadCacheStorageSDK;\n//# sourceMappingURL=downloadUtils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst constants_1 = require(\"./constants\");\nfunction isSuccessStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n return statusCode >= 200 && statusCode < 300;\n}\nexports.isSuccessStatusCode = isSuccessStatusCode;\nfunction isServerErrorStatusCode(statusCode) {\n if (!statusCode) {\n return true;\n }\n return statusCode >= 500;\n}\nexports.isServerErrorStatusCode = isServerErrorStatusCode;\nfunction isRetryableStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n const retryableStatusCodes = [\n http_client_1.HttpCodes.BadGateway,\n http_client_1.HttpCodes.ServiceUnavailable,\n http_client_1.HttpCodes.GatewayTimeout\n ];\n return retryableStatusCodes.includes(statusCode);\n}\nexports.isRetryableStatusCode = isRetryableStatusCode;\nfunction sleep(milliseconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, milliseconds));\n });\n}\nfunction retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {\n return __awaiter(this, void 0, void 0, function* () {\n let errorMessage = '';\n let attempt = 1;\n while (attempt <= maxAttempts) {\n let response = undefined;\n let statusCode = undefined;\n let isRetryable = false;\n try {\n response = yield method();\n }\n catch (error) {\n if (onError) {\n response = onError(error);\n }\n isRetryable = true;\n errorMessage = error.message;\n }\n if (response) {\n statusCode = getStatusCode(response);\n if (!isServerErrorStatusCode(statusCode)) {\n return response;\n }\n }\n if (statusCode) {\n isRetryable = isRetryableStatusCode(statusCode);\n errorMessage = `Cache service responded with ${statusCode}`;\n }\n core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);\n if (!isRetryable) {\n core.debug(`${name} - Error is not retryable`);\n break;\n }\n yield sleep(delay);\n attempt++;\n }\n throw Error(`${name} failed: ${errorMessage}`);\n });\n}\nexports.retry = retry;\nfunction retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, \n // If the error object contains the statusCode property, extract it and return\n // an ITypedResponse so it can be processed by the retry logic.\n (error) => {\n if (error instanceof http_client_1.HttpClientError) {\n return {\n statusCode: error.statusCode,\n result: null,\n headers: {}\n };\n }\n else {\n return undefined;\n }\n });\n });\n}\nexports.retryTypedResponse = retryTypedResponse;\nfunction retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield 
retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);\n });\n}\nexports.retryHttpClientResponse = retryHttpClientResponse;\n//# sourceMappingURL=requestUtils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst exec_1 = require(\"@actions/exec\");\nconst io = __importStar(require(\"@actions/io\"));\nconst fs_1 = require(\"fs\");\nconst path = __importStar(require(\"path\"));\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nfunction getTarPath(args, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n switch (process.platform) {\n case 'win32': {\n const systemTar = `${process.env['windir']}\\\\System32\\\\tar.exe`;\n if (compressionMethod !== constants_1.CompressionMethod.Gzip) {\n // We only use zstandard compression on windows when gnu tar is installed due to\n // a bug with compressing large files with bsdtar + zstd\n args.push('--force-local');\n }\n else if (fs_1.existsSync(systemTar)) {\n return systemTar;\n }\n else if (yield utils.isGnuTarInstalled()) {\n args.push('--force-local');\n }\n break;\n }\n case 'darwin': {\n const gnuTar = yield io.which('gtar', false);\n if (gnuTar) {\n // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527\n args.push('--delay-directory-restore');\n return gnuTar;\n }\n break;\n }\n default:\n break;\n }\n return yield io.which('tar', true);\n });\n}\nfunction execTar(args, compressionMethod, cwd) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exec_1.exec(`\"${yield getTarPath(args, compressionMethod)}\"`, args, { cwd });\n }\n catch (error) {\n throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);\n }\n });\n}\nfunction getWorkingDirectory() {\n var _a;\n return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();\n}\nfunction extractTar(archivePath, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // Create directory to extract tar into\n const workingDirectory = getWorkingDirectory();\n yield io.mkdirP(workingDirectory);\n // --d: Decompress.\n // --long=#: Enables long distance matching with # bits. 
Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -d --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -d'];\n default:\n return ['-z'];\n }\n }\n const args = [\n ...getCompressionProgram(),\n '-xf',\n archivePath.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P',\n '-C',\n workingDirectory.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/')\n ];\n yield execTar(args, compressionMethod);\n });\n}\nexports.extractTar = extractTar;\nfunction createTar(archiveFolder, sourceDirectories, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // Write source directories to manifest.txt to avoid command length limits\n const manifestFilename = 'manifest.txt';\n const cacheFileName = utils.getCacheFileName(compressionMethod);\n fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\\n'));\n const workingDirectory = getWorkingDirectory();\n // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.\n // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -T0 --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -T0'];\n default:\n return ['-z'];\n }\n }\n const args = [\n '--posix',\n ...getCompressionProgram(),\n '-cf',\n cacheFileName.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P',\n '-C',\n workingDirectory.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '--files-from',\n manifestFilename\n ];\n yield execTar(args, compressionMethod, archiveFolder);\n });\n}\nexports.createTar = createTar;\nfunction listTar(archivePath, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // --d: Decompress.\n // --long=#: Enables long distance matching with # bits.\n // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -d --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -d'];\n default:\n return ['-z'];\n }\n }\n const args = [\n ...getCompressionProgram(),\n '-tf',\n archivePath.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P'\n ];\n yield execTar(args, compressionMethod);\n });\n}\nexports.listTar = listTar;\n//# sourceMappingURL=tar.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = 
__importStar(require(\"@actions/core\"));\n/**\n * Returns a copy of the upload options with defaults filled in.\n *\n * @param copy the original upload options\n */\nfunction getUploadOptions(copy) {\n const result = {\n uploadConcurrency: 4,\n uploadChunkSize: 32 * 1024 * 1024\n };\n if (copy) {\n if (typeof copy.uploadConcurrency === 'number') {\n result.uploadConcurrency = copy.uploadConcurrency;\n }\n if (typeof copy.uploadChunkSize === 'number') {\n result.uploadChunkSize = copy.uploadChunkSize;\n }\n }\n core.debug(`Upload concurrency: ${result.uploadConcurrency}`);\n core.debug(`Upload chunk size: ${result.uploadChunkSize}`);\n return result;\n}\nexports.getUploadOptions = getUploadOptions;\n/**\n * Returns a copy of the download options with defaults filled in.\n *\n * @param copy the original download options\n */\nfunction getDownloadOptions(copy) {\n const result = {\n useAzureSdk: true,\n downloadConcurrency: 8,\n timeoutInMs: 30000\n };\n if (copy) {\n if (typeof copy.useAzureSdk === 'boolean') {\n result.useAzureSdk = copy.useAzureSdk;\n }\n if (typeof copy.downloadConcurrency === 'number') {\n result.downloadConcurrency = copy.downloadConcurrency;\n }\n if (typeof copy.timeoutInMs === 'number') {\n result.timeoutInMs = copy.timeoutInMs;\n }\n }\n core.debug(`Use Azure SDK: ${result.useAzureSdk}`);\n core.debug(`Download concurrency: ${result.downloadConcurrency}`);\n core.debug(`Request timeout (ms): ${result.timeoutInMs}`);\n return result;\n}\nexports.getDownloadOptions = getDownloadOptions;\n//# sourceMappingURL=options.js.map","exports = module.exports = SemVer\n\nvar debug\n/* istanbul ignore next */\nif (typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)) {\n debug = function () {\n var args = Array.prototype.slice.call(arguments, 0)\n args.unshift('SEMVER')\n console.log.apply(console, args)\n }\n} else {\n debug = function () {}\n}\n\n// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nexports.SEMVER_SPEC_VERSION = '2.0.0'\n\nvar MAX_LENGTH = 256\nvar MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n /* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nvar MAX_SAFE_COMPONENT_LENGTH = 16\n\n// The actual regexps go on exports.re\nvar re = exports.re = []\nvar src = exports.src = []\nvar t = exports.tokens = {}\nvar R = 0\n\nfunction tok (n) {\n t[n] = R++\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ntok('NUMERICIDENTIFIER')\nsrc[t.NUMERICIDENTIFIER] = '0|[1-9]\\\\d*'\ntok('NUMERICIDENTIFIERLOOSE')\nsrc[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ntok('NONNUMERICIDENTIFIER')\nsrc[t.NONNUMERICIDENTIFIER] = '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*'\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ntok('MAINVERSION')\nsrc[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')'\n\ntok('MAINVERSIONLOOSE')\nsrc[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' 
+\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ntok('PRERELEASEIDENTIFIER')\nsrc[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\ntok('PRERELEASEIDENTIFIERLOOSE')\nsrc[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ntok('PRERELEASE')\nsrc[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'\n\ntok('PRERELEASELOOSE')\nsrc[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ntok('BUILDIDENTIFIER')\nsrc[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ntok('BUILD')\nsrc[t.BUILD] = '(?:\\\\+(' + src[t.BUILDIDENTIFIER] +\n '(?:\\\\.' + src[t.BUILDIDENTIFIER] + ')*))'\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ntok('FULL')\ntok('FULLPLAIN')\nsrc[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +\n src[t.PRERELEASE] + '?' +\n src[t.BUILD] + '?'\n\nsrc[t.FULL] = '^' + src[t.FULLPLAIN] + '$'\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ntok('LOOSEPLAIN')\nsrc[t.LOOSEPLAIN] = '[v=\\\\s]*' + src[t.MAINVERSIONLOOSE] +\n src[t.PRERELEASELOOSE] + '?' +\n src[t.BUILD] + '?'\n\ntok('LOOSE')\nsrc[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'\n\ntok('GTLT')\nsrc[t.GTLT] = '((?:<|>)?=?)'\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ntok('XRANGEIDENTIFIERLOOSE')\nsrc[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*'\ntok('XRANGEIDENTIFIER')\nsrc[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\\\*'\n\ntok('XRANGEPLAIN')\nsrc[t.XRANGEPLAIN] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:' + src[t.PRERELEASE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGEPLAINLOOSE')\nsrc[t.XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[t.PRERELEASELOOSE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGE')\nsrc[t.XRANGE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAIN] + '$'\ntok('XRANGELOOSE')\nsrc[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ntok('COERCE')\nsrc[t.COERCE] = '(^|[^\\\\d])' +\n '(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +\n '(?:$|[^\\\\d])'\ntok('COERCERTL')\nre[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ntok('LONETILDE')\nsrc[t.LONETILDE] = '(?:~>?)'\n\ntok('TILDETRIM')\nsrc[t.TILDETRIM] = '(\\\\s*)' + src[t.LONETILDE] + '\\\\s+'\nre[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')\nvar tildeTrimReplace = '$1~'\n\ntok('TILDE')\nsrc[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'\ntok('TILDELOOSE')\nsrc[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ntok('LONECARET')\nsrc[t.LONECARET] = '(?:\\\\^)'\n\ntok('CARETTRIM')\nsrc[t.CARETTRIM] = '(\\\\s*)' + src[t.LONECARET] + '\\\\s+'\nre[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')\nvar caretTrimReplace = '$1^'\n\ntok('CARET')\nsrc[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'\ntok('CARETLOOSE')\nsrc[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ntok('COMPARATORLOOSE')\nsrc[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'\ntok('COMPARATOR')\nsrc[t.COMPARATOR] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.FULLPLAIN] + ')$|^$'\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ntok('COMPARATORTRIM')\nsrc[t.COMPARATORTRIM] = '(\\\\s*)' + src[t.GTLT] +\n '\\\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'\n\n// this one has to use the /g flag\nre[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')\nvar comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ntok('HYPHENRANGE')\nsrc[t.HYPHENRANGE] = '^\\\\s*(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s*$'\n\ntok('HYPHENRANGELOOSE')\nsrc[t.HYPHENRANGELOOSE] = '^\\\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$'\n\n// Star ranges basically just allow anything at all.\ntok('STAR')\nsrc[t.STAR] = '(<|>)?=?\\\\s*\\\\*'\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i])\n if (!re[i]) {\n re[i] = new RegExp(src[i])\n }\n}\n\nexports.parse = parse\nfunction parse (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n var r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nexports.valid = valid\nfunction valid (version, options) {\n var v = parse(version, options)\n return v ? v.version : null\n}\n\nexports.clean = clean\nfunction clean (version, options) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\n\nexports.SemVer = SemVer\n\nfunction SemVer (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n if (version instanceof SemVer) {\n if (version.loose === options.loose) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')\n }\n\n if (!(this instanceof SemVer)) {\n return new SemVer(version, options)\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n\n var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map(function (id) {\n if (/^[0-9]+$/.test(id)) {\n var num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? m[5].split('.') : []\n this.format()\n}\n\nSemVer.prototype.format = function () {\n this.version = this.major + '.' + this.minor + '.' + this.patch\n if (this.prerelease.length) {\n this.version += '-' + this.prerelease.join('.')\n }\n return this.version\n}\n\nSemVer.prototype.toString = function () {\n return this.version\n}\n\nSemVer.prototype.compare = function (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return this.compareMain(other) || this.comparePre(other)\n}\n\nSemVer.prototype.compareMain = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n}\n\nSemVer.prototype.comparePre = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n var i = 0\n do {\n var a = this.prerelease[i]\n var b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\nSemVer.prototype.compareBuild = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n var i = 0\n do {\n var a = this.build[i]\n var b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === 
undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. premajor and prepatch work the same way.\nSemVer.prototype.inc = function (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n var i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error('invalid increment argument: ' + release)\n }\n this.format()\n this.raw = this.version\n return this\n}\n\nexports.inc = inc\nfunction inc (version, release, loose, identifier) {\n if (typeof (loose) === 'string') {\n identifier = loose\n loose = undefined\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\n\nexports.diff = diff\nfunction diff (version1, version2) {\n if (eq(version1, version2)) {\n return null\n } else {\n var v1 = parse(version1)\n var v2 = parse(version2)\n var prefix = ''\n if (v1.prerelease.length || v2.prerelease.length) {\n 
prefix = 'pre'\n var defaultResult = 'prerelease'\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers\n\nvar numeric = /^[0-9]+$/\nfunction compareIdentifiers (a, b) {\n var anum = numeric.test(a)\n var bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers\nfunction rcompareIdentifiers (a, b) {\n return compareIdentifiers(b, a)\n}\n\nexports.major = major\nfunction major (a, loose) {\n return new SemVer(a, loose).major\n}\n\nexports.minor = minor\nfunction minor (a, loose) {\n return new SemVer(a, loose).minor\n}\n\nexports.patch = patch\nfunction patch (a, loose) {\n return new SemVer(a, loose).patch\n}\n\nexports.compare = compare\nfunction compare (a, b, loose) {\n return new SemVer(a, loose).compare(new SemVer(b, loose))\n}\n\nexports.compareLoose = compareLoose\nfunction compareLoose (a, b) {\n return compare(a, b, true)\n}\n\nexports.compareBuild = compareBuild\nfunction compareBuild (a, b, loose) {\n var versionA = new SemVer(a, loose)\n var versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\n\nexports.rcompare = rcompare\nfunction rcompare (a, b, loose) {\n return compare(b, a, loose)\n}\n\nexports.sort = sort\nfunction sort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(a, b, loose)\n })\n}\n\nexports.rsort = rsort\nfunction rsort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(b, a, loose)\n })\n}\n\nexports.gt = gt\nfunction gt (a, b, loose) {\n return compare(a, b, loose) > 0\n}\n\nexports.lt = lt\nfunction lt (a, b, loose) {\n return compare(a, b, loose) < 0\n}\n\nexports.eq = eq\nfunction eq (a, b, loose) {\n return compare(a, b, loose) === 0\n}\n\nexports.neq = neq\nfunction neq (a, b, loose) {\n return compare(a, b, loose) !== 0\n}\n\nexports.gte = gte\nfunction gte (a, b, loose) {\n return compare(a, b, loose) >= 0\n}\n\nexports.lte = lte\nfunction lte (a, b, loose) {\n return compare(a, b, loose) <= 0\n}\n\nexports.cmp = cmp\nfunction cmp (a, op, b, loose) {\n switch (op) {\n case '===':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a === b\n\n case '!==':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError('Invalid operator: ' + op)\n }\n}\n\nexports.Comparator = Comparator\nfunction Comparator (comp, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n if (!(this instanceof Comparator)) {\n return new Comparator(comp, options)\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n 
this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n}\n\nvar ANY = {}\nComparator.prototype.parse = function (comp) {\n var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var m = comp.match(r)\n\n if (!m) {\n throw new TypeError('Invalid comparator: ' + comp)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n}\n\nComparator.prototype.toString = function () {\n return this.value\n}\n\nComparator.prototype.test = function (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n}\n\nComparator.prototype.intersects = function (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n var rangeTmp\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n rangeTmp = new Range(comp.value, options)\n return satisfies(this.value, rangeTmp, options)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n rangeTmp = new Range(this.value, options)\n return satisfies(comp.semver, rangeTmp, options)\n }\n\n var sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n var sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n var sameSemVer = this.semver.version === comp.semver.version\n var differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n var oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n ((this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<'))\n var oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n ((this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || comp.operator === '>'))\n\n return sameDirectionIncreasing || sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan || oppositeDirectionsGreaterThan\n}\n\nexports.Range = Range\nfunction Range (range, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (range instanceof Range) {\n if (range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n return new Range(range.value, options)\n }\n\n if (!(this instanceof Range)) {\n return new Range(range, options)\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function (range) 
{\n return this.parseRange(range.trim())\n }, this).filter(function (c) {\n // throw out any that are not relevant for whatever reason\n return c.length\n })\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range)\n }\n\n this.format()\n}\n\nRange.prototype.format = function () {\n this.range = this.set.map(function (comps) {\n return comps.join(' ').trim()\n }).join('||').trim()\n return this.range\n}\n\nRange.prototype.toString = function () {\n return this.range\n}\n\nRange.prototype.parseRange = function (range) {\n var loose = this.options.loose\n range = range.trim()\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace)\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range, re[t.COMPARATORTRIM])\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var set = range.split(' ').map(function (comp) {\n return parseComparator(comp, this.options)\n }, this).join(' ').split(/\\s+/)\n if (this.options.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function (comp) {\n return !!comp.match(compRe)\n })\n }\n set = set.map(function (comp) {\n return new Comparator(comp, this.options)\n }, this)\n\n return set\n}\n\nRange.prototype.intersects = function (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some(function (thisComparators) {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some(function (rangeComparators) {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every(function (thisComparator) {\n return rangeComparators.every(function (rangeComparator) {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n}\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nfunction isSatisfiable (comparators, options) {\n var result = true\n var remainingComparators = comparators.slice()\n var testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every(function (otherComparator) {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator = remainingComparators.pop()\n }\n\n return result\n}\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators\nfunction toComparators (range, options) {\n return new Range(range, options).set.map(function (comp) {\n return comp.map(function (c) {\n return c.value\n }).join(' ').trim().split(' ')\n })\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator (comp, options) {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', 
comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nfunction isX (id) {\n return !id || id.toLowerCase() === 'x' || id === '*'\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceTilde(comp, options)\n }).join(' ')\n}\n\nfunction replaceTilde (comp, options) {\n var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceCaret(comp, options)\n }).join(' ')\n}\n\nfunction replaceCaret (comp, options) {\n debug('caret', comp, options)\n var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n if (M === '0') {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else {\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + (+M + 1) + '.0.0'\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0'\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nfunction replaceXRanges (comp, options) {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map(function (comp) {\n return replaceXRange(comp, options)\n }).join(' ')\n}\n\nfunction replaceXRange (comp, options) {\n comp = comp.trim()\n var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, function (ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n var xM = isX(M)\n var xm = xM || isX(m)\n var xp = xm || isX(p)\n var anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n ret = gtlt + M + '.' + m + '.' + p + pr\n } else if (xm) {\n ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0' + pr +\n ' <' + M + '.' + (+m + 1) + '.0' + pr\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars (comp, options) {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = '>=' + fM + '.0.0'\n } else if (isX(fp)) {\n from = '>=' + fM + '.' + fm + '.0'\n } else {\n from = '>=' + from\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = '<' + (+tM + 1) + '.0.0'\n } else if (isX(tp)) {\n to = '<' + tM + '.' + (+tm + 1) + '.0'\n } else if (tpr) {\n to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr\n } else {\n to = '<=' + to\n }\n\n return (from + ' ' + to).trim()\n}\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n}\n\nfunction testSet (set, version, options) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n\nexports.satisfies = satisfies\nfunction satisfies (version, range, options) {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\n\nexports.maxSatisfying = maxSatisfying\nfunction maxSatisfying (versions, range, options) {\n var max = null\n var maxSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\n\nexports.minSatisfying = minSatisfying\nfunction minSatisfying (versions, range, options) {\n var min = null\n var minSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\n\nexports.minVersion = minVersion\nfunction minVersion (range, loose) {\n range = new Range(range, loose)\n\n var minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n comparators.forEach(function (comparator) {\n // Clone to avoid manipulating the comparator's semver object.\n var compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!minver || gt(minver, compver)) {\n minver = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error('Unexpected operation: ' + comparator.operator)\n }\n })\n 
}\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\n\nexports.validRange = validRange\nfunction validRange (range, options) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr\nfunction ltr (version, range, options) {\n return outside(version, range, '<', options)\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr\nfunction gtr (version, range, options) {\n return outside(version, range, '>', options)\n}\n\nexports.outside = outside\nfunction outside (version, range, hilo, options) {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n var gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n var high = null\n var low = null\n\n comparators.forEach(function (comparator) {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nexports.prerelease = prerelease\nfunction prerelease (version, options) {\n var parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null\n}\n\nexports.intersects = intersects\nfunction intersects (r1, r2, options) {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\n\nexports.coerce = coerce\nfunction coerce (version, options) {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n var match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n var next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(match[2] +\n '.' + (match[3] || '0') +\n '.' + (match[4] || '0'), options)\n}\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# 
sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? 
message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) 
{\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? 
a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param options optional exec options. 
See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst internal_globber_1 = require(\"./internal-globber\");\n/**\n * Constructs a globber\n *\n * @param patterns Patterns separated by newlines\n * @param options Glob options\n */\nfunction create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield internal_globber_1.DefaultGlobber.create(patterns, options);\n });\n}\nexports.create = create;\n//# sourceMappingURL=glob.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Returns a copy with defaults filled in.\n */\nfunction getOptions(copy) {\n const result = {\n followSymbolicLinks: true,\n implicitDescendants: true,\n omitBrokenSymbolicLinks: true\n };\n if (copy) {\n if (typeof copy.followSymbolicLinks === 'boolean') {\n result.followSymbolicLinks = copy.followSymbolicLinks;\n core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);\n }\n if (typeof copy.implicitDescendants === 'boolean') {\n result.implicitDescendants = copy.implicitDescendants;\n core.debug(`implicitDescendants '${result.implicitDescendants}'`);\n }\n if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {\n result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;\n core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);\n }\n }\n return result;\n}\nexports.getOptions = getOptions;\n//# sourceMappingURL=internal-glob-options-helper.js.map","\"use strict\";\nvar __awaiter = (this && 
this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }\nvar __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\n function fulfill(value) { resume(\"next\", value); }\n function reject(value) { resume(\"throw\", value); }\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst globOptionsHelper = __importStar(require(\"./internal-glob-options-helper\"));\nconst path = __importStar(require(\"path\"));\nconst patternHelper = __importStar(require(\"./internal-pattern-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_pattern_1 = require(\"./internal-pattern\");\nconst internal_search_state_1 = require(\"./internal-search-state\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass DefaultGlobber {\n constructor(options) {\n this.patterns = [];\n this.searchPaths = [];\n this.options = globOptionsHelper.getOptions(options);\n }\n getSearchPaths() {\n // Return a copy\n return this.searchPaths.slice();\n }\n glob() {\n var e_1, _a;\n return __awaiter(this, void 0, void 0, function* () {\n const result = [];\n try {\n for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {\n const itemPath = _c.value;\n result.push(itemPath);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return result;\n });\n }\n globGenerator() {\n return __asyncGenerator(this, arguments, function* globGenerator_1() {\n // Fill in defaults options\n const options = globOptionsHelper.getOptions(this.options);\n // Implicit descendants?\n const patterns = [];\n for (const pattern of this.patterns) {\n patterns.push(pattern);\n if (options.implicitDescendants &&\n (pattern.trailingSeparator ||\n pattern.segments[pattern.segments.length - 1] !== '**')) {\n patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));\n }\n }\n // Push the search paths\n const stack = [];\n for (const searchPath of patternHelper.getSearchPaths(patterns)) {\n core.debug(`Search path '${searchPath}'`);\n // Exists?\n try {\n // Intentionally using lstat. 
Detection for broken symlink\n // will be performed later (if following symlinks).\n yield __await(fs.promises.lstat(searchPath));\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n continue;\n }\n throw err;\n }\n stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));\n }\n // Search\n const traversalChain = []; // used to detect cycles\n while (stack.length) {\n // Pop\n const item = stack.pop();\n // Match?\n const match = patternHelper.match(patterns, item.path);\n const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);\n if (!match && !partialMatch) {\n continue;\n }\n // Stat\n const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)\n // Broken symlink, or symlink cycle detected, or no longer exists\n );\n // Broken symlink, or symlink cycle detected, or no longer exists\n if (!stats) {\n continue;\n }\n // Directory\n if (stats.isDirectory()) {\n // Matched\n if (match & internal_match_kind_1.MatchKind.Directory) {\n yield yield __await(item.path);\n }\n // Descend?\n else if (!partialMatch) {\n continue;\n }\n // Push the child items in reverse\n const childLevel = item.level + 1;\n const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));\n stack.push(...childItems.reverse());\n }\n // File\n else if (match & internal_match_kind_1.MatchKind.File) {\n yield yield __await(item.path);\n }\n }\n });\n }\n /**\n * Constructs a DefaultGlobber\n */\n static create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const result = new DefaultGlobber(options);\n if (IS_WINDOWS) {\n patterns = patterns.replace(/\\r\\n/g, '\\n');\n patterns = patterns.replace(/\\r/g, '\\n');\n }\n const lines = patterns.split('\\n').map(x => x.trim());\n for (const line of lines) {\n // Empty or comment\n if (!line || line.startsWith('#')) {\n continue;\n }\n // Pattern\n else {\n result.patterns.push(new internal_pattern_1.Pattern(line));\n }\n }\n result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));\n return result;\n });\n }\n static stat(item, options, traversalChain) {\n return __awaiter(this, void 0, void 0, function* () {\n // Note:\n // `stat` returns info about the target of a symlink (or symlink chain)\n // `lstat` returns info about a symlink itself\n let stats;\n if (options.followSymbolicLinks) {\n try {\n // Use `stat` (following symlinks)\n stats = yield fs.promises.stat(item.path);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n if (options.omitBrokenSymbolicLinks) {\n core.debug(`Broken symlink '${item.path}'`);\n return undefined;\n }\n throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`);\n }\n throw err;\n }\n }\n else {\n // Use `lstat` (not following symlinks)\n stats = yield fs.promises.lstat(item.path);\n }\n // Note, isDirectory() returns false for the lstat of a symlink\n if (stats.isDirectory() && options.followSymbolicLinks) {\n // Get the realpath\n const realPath = yield fs.promises.realpath(item.path);\n // Fixup the traversal chain to match the item level\n while (traversalChain.length >= item.level) {\n traversalChain.pop();\n }\n // Test for a cycle\n if (traversalChain.some((x) => x === realPath)) {\n core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);\n return undefined;\n }\n // Update the traversal chain\n traversalChain.push(realPath);\n }\n return stats;\n });\n }\n}\nexports.DefaultGlobber = DefaultGlobber;\n//# sourceMappingURL=internal-globber.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Indicates whether a pattern matches a path\n */\nvar MatchKind;\n(function (MatchKind) {\n /** Not matched */\n MatchKind[MatchKind[\"None\"] = 0] = \"None\";\n /** Matched if the path is a directory */\n MatchKind[MatchKind[\"Directory\"] = 1] = \"Directory\";\n /** Matched if the path is a regular file */\n MatchKind[MatchKind[\"File\"] = 2] = \"File\";\n /** Matched */\n MatchKind[MatchKind[\"All\"] = 3] = \"All\";\n})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));\n//# sourceMappingURL=internal-match-kind.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst path = __importStar(require(\"path\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.\n *\n * For example, on Linux/macOS:\n * - `/ => /`\n * - `/hello => /`\n *\n * For example, on Windows:\n * - `C:\\ => C:\\`\n * - `C:\\hello => C:\\`\n * - `C: => C:`\n * - `C:hello => C:`\n * - `\\ => \\`\n * - `\\hello => \\`\n * - `\\\\hello => \\\\hello`\n * - `\\\\hello\\world => \\\\hello\\world`\n */\nfunction dirname(p) {\n // Normalize slashes and trim unnecessary trailing slash\n p = safeTrimTrailingSeparator(p);\n // Windows UNC root, e.g. \\\\hello or \\\\hello\\world\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+(\\\\[^\\\\]+)?$/.test(p)) {\n return p;\n }\n // Get dirname\n let result = path.dirname(p);\n // Trim trailing slash for Windows UNC root, e.g. \\\\hello\\world\\\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+\\\\[^\\\\]+\\\\$/.test(result)) {\n result = safeTrimTrailingSeparator(result);\n }\n return result;\n}\nexports.dirname = dirname;\n/**\n * Roots the path if not already rooted. 
On Windows, relative roots like `\\`\n * or `C:` are expanded based on the current working directory.\n */\nfunction ensureAbsoluteRoot(root, itemPath) {\n assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);\n assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Already rooted\n if (hasAbsoluteRoot(itemPath)) {\n return itemPath;\n }\n // Windows\n if (IS_WINDOWS) {\n // Check for itemPath like C: or C:foo\n if (itemPath.match(/^[A-Z]:[^\\\\/]|^[A-Z]:$/i)) {\n let cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n // Drive letter matches cwd? Expand to cwd\n if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {\n // Drive only, e.g. C:\n if (itemPath.length === 2) {\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}`;\n }\n // Drive + path, e.g. C:foo\n else {\n if (!cwd.endsWith('\\\\')) {\n cwd += '\\\\';\n }\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}${itemPath.substr(2)}`;\n }\n }\n // Different drive\n else {\n return `${itemPath[0]}:\\\\${itemPath.substr(2)}`;\n }\n }\n // Check for itemPath like \\ or \\foo\n else if (normalizeSeparators(itemPath).match(/^\\\\$|^\\\\[^\\\\]/)) {\n const cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n return `${cwd[0]}:\\\\${itemPath.substr(1)}`;\n }\n }\n assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);\n // Otherwise ensure root ends with a separator\n if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\\\'))) {\n // Intentionally empty\n }\n else {\n // Append separator\n root += path.sep;\n }\n return root + itemPath;\n}\nexports.ensureAbsoluteRoot = ensureAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\\\hello\\share` and `C:\\hello` (and using alternate separator).\n */\nfunction hasAbsoluteRoot(itemPath) {\n assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\\\hello\\share or C:\\hello\n return itemPath.startsWith('\\\\\\\\') || /^[A-Z]:\\\\/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasAbsoluteRoot = hasAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\`, `\\hello`, `\\\\hello\\share`, `C:`, and `C:\\hello` (and using alternate separator).\n */\nfunction hasRoot(itemPath) {\n assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\ or \\hello or \\\\hello\n // E.g. C: or C:\\hello\n return itemPath.startsWith('\\\\') || /^[A-Z]:/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasRoot = hasRoot;\n/**\n * Removes redundant slashes and converts `/` to `\\` on Windows\n */\nfunction normalizeSeparators(p) {\n p = p || '';\n // Windows\n if (IS_WINDOWS) {\n // Convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // Remove redundant slashes\n const isUnc = /^\\\\\\\\+[^\\\\]/.test(p); // e.g. 
\\\\hello\n return (isUnc ? '\\\\' : '') + p.replace(/\\\\\\\\+/g, '\\\\'); // preserve leading \\\\ for UNC\n }\n // Remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\nexports.normalizeSeparators = normalizeSeparators;\n/**\n * Normalizes the path separators and trims the trailing separator (when safe).\n * For example, `/foo/ => /foo` but `/ => /`\n */\nfunction safeTrimTrailingSeparator(p) {\n // Short-circuit if empty\n if (!p) {\n return '';\n }\n // Normalize separators\n p = normalizeSeparators(p);\n // No trailing slash\n if (!p.endsWith(path.sep)) {\n return p;\n }\n // Check '/' on Linux/macOS and '\\' on Windows\n if (p === path.sep) {\n return p;\n }\n // On Windows check if drive root. E.g. C:\\\n if (IS_WINDOWS && /^[A-Z]:\\\\$/i.test(p)) {\n return p;\n }\n // Otherwise trim trailing slash\n return p.substr(0, p.length - 1);\n}\nexports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;\n//# sourceMappingURL=internal-path-helper.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Helper class for parsing paths into segments\n */\nclass Path {\n /**\n * Constructs a Path\n * @param itemPath Path or array of segments\n */\n constructor(itemPath) {\n this.segments = [];\n // String\n if (typeof itemPath === 'string') {\n assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // Not rooted\n if (!pathHelper.hasRoot(itemPath)) {\n this.segments = itemPath.split(path.sep);\n }\n // Rooted\n else {\n // Add all segments, while not at the root\n let remaining = itemPath;\n let dir = pathHelper.dirname(remaining);\n while (dir !== remaining) {\n // Add the segment\n const basename = path.basename(remaining);\n this.segments.unshift(basename);\n // Truncate the last segment\n remaining = dir;\n dir = pathHelper.dirname(remaining);\n }\n // Remainder is the root\n this.segments.unshift(remaining);\n }\n }\n // Array\n else {\n // Must not be empty\n assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);\n // Each segment\n for (let i = 0; i < itemPath.length; i++) {\n let segment = itemPath[i];\n // Must not be empty\n assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);\n // Normalize slashes\n segment = pathHelper.normalizeSeparators(itemPath[i]);\n // Root segment\n if (i === 0 && pathHelper.hasRoot(segment)) {\n segment = pathHelper.safeTrimTrailingSeparator(segment);\n assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);\n this.segments.push(segment);\n }\n // All other segments\n else {\n // Must not contain slash\n assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains 
unexpected path separators`);\n this.segments.push(segment);\n }\n }\n }\n }\n /**\n * Converts the path to it's string representation\n */\n toString() {\n // First segment\n let result = this.segments[0];\n // All others\n let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));\n for (let i = 1; i < this.segments.length; i++) {\n if (skipSlash) {\n skipSlash = false;\n }\n else {\n result += path.sep;\n }\n result += this.segments[i];\n }\n return result;\n }\n}\nexports.Path = Path;\n//# sourceMappingURL=internal-path.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Given an array of patterns, returns an array of paths to search.\n * Duplicates and paths under other included paths are filtered out.\n */\nfunction getSearchPaths(patterns) {\n // Ignore negate patterns\n patterns = patterns.filter(x => !x.negate);\n // Create a map of all search paths\n const searchPathMap = {};\n for (const pattern of patterns) {\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n searchPathMap[key] = 'candidate';\n }\n const result = [];\n for (const pattern of patterns) {\n // Check if already included\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n if (searchPathMap[key] === 'included') {\n continue;\n }\n // Check for an ancestor search path\n let foundAncestor = false;\n let tempKey = key;\n let parent = pathHelper.dirname(tempKey);\n while (parent !== tempKey) {\n if (searchPathMap[parent]) {\n foundAncestor = true;\n break;\n }\n tempKey = parent;\n parent = pathHelper.dirname(tempKey);\n }\n // Include the search pattern in the result\n if (!foundAncestor) {\n result.push(pattern.searchPath);\n searchPathMap[key] = 'included';\n }\n }\n return result;\n}\nexports.getSearchPaths = getSearchPaths;\n/**\n * Matches the patterns against the path\n */\nfunction match(patterns, itemPath) {\n let result = internal_match_kind_1.MatchKind.None;\n for (const pattern of patterns) {\n if (pattern.negate) {\n result &= ~pattern.match(itemPath);\n }\n else {\n result |= pattern.match(itemPath);\n }\n }\n return result;\n}\nexports.match = match;\n/**\n * Checks whether to descend further into the directory\n */\nfunction partialMatch(patterns, itemPath) {\n return patterns.some(x => !x.negate && x.partialMatch(itemPath));\n}\nexports.partialMatch = partialMatch;\n//# sourceMappingURL=internal-pattern-helper.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst minimatch_1 = require(\"minimatch\");\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_path_1 = require(\"./internal-path\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass Pattern {\n constructor(patternOrNegate, segments, homedir) {\n /**\n * Indicates whether matches should be excluded from the result set\n */\n this.negate = false;\n // Pattern overload\n let pattern;\n if (typeof patternOrNegate === 'string') {\n pattern = patternOrNegate.trim();\n }\n // Segments overload\n else {\n // Convert to pattern\n segments = segments || [];\n assert_1.default(segments.length, `Parameter 'segments' must not empty`);\n const root = Pattern.getLiteral(segments[0]);\n assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);\n pattern = new internal_path_1.Path(segments).toString().trim();\n if (patternOrNegate) {\n pattern = `!${pattern}`;\n }\n }\n // Negate\n while (pattern.startsWith('!')) {\n this.negate = !this.negate;\n pattern = pattern.substr(1).trim();\n }\n // Normalize slashes and ensures absolute root\n pattern = Pattern.fixupPattern(pattern, homedir);\n // Segments\n this.segments = new internal_path_1.Path(pattern).segments;\n // Trailing slash indicates the pattern should only match directories, not regular files\n this.trailingSeparator = pathHelper\n .normalizeSeparators(pattern)\n .endsWith(path.sep);\n pattern = pathHelper.safeTrimTrailingSeparator(pattern);\n // Search path (literal path prior to the first glob segment)\n let foundGlob = false;\n const searchSegments = this.segments\n .map(x => Pattern.getLiteral(x))\n .filter(x => !foundGlob && !(foundGlob = x === ''));\n this.searchPath = new internal_path_1.Path(searchSegments).toString();\n // Root RegExp (required when determining partial match)\n this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');\n // Create minimatch\n const minimatchOptions = {\n dot: true,\n nobrace: true,\n nocase: IS_WINDOWS,\n nocomment: true,\n noext: true,\n nonegate: true\n };\n pattern = IS_WINDOWS ? pattern.replace(/\\\\/g, '/') : pattern;\n this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);\n }\n /**\n * Matches the pattern against the specified path\n */\n match(itemPath) {\n // Last segment is globstar?\n if (this.segments[this.segments.length - 1] === '**') {\n // Normalize slashes\n itemPath = pathHelper.normalizeSeparators(itemPath);\n // Append a trailing slash. Otherwise Minimatch will not match the directory immediately\n // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns\n // false for `/foo` but returns true for `/foo/`. 
Append a trailing slash to handle that quirk.\n if (!itemPath.endsWith(path.sep)) {\n // Note, this is safe because the constructor ensures the pattern has an absolute root.\n // For example, formats like C: and C:foo on Windows are resolved to an absolute root.\n itemPath = `${itemPath}${path.sep}`;\n }\n }\n else {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n }\n // Match\n if (this.minimatch.match(itemPath)) {\n return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;\n }\n return internal_match_kind_1.MatchKind.None;\n }\n /**\n * Indicates whether the pattern may match descendants of the specified path\n */\n partialMatch(itemPath) {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // matchOne does not handle root path correctly\n if (pathHelper.dirname(itemPath) === itemPath) {\n return this.rootRegExp.test(itemPath);\n }\n return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\\\+/ : /\\/+/), this.minimatch.set[0], true);\n }\n /**\n * Escapes glob patterns within a path\n */\n static globEscape(s) {\n return (IS_WINDOWS ? s : s.replace(/\\\\/g, '\\\\\\\\')) // escape '\\' on Linux/macOS\n .replace(/(\\[)(?=[^/]+\\])/g, '[[]') // escape '[' when ']' follows within the path segment\n .replace(/\\?/g, '[?]') // escape '?'\n .replace(/\\*/g, '[*]'); // escape '*'\n }\n /**\n * Normalizes slashes and ensures absolute root\n */\n static fixupPattern(pattern, homedir) {\n // Empty\n assert_1.default(pattern, 'pattern cannot be empty');\n // Must not contain `.` segment, unless first segment\n // Must not contain `..` segment\n const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));\n assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);\n // Must not contain globs in root, e.g. Windows UNC path \\\\foo\\b*r\n assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);\n // Normalize slashes\n pattern = pathHelper.normalizeSeparators(pattern);\n // Replace leading `.` segment\n if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {\n pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);\n }\n // Replace leading `~` segment\n else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {\n homedir = homedir || os.homedir();\n assert_1.default(homedir, 'Unable to determine HOME directory');\n assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);\n pattern = Pattern.globEscape(homedir) + pattern.substr(1);\n }\n // Replace relative drive root, e.g. pattern is C: or C:foo\n else if (IS_WINDOWS &&\n (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\\\]/i))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', pattern.substr(0, 2));\n if (pattern.length > 2 && !root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(2);\n }\n // Replace relative root, e.g. 
pattern is \\ or \\foo\n else if (IS_WINDOWS && (pattern === '\\\\' || pattern.match(/^\\\\[^\\\\]/))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', '\\\\');\n if (!root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(1);\n }\n // Otherwise ensure absolute root\n else {\n pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);\n }\n return pathHelper.normalizeSeparators(pattern);\n }\n /**\n * Attempts to unescape a pattern segment to create a literal path segment.\n * Otherwise returns empty string.\n */\n static getLiteral(segment) {\n let literal = '';\n for (let i = 0; i < segment.length; i++) {\n const c = segment[i];\n // Escape\n if (c === '\\\\' && !IS_WINDOWS && i + 1 < segment.length) {\n literal += segment[++i];\n continue;\n }\n // Wildcard\n else if (c === '*' || c === '?') {\n return '';\n }\n // Character set\n else if (c === '[' && i + 1 < segment.length) {\n let set = '';\n let closed = -1;\n for (let i2 = i + 1; i2 < segment.length; i2++) {\n const c2 = segment[i2];\n // Escape\n if (c2 === '\\\\' && !IS_WINDOWS && i2 + 1 < segment.length) {\n set += segment[++i2];\n continue;\n }\n // Closed\n else if (c2 === ']') {\n closed = i2;\n break;\n }\n // Otherwise\n else {\n set += c2;\n }\n }\n // Closed?\n if (closed >= 0) {\n // Cannot convert\n if (set.length > 1) {\n return '';\n }\n // Convert to literal\n if (set) {\n literal += set;\n i = closed;\n continue;\n }\n }\n // Otherwise fall thru\n }\n // Append\n literal += c;\n }\n return literal;\n }\n /**\n * Escapes regexp special characters\n * https://javascript.info/regexp-escaping\n */\n static regExpEscape(s) {\n return s.replace(/[[\\\\^$.|?*+()]/g, '\\\\$&');\n }\n}\nexports.Pattern = Pattern;\n//# sourceMappingURL=internal-pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass SearchState {\n constructor(path, level) {\n this.path = path;\n this.level = level;\n }\n}\nexports.SearchState = SearchState;\n//# sourceMappingURL=internal-search-state.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' +\n Buffer.from(this.username + ':' + this.password).toString('base64');\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] = 'Bearer ' + this.token;\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');\n }\n // This 
handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? 
proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a 
typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout 
eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? 
tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = process.env['no_proxy'] || 
process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst semver = __importStar(require(\"semver\"));\nconst core_1 = require(\"@actions/core\");\n// needs to be require for core node modules to be mocked\n/* eslint @typescript-eslint/no-require-imports: 0 */\nconst os = require(\"os\");\nconst cp = require(\"child_process\");\nconst fs = require(\"fs\");\nfunction _findMatch(versionSpec, stable, candidates, archFilter) {\n return __awaiter(this, void 0, void 0, function* () {\n const platFilter = os.platform();\n let result;\n let match;\n let file;\n for (const candidate of candidates) {\n const version = candidate.version;\n core_1.debug(`check ${version} satisfies ${versionSpec}`);\n if (semver.satisfies(version, versionSpec) &&\n (!stable || candidate.stable === stable)) {\n file = candidate.files.find(item => {\n core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);\n let chk = item.arch === archFilter && item.platform === platFilter;\n if (chk && item.platform_version) {\n const osVersion = module.exports._getOsVersion();\n if (osVersion === item.platform_version) {\n chk = true;\n }\n else {\n chk = semver.satisfies(osVersion, item.platform_version);\n }\n }\n return chk;\n });\n if (file) {\n core_1.debug(`matched ${candidate.version}`);\n match = candidate;\n break;\n }\n }\n }\n if (match && file) {\n // clone since we're mutating the file list to be only the file that matches\n result = Object.assign({}, match);\n result.files = [file];\n }\n return result;\n });\n}\nexports._findMatch = _findMatch;\nfunction _getOsVersion() {\n // TODO: add windows and other linux, arm variants\n // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)\n const plat = os.platform();\n let version = '';\n if (plat === 'darwin') {\n version = cp.execSync('sw_vers -productVersion').toString();\n }\n else if (plat === 'linux') {\n // lsb_release process not in some containers, readfile\n // Run cat /etc/lsb-release\n // DISTRIB_ID=Ubuntu\n // DISTRIB_RELEASE=18.04\n // DISTRIB_CODENAME=bionic\n // DISTRIB_DESCRIPTION=\"Ubuntu 18.04.4 LTS\"\n const lsbContents = module.exports._readLinuxVersionFile();\n if (lsbContents) {\n const lines = lsbContents.split('\\n');\n for (const line of lines) {\n const parts = line.split('=');\n if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {\n version = parts[1].trim();\n break;\n }\n }\n }\n }\n return version;\n}\nexports._getOsVersion = _getOsVersion;\nfunction _readLinuxVersionFile() {\n const lsbFile = '/etc/lsb-release';\n let contents = '';\n if (fs.existsSync(lsbFile)) {\n contents = fs.readFileSync(lsbFile).toString();\n }\n return contents;\n}\nexports._readLinuxVersionFile 
= _readLinuxVersionFile;\n//# sourceMappingURL=manifest.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Internal class for retries\n */\nclass RetryHelper {\n constructor(maxAttempts, minSeconds, maxSeconds) {\n if (maxAttempts < 1) {\n throw new Error('max attempts should be greater than or equal to 1');\n }\n this.maxAttempts = maxAttempts;\n this.minSeconds = Math.floor(minSeconds);\n this.maxSeconds = Math.floor(maxSeconds);\n if (this.minSeconds > this.maxSeconds) {\n throw new Error('min seconds should be less than or equal to max seconds');\n }\n }\n execute(action, isRetryable) {\n return __awaiter(this, void 0, void 0, function* () {\n let attempt = 1;\n while (attempt < this.maxAttempts) {\n // Try\n try {\n return yield action();\n }\n catch (err) {\n if (isRetryable && !isRetryable(err)) {\n throw err;\n }\n core.info(err.message);\n }\n // Sleep\n const seconds = this.getSleepAmount();\n core.info(`Waiting ${seconds} seconds before trying again`);\n yield this.sleep(seconds);\n attempt++;\n }\n // Last attempt\n return yield action();\n });\n }\n getSleepAmount() {\n return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +\n this.minSeconds);\n }\n sleep(seconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, seconds * 1000));\n });\n }\n}\nexports.RetryHelper = RetryHelper;\n//# sourceMappingURL=retry-helper.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst mm = __importStar(require(\"./manifest\"));\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst httpm = __importStar(require(\"@actions/http-client\"));\nconst semver = __importStar(require(\"semver\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst v4_1 = __importDefault(require(\"uuid/v4\"));\nconst exec_1 = require(\"@actions/exec/lib/exec\");\nconst assert_1 = require(\"assert\");\nconst retry_helper_1 = require(\"./retry-helper\");\nclass HTTPError extends Error {\n constructor(httpStatusCode) {\n super(`Unexpected HTTP response: ${httpStatusCode}`);\n this.httpStatusCode = httpStatusCode;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.HTTPError = HTTPError;\nconst IS_WINDOWS = process.platform === 'win32';\nconst IS_MAC = process.platform === 'darwin';\nconst userAgent = 'actions/tool-cache';\n/**\n * Download a tool from an url and stream it into a file\n *\n * @param url url of tool to download\n * @param dest path to download tool\n * @param auth authorization header\n * @returns path to downloaded tool\n */\nfunction downloadTool(url, dest, auth) {\n return __awaiter(this, void 0, void 0, function* () {\n dest = dest || path.join(_getTempDirectory(), v4_1.default());\n yield io.mkdirP(path.dirname(dest));\n core.debug(`Downloading ${url}`);\n core.debug(`Destination ${dest}`);\n const maxAttempts = 3;\n const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);\n const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);\n const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);\n return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {\n return yield downloadToolAttempt(url, dest || '', auth);\n }), (err) => {\n if (err instanceof HTTPError && err.httpStatusCode) {\n // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests\n if (err.httpStatusCode < 500 &&\n err.httpStatusCode !== 408 &&\n err.httpStatusCode !== 429) {\n return false;\n }\n }\n // Otherwise retry\n return true;\n });\n });\n}\nexports.downloadTool = downloadTool;\nfunction downloadToolAttempt(url, dest, auth) {\n return __awaiter(this, void 0, void 0, function* () {\n if (fs.existsSync(dest)) {\n throw new Error(`Destination file path ${dest} already exists`);\n }\n // Get the response headers\n const http = new httpm.HttpClient(userAgent, [], {\n allowRetries: false\n });\n let headers;\n if (auth) {\n core.debug('set auth');\n headers = {\n authorization: auth\n };\n }\n const response = yield http.get(url, headers);\n if (response.message.statusCode !== 200) {\n const err = new HTTPError(response.message.statusCode);\n core.debug(`Failed to download from \"${url}\". 
Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);\n throw err;\n }\n // Download the response body\n const pipeline = util.promisify(stream.pipeline);\n const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);\n const readStream = responseMessageFactory();\n let succeeded = false;\n try {\n yield pipeline(readStream, fs.createWriteStream(dest));\n core.debug('download complete');\n succeeded = true;\n return dest;\n }\n finally {\n // Error, delete dest before retry\n if (!succeeded) {\n core.debug('download failed');\n try {\n yield io.rmRF(dest);\n }\n catch (err) {\n core.debug(`Failed to delete '${dest}'. ${err.message}`);\n }\n }\n }\n });\n}\n/**\n * Extract a .7z file\n *\n * @param file path to the .7z file\n * @param dest destination directory. Optional.\n * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this\n * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will\n * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is\n * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line\n * interface, it is smaller than the full command line interface, and it does support long paths. At the\n * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.\n * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path\n * to 7zr.exe can be pass to this function.\n * @returns path to the destination directory\n */\nfunction extract7z(file, dest, _7zPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n const originalCwd = process.cwd();\n process.chdir(dest);\n if (_7zPath) {\n try {\n const logLevel = core.isDebug() ? '-bb1' : '-bb0';\n const args = [\n 'x',\n logLevel,\n '-bd',\n '-sccUTF-8',\n file\n ];\n const options = {\n silent: true\n };\n yield exec_1.exec(`\"${_7zPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n else {\n const escapedScript = path\n .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')\n .replace(/'/g, \"''\")\n .replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const escapedTarget = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n const options = {\n silent: true\n };\n try {\n const powershellPath = yield io.which('powershell', true);\n yield exec_1.exec(`\"${powershellPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n return dest;\n });\n}\nexports.extract7z = extract7z;\n/**\n * Extract a compressed tar archive\n *\n * @param file path to the tar\n * @param dest destination directory. Optional.\n * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). 
Optional.\n * @returns path to the destination directory\n */\nfunction extractTar(file, dest, flags = 'xz') {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n // Create dest\n dest = yield _createExtractFolder(dest);\n // Determine whether GNU tar\n core.debug('Checking tar --version');\n let versionOutput = '';\n yield exec_1.exec('tar --version', [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n core.debug(versionOutput.trim());\n const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');\n // Initialize args\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n if (core.isDebug() && !flags.includes('v')) {\n args.push('-v');\n }\n let destArg = dest;\n let fileArg = file;\n if (IS_WINDOWS && isGnuTar) {\n args.push('--force-local');\n destArg = dest.replace(/\\\\/g, '/');\n // Technically only the dest needs to have `/` but for aesthetic consistency\n // convert slashes in the file arg too.\n fileArg = file.replace(/\\\\/g, '/');\n }\n if (isGnuTar) {\n // Suppress warnings when using GNU tar to extract archives created by BSD tar\n args.push('--warning=no-unknown-keyword');\n }\n args.push('-C', destArg, '-f', fileArg);\n yield exec_1.exec(`tar`, args);\n return dest;\n });\n}\nexports.extractTar = extractTar;\n/**\n * Extract a xar compatible archive\n *\n * @param file path to the archive\n * @param dest destination directory. Optional.\n * @param flags flags for the xar. Optional.\n * @returns path to the destination directory\n */\nfunction extractXar(file, dest, flags = []) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n args.push('-x', '-C', dest, '-f', file);\n if (core.isDebug()) {\n args.push('-v');\n }\n const xarPath = yield io.which('xar', true);\n yield exec_1.exec(`\"${xarPath}\"`, _unique(args));\n return dest;\n });\n}\nexports.extractXar = extractXar;\n/**\n * Extract a zip\n *\n * @param file path to the zip\n * @param dest destination directory. 
Optional.\n * @returns path to the destination directory\n */\nfunction extractZip(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n dest = yield _createExtractFolder(dest);\n if (IS_WINDOWS) {\n yield extractZipWin(file, dest);\n }\n else {\n yield extractZipNix(file, dest);\n }\n return dest;\n });\n}\nexports.extractZip = extractZip;\nfunction extractZipWin(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n // build the powershell command\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedDest = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;\n // run powershell\n const powershellPath = yield io.which('powershell', true);\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n yield exec_1.exec(`\"${powershellPath}\"`, args);\n });\n}\nfunction extractZipNix(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n const unzipPath = yield io.which('unzip', true);\n const args = [file];\n if (!core.isDebug()) {\n args.unshift('-q');\n }\n yield exec_1.exec(`\"${unzipPath}\"`, args, { cwd: dest });\n });\n}\n/**\n * Caches a directory and installs it into the tool cacheDir\n *\n * @param sourceDir the directory to cache into tools\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheDir(sourceDir, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source dir: ${sourceDir}`);\n if (!fs.statSync(sourceDir).isDirectory()) {\n throw new Error('sourceDir is not a directory');\n }\n // Create the tool dir\n const destPath = yield _createToolPath(tool, version, arch);\n // copy each child item. do not move. move can fail on Windows\n // due to anti-virus software having an open handle on a file.\n for (const itemName of fs.readdirSync(sourceDir)) {\n const s = path.join(sourceDir, itemName);\n yield io.cp(s, destPath, { recursive: true });\n }\n // write .complete\n _completeToolPath(tool, version, arch);\n return destPath;\n });\n}\nexports.cacheDir = cacheDir;\n/**\n * Caches a downloaded file (GUID) and installs it\n * into the tool cache with a given targetName\n *\n * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.\n * @param targetFile the name of the file name in the tools directory\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. 
Defaults to machine architecture\n */\nfunction cacheFile(sourceFile, targetFile, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source file: ${sourceFile}`);\n if (!fs.statSync(sourceFile).isFile()) {\n throw new Error('sourceFile is not a file');\n }\n // create the tool dir\n const destFolder = yield _createToolPath(tool, version, arch);\n // copy instead of move. move can fail on Windows due to\n // anti-virus software having an open handle on a file.\n const destPath = path.join(destFolder, targetFile);\n core.debug(`destination file ${destPath}`);\n yield io.cp(sourceFile, destPath);\n // write .complete\n _completeToolPath(tool, version, arch);\n return destFolder;\n });\n}\nexports.cacheFile = cacheFile;\n/**\n * Finds the path to a tool version in the local installed tool cache\n *\n * @param toolName name of the tool\n * @param versionSpec version of the tool\n * @param arch optional arch. defaults to arch of computer\n */\nfunction find(toolName, versionSpec, arch) {\n if (!toolName) {\n throw new Error('toolName parameter is required');\n }\n if (!versionSpec) {\n throw new Error('versionSpec parameter is required');\n }\n arch = arch || os.arch();\n // attempt to resolve an explicit version\n if (!_isExplicitVersion(versionSpec)) {\n const localVersions = findAllVersions(toolName, arch);\n const match = _evaluateVersions(localVersions, versionSpec);\n versionSpec = match;\n }\n // check for the explicit version in the cache\n let toolPath = '';\n if (versionSpec) {\n versionSpec = semver.clean(versionSpec) || '';\n const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);\n core.debug(`checking cache: ${cachePath}`);\n if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {\n core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);\n toolPath = cachePath;\n }\n else {\n core.debug('not found');\n }\n }\n return toolPath;\n}\nexports.find = find;\n/**\n * Finds the paths to all versions of a tool that are installed in the local tool cache\n *\n * @param toolName name of the tool\n * @param arch optional arch. 
defaults to arch of computer\n */\nfunction findAllVersions(toolName, arch) {\n const versions = [];\n arch = arch || os.arch();\n const toolPath = path.join(_getCacheDirectory(), toolName);\n if (fs.existsSync(toolPath)) {\n const children = fs.readdirSync(toolPath);\n for (const child of children) {\n if (_isExplicitVersion(child)) {\n const fullPath = path.join(toolPath, child, arch || '');\n if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {\n versions.push(child);\n }\n }\n }\n }\n return versions;\n}\nexports.findAllVersions = findAllVersions;\nfunction getManifestFromRepo(owner, repo, auth, branch = 'master') {\n return __awaiter(this, void 0, void 0, function* () {\n let releases = [];\n const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;\n const http = new httpm.HttpClient('tool-cache');\n const headers = {};\n if (auth) {\n core.debug('set auth');\n headers.authorization = auth;\n }\n const response = yield http.getJson(treeUrl, headers);\n if (!response.result) {\n return releases;\n }\n let manifestUrl = '';\n for (const item of response.result.tree) {\n if (item.path === 'versions-manifest.json') {\n manifestUrl = item.url;\n break;\n }\n }\n headers['accept'] = 'application/vnd.github.VERSION.raw';\n let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();\n if (versionsRaw) {\n // shouldn't be needed but protects against invalid json saved with BOM\n versionsRaw = versionsRaw.replace(/^\\uFEFF/, '');\n try {\n releases = JSON.parse(versionsRaw);\n }\n catch (_a) {\n core.debug('Invalid json');\n }\n }\n return releases;\n });\n}\nexports.getManifestFromRepo = getManifestFromRepo;\nfunction findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {\n return __awaiter(this, void 0, void 0, function* () {\n // wrap the internal impl\n const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);\n return match;\n });\n}\nexports.findFromManifest = findFromManifest;\nfunction _createExtractFolder(dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!dest) {\n // create a temp dir\n dest = path.join(_getTempDirectory(), v4_1.default());\n }\n yield io.mkdirP(dest);\n return dest;\n });\n}\nfunction _createToolPath(tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n core.debug(`destination ${folderPath}`);\n const markerPath = `${folderPath}.complete`;\n yield io.rmRF(folderPath);\n yield io.rmRF(markerPath);\n yield io.mkdirP(folderPath);\n return folderPath;\n });\n}\nfunction _completeToolPath(tool, version, arch) {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n const markerPath = `${folderPath}.complete`;\n fs.writeFileSync(markerPath, '');\n core.debug('finished caching tool');\n}\nfunction _isExplicitVersion(versionSpec) {\n const c = semver.clean(versionSpec) || '';\n core.debug(`isExplicit: ${c}`);\n const valid = semver.valid(c) != null;\n core.debug(`explicit? 
${valid}`);\n return valid;\n}\nfunction _evaluateVersions(versions, versionSpec) {\n let version = '';\n core.debug(`evaluating ${versions.length} versions`);\n versions = versions.sort((a, b) => {\n if (semver.gt(a, b)) {\n return 1;\n }\n return -1;\n });\n for (let i = versions.length - 1; i >= 0; i--) {\n const potential = versions[i];\n const satisfied = semver.satisfies(potential, versionSpec);\n if (satisfied) {\n version = potential;\n break;\n }\n }\n if (version) {\n core.debug(`matched: ${version}`);\n }\n else {\n core.debug('match not found');\n }\n return version;\n}\n/**\n * Gets RUNNER_TOOL_CACHE\n */\nfunction _getCacheDirectory() {\n const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';\n assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');\n return cacheDirectory;\n}\n/**\n * Gets RUNNER_TEMP\n */\nfunction _getTempDirectory() {\n const tempDirectory = process.env['RUNNER_TEMP'] || '';\n assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');\n return tempDirectory;\n}\n/**\n * Gets a global variable\n */\nfunction _getGlobal(key, defaultValue) {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n const value = global[key];\n /* eslint-enable @typescript-eslint/no-explicit-any */\n return value !== undefined ? value : defaultValue;\n}\n/**\n * Returns an array of unique values.\n * @param values Values to make unique.\n */\nfunction _unique(values) {\n return Array.from(new Set(values));\n}\n//# sourceMappingURL=tool-cache.js.map","exports = module.exports = SemVer\n\nvar debug\n/* istanbul ignore next */\nif (typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)) {\n debug = function () {\n var args = Array.prototype.slice.call(arguments, 0)\n args.unshift('SEMVER')\n console.log.apply(console, args)\n }\n} else {\n debug = function () {}\n}\n\n// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nexports.SEMVER_SPEC_VERSION = '2.0.0'\n\nvar MAX_LENGTH = 256\nvar MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n /* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nvar MAX_SAFE_COMPONENT_LENGTH = 16\n\n// The actual regexps go on exports.re\nvar re = exports.re = []\nvar src = exports.src = []\nvar t = exports.tokens = {}\nvar R = 0\n\nfunction tok (n) {\n t[n] = R++\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ntok('NUMERICIDENTIFIER')\nsrc[t.NUMERICIDENTIFIER] = '0|[1-9]\\\\d*'\ntok('NUMERICIDENTIFIERLOOSE')\nsrc[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ntok('NONNUMERICIDENTIFIER')\nsrc[t.NONNUMERICIDENTIFIER] = '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*'\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ntok('MAINVERSION')\nsrc[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')'\n\ntok('MAINVERSIONLOOSE')\nsrc[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' 
+\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ntok('PRERELEASEIDENTIFIER')\nsrc[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\ntok('PRERELEASEIDENTIFIERLOOSE')\nsrc[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ntok('PRERELEASE')\nsrc[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'\n\ntok('PRERELEASELOOSE')\nsrc[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ntok('BUILDIDENTIFIER')\nsrc[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ntok('BUILD')\nsrc[t.BUILD] = '(?:\\\\+(' + src[t.BUILDIDENTIFIER] +\n '(?:\\\\.' + src[t.BUILDIDENTIFIER] + ')*))'\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ntok('FULL')\ntok('FULLPLAIN')\nsrc[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +\n src[t.PRERELEASE] + '?' +\n src[t.BUILD] + '?'\n\nsrc[t.FULL] = '^' + src[t.FULLPLAIN] + '$'\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ntok('LOOSEPLAIN')\nsrc[t.LOOSEPLAIN] = '[v=\\\\s]*' + src[t.MAINVERSIONLOOSE] +\n src[t.PRERELEASELOOSE] + '?' +\n src[t.BUILD] + '?'\n\ntok('LOOSE')\nsrc[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'\n\ntok('GTLT')\nsrc[t.GTLT] = '((?:<|>)?=?)'\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ntok('XRANGEIDENTIFIERLOOSE')\nsrc[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*'\ntok('XRANGEIDENTIFIER')\nsrc[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\\\*'\n\ntok('XRANGEPLAIN')\nsrc[t.XRANGEPLAIN] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:' + src[t.PRERELEASE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGEPLAINLOOSE')\nsrc[t.XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[t.PRERELEASELOOSE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGE')\nsrc[t.XRANGE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAIN] + '$'\ntok('XRANGELOOSE')\nsrc[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ntok('COERCE')\nsrc[t.COERCE] = '(^|[^\\\\d])' +\n '(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +\n '(?:$|[^\\\\d])'\ntok('COERCERTL')\nre[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ntok('LONETILDE')\nsrc[t.LONETILDE] = '(?:~>?)'\n\ntok('TILDETRIM')\nsrc[t.TILDETRIM] = '(\\\\s*)' + src[t.LONETILDE] + '\\\\s+'\nre[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')\nvar tildeTrimReplace = '$1~'\n\ntok('TILDE')\nsrc[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'\ntok('TILDELOOSE')\nsrc[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ntok('LONECARET')\nsrc[t.LONECARET] = '(?:\\\\^)'\n\ntok('CARETTRIM')\nsrc[t.CARETTRIM] = '(\\\\s*)' + src[t.LONECARET] + '\\\\s+'\nre[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')\nvar caretTrimReplace = '$1^'\n\ntok('CARET')\nsrc[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'\ntok('CARETLOOSE')\nsrc[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ntok('COMPARATORLOOSE')\nsrc[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'\ntok('COMPARATOR')\nsrc[t.COMPARATOR] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.FULLPLAIN] + ')$|^$'\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ntok('COMPARATORTRIM')\nsrc[t.COMPARATORTRIM] = '(\\\\s*)' + src[t.GTLT] +\n '\\\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'\n\n// this one has to use the /g flag\nre[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')\nvar comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ntok('HYPHENRANGE')\nsrc[t.HYPHENRANGE] = '^\\\\s*(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s*$'\n\ntok('HYPHENRANGELOOSE')\nsrc[t.HYPHENRANGELOOSE] = '^\\\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$'\n\n// Star ranges basically just allow anything at all.\ntok('STAR')\nsrc[t.STAR] = '(<|>)?=?\\\\s*\\\\*'\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i])\n if (!re[i]) {\n re[i] = new RegExp(src[i])\n }\n}\n\nexports.parse = parse\nfunction parse (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n var r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nexports.valid = valid\nfunction valid (version, options) {\n var v = parse(version, options)\n return v ? v.version : null\n}\n\nexports.clean = clean\nfunction clean (version, options) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\n\nexports.SemVer = SemVer\n\nfunction SemVer (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n if (version instanceof SemVer) {\n if (version.loose === options.loose) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')\n }\n\n if (!(this instanceof SemVer)) {\n return new SemVer(version, options)\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n\n var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map(function (id) {\n if (/^[0-9]+$/.test(id)) {\n var num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? m[5].split('.') : []\n this.format()\n}\n\nSemVer.prototype.format = function () {\n this.version = this.major + '.' + this.minor + '.' + this.patch\n if (this.prerelease.length) {\n this.version += '-' + this.prerelease.join('.')\n }\n return this.version\n}\n\nSemVer.prototype.toString = function () {\n return this.version\n}\n\nSemVer.prototype.compare = function (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return this.compareMain(other) || this.comparePre(other)\n}\n\nSemVer.prototype.compareMain = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n}\n\nSemVer.prototype.comparePre = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n var i = 0\n do {\n var a = this.prerelease[i]\n var b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\nSemVer.prototype.compareBuild = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n var i = 0\n do {\n var a = this.build[i]\n var b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === 
undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. premajor and prepatch work the same way.\nSemVer.prototype.inc = function (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n var i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error('invalid increment argument: ' + release)\n }\n this.format()\n this.raw = this.version\n return this\n}\n\nexports.inc = inc\nfunction inc (version, release, loose, identifier) {\n if (typeof (loose) === 'string') {\n identifier = loose\n loose = undefined\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\n\nexports.diff = diff\nfunction diff (version1, version2) {\n if (eq(version1, version2)) {\n return null\n } else {\n var v1 = parse(version1)\n var v2 = parse(version2)\n var prefix = ''\n if (v1.prerelease.length || v2.prerelease.length) {\n 
prefix = 'pre'\n var defaultResult = 'prerelease'\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers\n\nvar numeric = /^[0-9]+$/\nfunction compareIdentifiers (a, b) {\n var anum = numeric.test(a)\n var bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers\nfunction rcompareIdentifiers (a, b) {\n return compareIdentifiers(b, a)\n}\n\nexports.major = major\nfunction major (a, loose) {\n return new SemVer(a, loose).major\n}\n\nexports.minor = minor\nfunction minor (a, loose) {\n return new SemVer(a, loose).minor\n}\n\nexports.patch = patch\nfunction patch (a, loose) {\n return new SemVer(a, loose).patch\n}\n\nexports.compare = compare\nfunction compare (a, b, loose) {\n return new SemVer(a, loose).compare(new SemVer(b, loose))\n}\n\nexports.compareLoose = compareLoose\nfunction compareLoose (a, b) {\n return compare(a, b, true)\n}\n\nexports.compareBuild = compareBuild\nfunction compareBuild (a, b, loose) {\n var versionA = new SemVer(a, loose)\n var versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\n\nexports.rcompare = rcompare\nfunction rcompare (a, b, loose) {\n return compare(b, a, loose)\n}\n\nexports.sort = sort\nfunction sort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(a, b, loose)\n })\n}\n\nexports.rsort = rsort\nfunction rsort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(b, a, loose)\n })\n}\n\nexports.gt = gt\nfunction gt (a, b, loose) {\n return compare(a, b, loose) > 0\n}\n\nexports.lt = lt\nfunction lt (a, b, loose) {\n return compare(a, b, loose) < 0\n}\n\nexports.eq = eq\nfunction eq (a, b, loose) {\n return compare(a, b, loose) === 0\n}\n\nexports.neq = neq\nfunction neq (a, b, loose) {\n return compare(a, b, loose) !== 0\n}\n\nexports.gte = gte\nfunction gte (a, b, loose) {\n return compare(a, b, loose) >= 0\n}\n\nexports.lte = lte\nfunction lte (a, b, loose) {\n return compare(a, b, loose) <= 0\n}\n\nexports.cmp = cmp\nfunction cmp (a, op, b, loose) {\n switch (op) {\n case '===':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a === b\n\n case '!==':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError('Invalid operator: ' + op)\n }\n}\n\nexports.Comparator = Comparator\nfunction Comparator (comp, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n if (!(this instanceof Comparator)) {\n return new Comparator(comp, options)\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n 
this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n}\n\nvar ANY = {}\nComparator.prototype.parse = function (comp) {\n var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var m = comp.match(r)\n\n if (!m) {\n throw new TypeError('Invalid comparator: ' + comp)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n}\n\nComparator.prototype.toString = function () {\n return this.value\n}\n\nComparator.prototype.test = function (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n}\n\nComparator.prototype.intersects = function (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n var rangeTmp\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n rangeTmp = new Range(comp.value, options)\n return satisfies(this.value, rangeTmp, options)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n rangeTmp = new Range(this.value, options)\n return satisfies(comp.semver, rangeTmp, options)\n }\n\n var sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n var sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n var sameSemVer = this.semver.version === comp.semver.version\n var differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n var oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n ((this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<'))\n var oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n ((this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || comp.operator === '>'))\n\n return sameDirectionIncreasing || sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan || oppositeDirectionsGreaterThan\n}\n\nexports.Range = Range\nfunction Range (range, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (range instanceof Range) {\n if (range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n return new Range(range.value, options)\n }\n\n if (!(this instanceof Range)) {\n return new Range(range, options)\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function (range) 
{\n return this.parseRange(range.trim())\n }, this).filter(function (c) {\n // throw out any that are not relevant for whatever reason\n return c.length\n })\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range)\n }\n\n this.format()\n}\n\nRange.prototype.format = function () {\n this.range = this.set.map(function (comps) {\n return comps.join(' ').trim()\n }).join('||').trim()\n return this.range\n}\n\nRange.prototype.toString = function () {\n return this.range\n}\n\nRange.prototype.parseRange = function (range) {\n var loose = this.options.loose\n range = range.trim()\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace)\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range, re[t.COMPARATORTRIM])\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var set = range.split(' ').map(function (comp) {\n return parseComparator(comp, this.options)\n }, this).join(' ').split(/\\s+/)\n if (this.options.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function (comp) {\n return !!comp.match(compRe)\n })\n }\n set = set.map(function (comp) {\n return new Comparator(comp, this.options)\n }, this)\n\n return set\n}\n\nRange.prototype.intersects = function (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some(function (thisComparators) {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some(function (rangeComparators) {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every(function (thisComparator) {\n return rangeComparators.every(function (rangeComparator) {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n}\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nfunction isSatisfiable (comparators, options) {\n var result = true\n var remainingComparators = comparators.slice()\n var testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every(function (otherComparator) {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator = remainingComparators.pop()\n }\n\n return result\n}\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators\nfunction toComparators (range, options) {\n return new Range(range, options).set.map(function (comp) {\n return comp.map(function (c) {\n return c.value\n }).join(' ').trim().split(' ')\n })\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator (comp, options) {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', 
comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nfunction isX (id) {\n return !id || id.toLowerCase() === 'x' || id === '*'\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceTilde(comp, options)\n }).join(' ')\n}\n\nfunction replaceTilde (comp, options) {\n var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceCaret(comp, options)\n }).join(' ')\n}\n\nfunction replaceCaret (comp, options) {\n debug('caret', comp, options)\n var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n if (M === '0') {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else {\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + (+M + 1) + '.0.0'\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0'\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nfunction replaceXRanges (comp, options) {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map(function (comp) {\n return replaceXRange(comp, options)\n }).join(' ')\n}\n\nfunction replaceXRange (comp, options) {\n comp = comp.trim()\n var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, function (ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n var xM = isX(M)\n var xm = xM || isX(m)\n var xp = xm || isX(p)\n var anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n ret = gtlt + M + '.' + m + '.' + p + pr\n } else if (xm) {\n ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0' + pr +\n ' <' + M + '.' + (+m + 1) + '.0' + pr\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars (comp, options) {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = '>=' + fM + '.0.0'\n } else if (isX(fp)) {\n from = '>=' + fM + '.' + fm + '.0'\n } else {\n from = '>=' + from\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = '<' + (+tM + 1) + '.0.0'\n } else if (isX(tp)) {\n to = '<' + tM + '.' + (+tm + 1) + '.0'\n } else if (tpr) {\n to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr\n } else {\n to = '<=' + to\n }\n\n return (from + ' ' + to).trim()\n}\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n}\n\nfunction testSet (set, version, options) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n\nexports.satisfies = satisfies\nfunction satisfies (version, range, options) {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\n\nexports.maxSatisfying = maxSatisfying\nfunction maxSatisfying (versions, range, options) {\n var max = null\n var maxSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\n\nexports.minSatisfying = minSatisfying\nfunction minSatisfying (versions, range, options) {\n var min = null\n var minSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\n\nexports.minVersion = minVersion\nfunction minVersion (range, loose) {\n range = new Range(range, loose)\n\n var minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n comparators.forEach(function (comparator) {\n // Clone to avoid manipulating the comparator's semver object.\n var compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!minver || gt(minver, compver)) {\n minver = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error('Unexpected operation: ' + comparator.operator)\n }\n })\n 
}\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\n\nexports.validRange = validRange\nfunction validRange (range, options) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr\nfunction ltr (version, range, options) {\n return outside(version, range, '<', options)\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr\nfunction gtr (version, range, options) {\n return outside(version, range, '>', options)\n}\n\nexports.outside = outside\nfunction outside (version, range, hilo, options) {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n var gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n var high = null\n var low = null\n\n comparators.forEach(function (comparator) {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nexports.prerelease = prerelease\nfunction prerelease (version, options) {\n var parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null\n}\n\nexports.intersects = intersects\nfunction intersects (r1, r2, options) {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\n\nexports.coerce = coerce\nfunction coerce (version, options) {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n var match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n var next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(match[2] +\n '.' + (match[3] || '0') +\n '.' + (match[4] || '0'), options)\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar listenersMap = new WeakMap();\nvar abortedMap = new WeakMap();\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nvar AbortSignal = /** @class */ (function () {\n function AbortSignal() {\n /**\n * onabort event listener.\n */\n this.onabort = null;\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n Object.defineProperty(AbortSignal.prototype, \"aborted\", {\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n get: function () {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n return abortedMap.get(this);\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(AbortSignal, \"none\", {\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n get: function () {\n return new AbortSignal();\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n AbortSignal.prototype.addEventListener = function (\n // tslint:disable-next-line:variable-name\n _type, listener) {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n var listeners = listenersMap.get(this);\n listeners.push(listener);\n };\n /**\n * Remove \"abort\" event listener, only 
support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n AbortSignal.prototype.removeEventListener = function (\n // tslint:disable-next-line:variable-name\n _type, listener) {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n var listeners = listenersMap.get(this);\n var index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n };\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n AbortSignal.prototype.dispatchEvent = function (_event) {\n throw new Error(\"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\");\n };\n return AbortSignal;\n}());\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nfunction abortSignal(signal) {\n if (signal.aborted) {\n return;\n }\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n var listeners = listenersMap.get(signal);\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach(function (listener) {\n listener.call(signal, { type: \"abort\" });\n });\n }\n abortedMap.set(signal, true);\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nvar AbortError = /** @class */ (function (_super) {\n tslib.__extends(AbortError, _super);\n function AbortError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"AbortError\";\n return _this;\n }\n return AbortError;\n}(Error));\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nvar 
AbortController = /** @class */ (function () {\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n function AbortController(parentSignals) {\n var _this = this;\n this._signal = new AbortSignal();\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {\n var parentSignal = parentSignals_1[_i];\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n }\n else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", function () {\n _this.abort();\n });\n }\n }\n }\n Object.defineProperty(AbortController.prototype, \"signal\", {\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n get: function () {\n return this._signal;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n AbortController.prototype.abort = function () {\n abortSignal(this._signal);\n };\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n AbortController.timeout = function (ms) {\n var signal = new AbortSignal();\n var timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n };\n return AbortController;\n}());\n\nexports.AbortController = AbortController;\nexports.AbortError = AbortError;\nexports.AbortSignal = AbortSignal;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? 
global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","\"use strict\";\nif (typeof Symbol === undefined || !Symbol.asyncIterator) {\n Symbol.asyncIterator = Symbol.for(\"Symbol.asyncIterator\");\n}\n//# sourceMappingURL=index.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nvar AzureKeyCredential = /** @class */ (function () {\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n function AzureKeyCredential(key) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n this._key = key;\n }\n 
Object.defineProperty(AzureKeyCredential.prototype, \"key\", {\n /**\n * The value of the key to be used in authentication\n */\n get: function () {\n return this._key;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n AzureKeyCredential.prototype.update = function (newKey) {\n this._key = newKey;\n };\n return AzureKeyCredential;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n * @internal\n */\nfunction isDefined(thing) {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * Note: The properties may be inherited.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n * @internal\n */\nfunction isObjectWithProperties(thing, properties) {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n for (var _i = 0, properties_1 = properties; _i < properties_1.length; _i++) {\n var property = properties_1[_i];\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n return true;\n}\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * Note: The property may be inherited.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n * @internal\n */\nfunction objectHasProperty(thing, property) {\n return typeof thing === \"object\" && property in thing;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nvar AzureNamedKeyCredential = /** @class */ (function () {\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n function AzureNamedKeyCredential(name, key) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n this._name = name;\n this._key = key;\n }\n Object.defineProperty(AzureNamedKeyCredential.prototype, \"key\", {\n /**\n * The value of the key to be used in authentication.\n */\n get: function () {\n return this._key;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(AzureNamedKeyCredential.prototype, \"name\", {\n /**\n * The value of the name to be used in authentication.\n */\n get: function () {\n return this._name;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n AzureNamedKeyCredential.prototype.update = function (newName, newKey) {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n this._name = newName;\n this._key = newKey;\n };\n return AzureNamedKeyCredential;\n}());\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed 
NamedKeyCredential to be tested.\n */\nfunction isNamedKeyCredential(credential) {\n return (isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\");\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nvar AzureSASCredential = /** @class */ (function () {\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n function AzureSASCredential(signature) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n this._signature = signature;\n }\n Object.defineProperty(AzureSASCredential.prototype, \"signature\", {\n /**\n * The value of the shared access signature to be used in authentication\n */\n get: function () {\n return this._signature;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n AzureSASCredential.prototype.update = function (newSignature) {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n this._signature = newSignature;\n };\n return AzureSASCredential;\n}());\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nfunction isSASCredential(credential) {\n return (isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\");\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nfunction isTokenCredential(credential) {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n var castCredential = credential;\n return (castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0));\n}\n\nexports.AzureKeyCredential = AzureKeyCredential;\nexports.AzureNamedKeyCredential = AzureNamedKeyCredential;\nexports.AzureSASCredential = AzureSASCredential;\nexports.isNamedKeyCredential = isNamedKeyCredential;\nexports.isSASCredential = isSASCredential;\nexports.isTokenCredential = isTokenCredential;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar tslib = require('tslib');\nvar uuid = require('uuid');\nvar tough = require('tough-cookie');\nvar http = require('http');\nvar https = require('https');\nvar node_fetch = _interopDefault(require('node-fetch'));\nvar abortController = require('@azure/abort-controller');\nvar FormData = _interopDefault(require('form-data'));\nvar util = require('util');\nvar url = require('url');\nvar stream = require('stream');\nvar logger$1 = require('@azure/logger');\nvar tunnel = require('tunnel');\nvar coreAuth = require('@azure/core-auth');\nvar xml2js = require('xml2js');\nvar os = require('os');\nvar coreTracing = require('@azure/core-tracing');\nrequire('@azure/core-asynciterator-polyfill');\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName) {\n return headerName.toLowerCase();\n}\nfunction isHttpHeadersLike(object) {\n if (object && typeof object === \"object\") {\n var castObject = object;\n if (typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\") {\n return true;\n }\n }\n return false;\n}\n/**\n * A collection of HTTP header key/value pairs.\n */\nvar HttpHeaders = /** @class */ (function () {\n function HttpHeaders(rawHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (var headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n HttpHeaders.prototype.set = function (headerName, headerValue) {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString()\n };\n };\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n HttpHeaders.prototype.get = function (headerName) {\n var header = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n };\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n HttpHeaders.prototype.contains = function (headerName) {\n return !!this._headersMap[getHeaderKey(headerName)];\n };\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n HttpHeaders.prototype.remove = function (headerName) {\n var result = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n };\n /**\n * Get the headers that are contained this collection as an object.\n */\n HttpHeaders.prototype.rawHeaders = function () {\n var result = {};\n for (var headerKey in this._headersMap) {\n var header = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n };\n /**\n * Get the headers that are contained in this collection as an array.\n */\n HttpHeaders.prototype.headersArray = function () {\n var headers = [];\n for (var headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n };\n /**\n * Get the header names that are contained in this collection.\n */\n HttpHeaders.prototype.headerNames = function () {\n var headerNames = [];\n var headers = this.headersArray();\n for (var i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n };\n /**\n * Get the header values that are contained in this collection.\n */\n HttpHeaders.prototype.headerValues = function () {\n var headerValues = [];\n var headers = this.headersArray();\n for (var i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n };\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n HttpHeaders.prototype.toJson = function () {\n return this.rawHeaders();\n };\n /**\n * Get the string representation of this HTTP header collection.\n */\n HttpHeaders.prototype.toString = function () {\n return JSON.stringify(this.toJson());\n };\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n HttpHeaders.prototype.clone = function () {\n return new HttpHeaders(this.rawHeaders());\n };\n return HttpHeaders;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nfunction encodeString(value) {\n return Buffer.from(value).toString(\"base64\");\n}\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nfunction encodeByteArray(value) {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n var bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer);\n return bufferValue.toString(\"base64\");\n}\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nfunction decodeString(value) {\n return Buffer.from(value, \"base64\");\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"1.2.4\",\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\"\n },\n StatusCodes: {\n TooManyRequests: 429\n }\n },\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\"\n }\n};\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Default key used to access the XML attributes.\n */\nvar XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nvar XML_CHARKEY = \"_\";\n\n// Copyright (c) Microsoft Corporation.\nvar validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nvar isNode = typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nfunction encodeUri(uri) {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nfunction stripResponse(response) {\n var strippedResponse = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nfunction stripRequest(request) {\n var strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nfunction isValidUuid(uuid) {\n 
return validUuidRegex.test(uuid);\n}\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nfunction generateUuid() {\n return uuid.v4();\n}\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nfunction executePromisesSequentially(promiseFactories, kickstart) {\n var result = Promise.resolve(kickstart);\n promiseFactories.forEach(function (promiseFactory) {\n result = result.then(promiseFactory);\n });\n return result;\n}\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param t - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @returns Resolved promise\n */\nfunction delay(t, value) {\n return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); });\n}\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nfunction promiseToCallback(promise) {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return function (cb) {\n promise\n .then(function (data) {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch(function (err) {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nfunction promiseToServiceCallback(promise) {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return function (cb) {\n promise\n .then(function (data) {\n return process.nextTick(cb, undefined, data.parsedBody, data.request, data);\n })\n .catch(function (err) {\n process.nextTick(cb, err);\n });\n };\n}\nfunction prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) {\n var _a, _b, _c;\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n if (!xmlNamespaceKey || !xmlNamespace) {\n return _a = {}, _a[elementName] = obj, _a;\n }\n var result = (_b = {}, _b[elementName] = obj, _b);\n result[XML_ATTRKEY] = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c);\n return result;\n}\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nfunction applyMixins(targetCtorParam, sourceCtors) {\n var castTargetCtorParam = targetCtorParam;\n sourceCtors.forEach(function (sourceCtor) {\n 
Object.getOwnPropertyNames(sourceCtor.prototype).forEach(function (name) {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\nvar validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nfunction isDuration(value) {\n return validateISODuration.test(value);\n}\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nfunction replaceAll(value, searchValue, replaceValue) {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nfunction isPrimitiveType(value) {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\nfunction getEnvironmentValue(name) {\n if (process.env[name]) {\n return process.env[name];\n }\n else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar Serializer = /** @class */ (function () {\n function Serializer(modelMappers, isXML) {\n if (modelMappers === void 0) { modelMappers = {}; }\n this.modelMappers = modelMappers;\n this.isXML = isXML;\n }\n Serializer.prototype.validateConstraints = function (mapper, value, objectName) {\n var failValidation = function (constraintName, constraintValue) {\n throw new Error(\"\\\"\" + objectName + \"\\\" with value \\\"\" + value + \"\\\" should satisfy the constraint \\\"\" + constraintName + \"\\\": \" + constraintValue + \".\");\n };\n if (mapper.constraints && value != undefined) {\n var valueAsNumber = value;\n var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n var valueAsArray = value;\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n 
failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n var pattern = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (UniqueItems &&\n valueAsArray.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n };\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param mapper - The mapper which defines the metadata of the serializable object\n * @param object - A valid Javascript object to be serialized\n * @param objectName - Name of the serialized object\n * @param options - additional options to deserialization\n * @returns A valid serialized Javascript object\n */\n Serializer.prototype.serialize = function (mapper, object, objectName, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n var payload = {};\n var mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n var required = mapper.required, nullable = mapper.nullable;\n if (required && nullable && object === undefined) {\n throw new Error(objectName + \" cannot be undefined.\");\n }\n if (required && !nullable && object == undefined) {\n throw new Error(objectName + \" cannot be null or undefined.\");\n }\n if (!required && nullable === false && object === null) {\n throw new Error(objectName + \" cannot be null.\");\n }\n if (object == undefined) {\n payload = object;\n }\n else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n }\n else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n }\n else if (mapperType.match(/^Enum$/i) !== null) {\n var enumMapper = mapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n }\n else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) {\n payload = serializeDateTypes(mapperType, object, objectName);\n }\n else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object);\n }\n else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n }\n 
else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n }\n return payload;\n };\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param mapper - The mapper which defines the metadata of the serializable object\n * @param responseBody - A valid Javascript entity to be deserialized\n * @param objectName - Name of the deserialized object\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object\n */\n Serializer.prototype.deserialize = function (mapper, responseBody, objectName, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n var payload;\n var mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName;\n }\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions);\n }\n else {\n if (this.isXML) {\n var xmlCharKey = updatedOptions.xmlCharKey;\n var castResponseBody = responseBody;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n }\n else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n }\n else if (responseBody === \"false\") {\n payload = false;\n }\n else {\n payload = responseBody;\n }\n }\n else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n }\n else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody);\n }\n else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody);\n }\n else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = decodeString(responseBody);\n }\n else 
if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody);\n }\n else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions);\n }\n else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions);\n }\n }\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n return payload;\n };\n return Serializer;\n}());\nfunction trimEnd(str, ch) {\n var len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\nfunction bufferToBase64Url(buffer) {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(\"Please provide an input of type Uint8Array for converting to Base64Url.\");\n }\n // Uint8Array to Base64.\n var str = encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\")\n .replace(/\\+/g, \"-\")\n .replace(/\\//g, \"_\");\n}\nfunction base64UrlToByteArray(str) {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return decodeString(str);\n}\nfunction splitSerializeName(prop) {\n var classes = [];\n var partialclass = \"\";\n if (prop) {\n var subwords = prop.split(\".\");\n for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) {\n var item = subwords_1[_i];\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n }\n else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n return classes;\n}\nfunction dateToUnixTime(d) {\n if (!d) {\n return undefined;\n }\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d);\n }\n return Math.floor(d.getTime() / 1000);\n}\nfunction unixTimeToDate(n) {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\nfunction serializeBasicTypes(typeName, objectName, value) {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(objectName + \" with value \" + value + \" must be of type number.\");\n }\n }\n else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(objectName + \" with value \\\"\" + value + \"\\\" must be of type string.\");\n }\n }\n else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && isValidUuid(value))) {\n throw new Error(objectName + \" with value \\\"\" + value + \"\\\" must be of type string and a valid uuid.\");\n }\n }\n else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(objectName + \" with value \" + value + \" must be of type boolean.\");\n }\n }\n else if (typeName.match(/^Stream$/i) !== null) {\n var objectType = typeof value;\n if (objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)) {\n throw new Error(objectName + \" must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.\");\n 
}\n }\n }\n return value;\n}\nfunction serializeEnumType(objectName, allowedValues, value) {\n if (!allowedValues) {\n throw new Error(\"Please provide a set of allowedValues to validate \" + objectName + \" as an Enum Type.\");\n }\n var isPresent = allowedValues.some(function (item) {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(value + \" is not a valid value for \" + objectName + \". The valid values are: \" + JSON.stringify(allowedValues) + \".\");\n }\n return value;\n}\nfunction serializeByteArrayType(objectName, value) {\n var returnValue = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(objectName + \" must be of type Uint8Array.\");\n }\n returnValue = encodeByteArray(value);\n }\n return returnValue;\n}\nfunction serializeBase64UrlType(objectName, value) {\n var returnValue = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(objectName + \" must be of type Uint8Array.\");\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\nfunction serializeDateTypes(typeName, value, objectName) {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in ISO8601 format.\");\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n }\n else if (typeName.match(/^DateTime$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in ISO8601 format.\");\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n }\n else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in RFC-1123 format.\");\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n }\n else if (typeName.match(/^UnixTime$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in RFC-1123/ISO8601 format \" +\n \"for it to be serialized in UnixTime/Epoch format.\");\n }\n value = dateToUnixTime(value);\n }\n else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!isDuration(value)) {\n throw new Error(objectName + \" must be a string in ISO 8601 format. 
Instead was \\\"\" + value + \"\\\".\");\n }\n }\n }\n return value;\n}\nfunction serializeSequenceType(serializer, mapper, object, objectName, isXml, options) {\n var _a, _b;\n if (!Array.isArray(object)) {\n throw new Error(objectName + \" must be of type Array.\");\n }\n var elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\"element\\\" metadata for an Array must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName + \".\"));\n }\n var tempArray = [];\n for (var i = 0; i < object.length; i++) {\n var serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n if (isXml && elementType.xmlNamespace) {\n var xmlnsKey = elementType.xmlNamespacePrefix\n ? \"xmlns:\" + elementType.xmlNamespacePrefix\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = tslib.__assign({}, serializedValue);\n tempArray[i][XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a);\n }\n else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b);\n }\n }\n else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\nfunction serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) {\n var _a;\n if (typeof object !== \"object\") {\n throw new Error(objectName + \" must be of type object.\");\n }\n var valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\"\\\"value\\\" metadata for a Dictionary must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName + \".\"));\n }\n var tempDictionary = {};\n for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) {\n var key = _b[_i];\n var serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n var xmlnsKey = mapper.xmlNamespacePrefix ? \"xmlns:\" + mapper.xmlNamespacePrefix : \"xmlns\";\n var result = tempDictionary;\n result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a);\n return result;\n }\n return tempDictionary;\n}\n/**\n * Resolves the additionalProperties property from a referenced mapper\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveAdditionalProperties(serializer, mapper, objectName) {\n var additionalProperties = mapper.type.additionalProperties;\n if (!additionalProperties && mapper.type.className) {\n var modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.additionalProperties;\n }\n return additionalProperties;\n}\n/**\n * Finds the mapper referenced by className\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(serializer, mapper, objectName) {\n var className = mapper.type.className;\n if (!className) {\n throw new Error(\"Class name for model \\\"\" + objectName + \"\\\" is not provided in the mapper \\\"\" + JSON.stringify(mapper, undefined, 2) + \"\\\".\");\n }\n return serializer.modelMappers[className];\n}\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(serializer, mapper, objectName) {\n var modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n var modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(\"mapper() cannot be null or undefined for model \\\"\" + mapper.type.className + \"\\\".\");\n }\n modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\"modelProperties cannot be null or undefined in the \" +\n (\"mapper \\\"\" + JSON.stringify(modelMapper) + \"\\\" of type \\\"\" + mapper.type.className + \"\\\" for object \\\"\" + objectName + \"\\\".\"));\n }\n }\n return modelProps;\n}\nfunction serializeCompositeType(serializer, mapper, object, objectName, isXml, options) {\n var _a, _b;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n if (object != undefined) {\n var payload = {};\n var modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) {\n var key = _c[_i];\n var propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n var propName = void 0;\n var parentObject = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n }\n else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n }\n else {\n var paths = splitSerializeName(propertyMapper.serializedName);\n propName = paths.pop();\n for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) {\n var pathName = paths_1[_d];\n var childObject = parentObject[pathName];\n if (childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n var xmlnsKey = mapper.xmlNamespacePrefix\n ? \"xmlns:\" + mapper.xmlNamespacePrefix\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = tslib.__assign(tslib.__assign({}, parentObject[XML_ATTRKEY]), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a));\n }\n var propertyObjectName = propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n var toSerialize = object[key];\n var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined) {\n toSerialize = mapper.serializedName;\n }\n var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options);\n if (serializedValue !== undefined && propName != undefined) {\n var value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n }\n else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b);\n }\n else {\n parentObject[propName] = value;\n }\n }\n }\n }\n var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n var propNames = Object.keys(modelProps);\n var _loop_1 = function (clientPropName) {\n var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; });\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '[\"' + clientPropName + '\"]', options);\n }\n };\n for (var clientPropName in object) {\n _loop_1(clientPropName);\n }\n }\n return payload;\n }\n return object;\n}\nfunction getXmlObjectValue(propertyMapper, serializedValue, isXml, options) {\n var _a;\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n var xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? 
\"xmlns:\" + propertyMapper.xmlNamespacePrefix\n : \"xmlns\";\n var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a);\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n }\n else {\n var result_1 = tslib.__assign({}, serializedValue);\n result_1[XML_ATTRKEY] = xmlNamespace;\n return result_1;\n }\n }\n var result = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\nfunction isSpecialXmlProperty(propertyName, options) {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\nfunction deserializeCompositeType(serializer, mapper, responseBody, objectName, options) {\n var _a;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n var modelProps = resolveModelProperties(serializer, mapper, objectName);\n var instance = {};\n var handledPropertyNames = [];\n for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) {\n var key = _b[_i];\n var propertyMapper = modelProps[key];\n var paths = splitSerializeName(modelProps[key].serializedName);\n handledPropertyNames.push(paths[0]);\n var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName;\n var propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n var headerCollectionPrefix = propertyMapper.headerCollectionPrefix;\n if (headerCollectionPrefix) {\n var dictionary = {};\n for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) {\n var headerKey = _d[_c];\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options);\n }\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n }\n else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options);\n }\n else {\n var propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n var wrapped = responseBody[xmlName];\n var elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? 
_a : [];\n instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options);\n }\n else {\n var property = responseBody[propertyName];\n instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options);\n }\n }\n }\n else {\n // deserialize the property if it is present in the provided responseBody instance\n var propertyInstance = void 0;\n var res = responseBody;\n // traversing the object step by step.\n for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) {\n var item = paths_2[_e];\n if (!res)\n break;\n res = res[item];\n }\n propertyInstance = res;\n var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined) {\n propertyInstance = mapper.serializedName;\n }\n var serializedValue = void 0;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n instance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);\n }\n else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);\n instance[key] = serializedValue;\n }\n }\n }\n var additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n var isAdditionalProperty = function (responsePropName) {\n for (var clientPropName in modelProps) {\n var paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n for (var responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '[\"' + responsePropName + '\"]', options);\n }\n }\n }\n else if (responseBody) {\n for (var _f = 0, _g = Object.keys(responseBody); _f < _g.length; _f++) {\n var key = _g[_f];\n if (instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)) {\n instance[key] = responseBody[key];\n }\n }\n }\n return instance;\n}\nfunction deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) {\n var value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\"\\\"value\\\" metadata for a Dictionary must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName));\n }\n if (responseBody) {\n var tempDictionary = {};\n for (var _i = 0, _a = 
Object.keys(responseBody); _i < _a.length; _i++) {\n var key = _a[_i];\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\nfunction deserializeSequenceType(serializer, mapper, responseBody, objectName, options) {\n var element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\"element\\\" metadata for an Array must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName));\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n var tempArray = [];\n for (var i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + \"[\" + i + \"]\", options);\n }\n return tempArray;\n }\n return responseBody;\n}\nfunction getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) {\n var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n var discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n var typeName = mapper.type.uberParent || mapper.type.className;\n var indexDiscriminator = discriminatorValue === typeName\n ? discriminatorValue\n : typeName + \".\" + discriminatorValue;\n var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\nfunction getPolymorphicDiscriminatorRecursively(serializer, mapper) {\n return (mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className));\n}\nfunction getPolymorphicDiscriminatorSafely(serializer, typeName) {\n return (typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator);\n}\n// TODO: why is this here?\nfunction serializeObject(toSerialize) {\n var castToSerialize = toSerialize;\n if (toSerialize == undefined)\n return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = encodeByteArray(toSerialize);\n return toSerialize;\n }\n else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n }\n else if (Array.isArray(toSerialize)) {\n var array = [];\n for (var i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n }\n else if (typeof toSerialize === \"object\") {\n var dictionary = {};\n for (var property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o) {\n var result = {};\n for (var _i = 0, o_1 = o; _i < o_1.length; _i++) {\n var key = o_1[_i];\n result[key] = key;\n }\n return result;\n}\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nvar MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n 
\"Stream\",\n \"TimeSpan\",\n \"UnixTime\"\n]);\n\n// Copyright (c) Microsoft Corporation.\nfunction isWebResourceLike(object) {\n if (object && typeof object === \"object\") {\n var castObject = object;\n if (typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\") {\n return true;\n }\n }\n return false;\n}\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nvar WebResource = /** @class */ (function () {\n function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n WebResource.prototype.validateRequestProperties = function () {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n };\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n WebResource.prototype.prepare = function (options) {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n if (options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n if (options.url && options.pathTemplate) {\n throw new Error(\"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\");\n }\n if ((options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n // set the method\n if (options.method) {\n var validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error('The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods));\n }\n }\n this.method = options.method.toUpperCase();\n // construct the url if path template is provided\n if (options.pathTemplate) {\n var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters;\n if (typeof pathTemplate_1 !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n var baseUrl = options.baseUrl;\n var url_1 = baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate_1.startsWith(\"/\") ? pathTemplate_1.slice(1) : pathTemplate_1);\n var segments = url_1.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters_1) {\n throw new Error(\"pathTemplate: \" + pathTemplate_1 + \" has been provided. Hence, options.pathParameters must also be provided.\");\n }\n segments.forEach(function (item) {\n var pathParamName = item.slice(1, -1);\n var pathParam = pathParameters_1[pathParamName];\n if (pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")) {\n var stringifiedPathParameters = JSON.stringify(pathParameters_1, undefined, 2);\n throw new Error(\"pathTemplate: \" + pathTemplate_1 + \" contains the path parameter \" + pathParamName +\n (\" however, it is not present in parameters: \" + stringifiedPathParameters + \".\") +\n (\"The value of the path parameter can either be a \\\"string\\\" of the form { \" + pathParamName + \": \\\"some sample value\\\" } or \") +\n (\"it can be an \\\"object\\\" of the form { \\\"\" + pathParamName + \"\\\": { value: \\\"some sample value\\\", skipUrlEncoding: true } }.\"));\n }\n if (typeof pathParam.valueOf() === \"string\") {\n url_1 = url_1.replace(item, encodeURIComponent(pathParam));\n }\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\"options.pathParameters[\" + pathParamName + \"] is of type \\\"object\\\" but it does not contain a \\\"value\\\" property.\");\n }\n if (pathParam.skipUrlEncoding) {\n url_1 = url_1.replace(item, pathParam.value);\n }\n else {\n url_1 = url_1.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url_1;\n }\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n var queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\"options.queryParameters must be of type object. 
It should be a JSON object \" +\n \"of \\\"query-parameter-name\\\" as the key and the \\\"query-parameter-value\\\" as the value. \" +\n \"The \\\"query-parameter-value\\\" may be fo type \\\"string\\\" or an \\\"object\\\" of the form { value: \\\"query-parameter-value\\\", skipUrlEncoding: true }.\");\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n var queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (var queryParamName in queryParameters) {\n var queryParam = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n }\n else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\"options.queryParameters[\" + queryParamName + \"] is of type \\\"object\\\" but it does not contain a \\\"value\\\" property.\");\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n }\n else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n // add headers to the request if they are provided\n if (options.headers) {\n var headers = options.headers;\n for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) {\n var headerName = _a[_i];\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n }\n else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, \"requestBody\");\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n return this;\n };\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n WebResource.prototype.clone = function () {\n var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes);\n if (this.formData) {\n result.formData = this.formData;\n }\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n return result;\n };\n return WebResource;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar custom = util.inspect.custom;\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nvar URLQuery = /** @class */ (function () {\n function URLQuery() {\n this._rawQuery = {};\n }\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n URLQuery.prototype.any = function () {\n return Object.keys(this._rawQuery).length > 0;\n };\n /**\n * Get the keys of the query string.\n */\n URLQuery.prototype.keys = function () {\n return Object.keys(this._rawQuery);\n };\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n URLQuery.prototype.set = function (parameterName, parameterValue) {\n var caseParameterValue = parameterValue;\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n var newValue = Array.isArray(caseParameterValue)\n ? caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n }\n else {\n delete this._rawQuery[parameterName];\n }\n }\n };\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n URLQuery.prototype.get = function (parameterName) {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n };\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n URLQuery.prototype.toString = function () {\n var result = \"\";\n for (var parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n var parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n var parameterStrings = [];\n for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) {\n var parameterValueElement = parameterValue_1[_i];\n parameterStrings.push(parameterName + \"=\" + parameterValueElement);\n }\n result += parameterStrings.join(\"&\");\n }\n else {\n result += parameterName + \"=\" + parameterValue;\n }\n }\n return result;\n };\n /**\n * Parse a URLQuery from the provided text.\n */\n URLQuery.parse = function (text) {\n var result = new URLQuery();\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n var currentState = \"ParameterName\";\n var parameterName = \"\";\n var parameterValue = \"\";\n for (var i = 0; i < text.length; ++i) {\n var currentCharacter = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n return result;\n };\n return URLQuery;\n}());\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nvar URLBuilder = /** @class */ (function () {\n function URLBuilder() {\n }\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setScheme = function (scheme) {\n if (!scheme) {\n this._scheme = undefined;\n }\n else {\n this.set(scheme, \"SCHEME\");\n }\n };\n /**\n * Get the scheme that has been set in this URL.\n */\n URLBuilder.prototype.getScheme = function () {\n return this._scheme;\n };\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setHost = function (host) {\n if (!host) {\n this._host = undefined;\n }\n else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n };\n /**\n * Get the host that has been set in this URL.\n */\n URLBuilder.prototype.getHost = function () {\n return this._host;\n };\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setPort = function (port) {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n }\n else {\n this.set(port.toString(), \"PORT\");\n }\n };\n /**\n * Get the port that has been set in this URL.\n */\n URLBuilder.prototype.getPort = function () {\n return this._port;\n };\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n URLBuilder.prototype.setPath = function (path) {\n if (!path) {\n this._path = undefined;\n }\n else {\n var schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n var schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n }\n else {\n this.set(path, \"PATH\");\n }\n }\n };\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n URLBuilder.prototype.appendPath = function (path) {\n if (path) {\n var currentPath = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n };\n /**\n * Get the path that has been set in this URL.\n */\n URLBuilder.prototype.getPath = function () {\n return this._path;\n };\n /**\n * Set the query in this URL.\n */\n URLBuilder.prototype.setQuery = function (query) {\n if (!query) {\n this._query = undefined;\n }\n else {\n this._query = URLQuery.parse(query);\n }\n };\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n };\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) {\n return this._query ? this._query.get(queryParameterName) : undefined;\n };\n /**\n * Get the query in this URL.\n */\n URLBuilder.prototype.getQuery = function () {\n return this._query ? 
this._query.toString() : undefined;\n };\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n URLBuilder.prototype.set = function (text, startState) {\n var tokenizer = new URLTokenizer(text, startState);\n while (tokenizer.next()) {\n var token = tokenizer.current();\n var tokenPath = void 0;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n default:\n throw new Error(\"Unrecognized URLTokenType: \" + token.type);\n }\n }\n }\n };\n URLBuilder.prototype.toString = function () {\n var result = \"\";\n if (this._scheme) {\n result += this._scheme + \"://\";\n }\n if (this._host) {\n result += this._host;\n }\n if (this._port) {\n result += \":\" + this._port;\n }\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n if (this._query && this._query.any()) {\n result += \"?\" + this._query.toString();\n }\n return result;\n };\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n };\n URLBuilder.parse = function (text) {\n var result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n };\n return URLBuilder;\n}());\nvar URLToken = /** @class */ (function () {\n function URLToken(text, type) {\n this.text = text;\n this.type = type;\n }\n URLToken.scheme = function (text) {\n return new URLToken(text, \"SCHEME\");\n };\n URLToken.host = function (text) {\n return new URLToken(text, \"HOST\");\n };\n URLToken.port = function (text) {\n return new URLToken(text, \"PORT\");\n };\n URLToken.path = function (text) {\n return new URLToken(text, \"PATH\");\n };\n URLToken.query = function (text) {\n return new URLToken(text, \"QUERY\");\n };\n return URLToken;\n}());\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nfunction isAlphaNumericCharacter(character) {\n var characterCode = character.charCodeAt(0);\n return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */);\n}\n/**\n * A class that tokenizes URL strings.\n */\nvar URLTokenizer = /** @class */ (function () {\n function URLTokenizer(_text, state) {\n this._text = _text;\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? 
state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n URLTokenizer.prototype.current = function () {\n return this._currentToken;\n };\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n URLTokenizer.prototype.next = function () {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n }\n else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n case \"HOST\":\n nextHost(this);\n break;\n case \"PORT\":\n nextPort(this);\n break;\n case \"PATH\":\n nextPath(this);\n break;\n case \"QUERY\":\n nextQuery(this);\n break;\n default:\n throw new Error(\"Unrecognized URLTokenizerState: \" + this._currentState);\n }\n }\n return !!this._currentToken;\n };\n return URLTokenizer;\n}());\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer) {\n var result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer) {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer) {\n return tokenizer._text[tokenizer._currentIndex];\n}\n/**\n * Advance to the character in text that is \"step\" characters ahead. If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer, step) {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer, charactersToPeek) {\n var endIndex = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer, condition) {\n var result = \"\";\n while (hasCurrentCharacter(tokenizer)) {\n var currentCharacter = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n }\n else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n return result;\n}\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer) {\n return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); });\n}\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer) {\n var terminatingCharacters = [];\n for (var _i = 1; _i < arguments.length; _i++) {\n terminatingCharacters[_i - 1] = arguments[_i];\n }\n return readWhile(tokenizer, function (character) { 
return terminatingCharacters.indexOf(character) === -1; });\n}\nfunction nextScheme(tokenizer) {\n var scheme = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else {\n tokenizer._currentState = \"HOST\";\n }\n}\nfunction nextSchemeOrHost(tokenizer) {\n var schemeOrHost = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n }\n else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n }\n else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\nfunction nextHost(tokenizer) {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n var host = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n }\n else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextPort(tokenizer) {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n var port = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextPath(tokenizer) {\n var path = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextQuery(tokenizer) {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n var query = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n\n// Copyright (c) Microsoft Corporation.\nvar RedactedString = \"REDACTED\";\nvar defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n \"Accept\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n 
\"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\"\n];\nvar defaultAllowedQueryParameters = [\"api-version\"];\nvar Sanitizer = /** @class */ (function () {\n function Sanitizer(_a) {\n var _b = _a === void 0 ? {} : _a, _c = _b.allowedHeaderNames, allowedHeaderNames = _c === void 0 ? [] : _c, _d = _b.allowedQueryParameters, allowedQueryParameters = _d === void 0 ? [] : _d;\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n this.allowedHeaderNames = new Set(allowedHeaderNames.map(function (n) { return n.toLowerCase(); }));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map(function (p) { return p.toLowerCase(); }));\n }\n Sanitizer.prototype.sanitize = function (obj) {\n return JSON.stringify(obj, this.replacer.bind(this), 2);\n };\n Sanitizer.prototype.replacer = function (key, value) {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return tslib.__assign(tslib.__assign({}, value), { name: value.name, message: value.message });\n }\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(key, value);\n }\n else if (key === \"url\") {\n return this.sanitizeUrl(value);\n }\n else if (key === \"query\") {\n return this.sanitizeQuery(value);\n }\n else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n }\n else if (key === \"response\") {\n // Don't log response again\n return undefined;\n }\n else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n }\n return value;\n };\n Sanitizer.prototype.sanitizeHeaders = function (_, value) {\n return this.sanitizeObject(value, this.allowedHeaderNames, function (v, k) { return v[k].value; });\n };\n Sanitizer.prototype.sanitizeQuery = function (value) {\n return this.sanitizeObject(value, this.allowedQueryParameters, function (v, k) { return v[k]; });\n };\n Sanitizer.prototype.sanitizeObject = function (value, allowedKeys, accessor) {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n var sanitized = {};\n for (var _i = 0, _a = Object.keys(value); _i < _a.length; _i++) {\n var k = _a[_i];\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n }\n else {\n sanitized[k] = RedactedString;\n }\n }\n return sanitized;\n };\n Sanitizer.prototype.sanitizeUrl = function (value) {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n var urlBuilder = URLBuilder.parse(value);\n var queryString = urlBuilder.getQuery();\n if (!queryString) {\n return value;\n }\n var query = URLQuery.parse(queryString);\n for (var _i = 0, _a = query.keys(); _i < _a.length; _i++) {\n var k = _a[_i];\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n };\n return Sanitizer;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar errorSanitizer = new Sanitizer();\nvar RestError = /** @class */ (function (_super) {\n tslib.__extends(RestError, _super);\n function RestError(message, code, statusCode, request, response) {\n var _this = _super.call(this, message) || this;\n _this.name = \"RestError\";\n _this.code = code;\n _this.statusCode = statusCode;\n _this.request = request;\n _this.response = response;\n Object.setPrototypeOf(_this, RestError.prototype);\n return _this;\n }\n /**\n * Logging method for util.inspect in Node\n */\n RestError.prototype[custom] = function () {\n return \"RestError: \" + this.message + \" \\n \" + errorSanitizer.sanitize(this);\n };\n RestError.REQUEST_SEND_ERROR = \"REQUEST_SEND_ERROR\";\n RestError.PARSE_ERROR = \"PARSE_ERROR\";\n return RestError;\n}(Error));\n\n// Copyright (c) Microsoft Corporation.\nvar logger = logger$1.createClientLogger(\"core-http\");\n\n// Copyright (c) Microsoft Corporation.\nvar ReportTransform = /** @class */ (function (_super) {\n tslib.__extends(ReportTransform, _super);\n function ReportTransform(progressCallback) {\n var _this = _super.call(this) || this;\n _this.progressCallback = progressCallback;\n _this.loadedBytes = 0;\n return _this;\n }\n ReportTransform.prototype._transform = function (chunk, _encoding, callback) {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback({ loadedBytes: this.loadedBytes });\n callback(undefined);\n };\n return ReportTransform;\n}(stream.Transform));\nvar FetchHttpClient = /** @class */ (function () {\n function FetchHttpClient() {\n }\n FetchHttpClient.prototype.sendRequest = function (httpRequest) {\n var _a;\n return tslib.__awaiter(this, void 0, void 0, function () {\n var abortController$1, abortListener, formData, requestForm_1, appendFormValue, _i, _b, formKey, formValue, j, contentType, body, onUploadProgress, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, streaming, _c, onDownloadProgress, responseBody, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone;\n var _d;\n 
return tslib.__generator(this, function (_e) {\n switch (_e.label) {\n case 0:\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\");\n }\n abortController$1 = new abortController.AbortController();\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new abortController.AbortError(\"The operation was aborted.\");\n }\n abortListener = function (event) {\n if (event.type === \"abort\") {\n abortController$1.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n if (httpRequest.timeout) {\n setTimeout(function () {\n abortController$1.abort();\n }, httpRequest.timeout);\n }\n if (httpRequest.formData) {\n formData = httpRequest.formData;\n requestForm_1 = new FormData();\n appendFormValue = function (key, value) {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")) {\n requestForm_1.append(key, value.value, value.options);\n }\n else {\n requestForm_1.append(key, value);\n }\n };\n for (_i = 0, _b = Object.keys(formData); _i < _b.length; _i++) {\n formKey = _b[_i];\n formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n }\n else {\n appendFormValue(formKey, formValue);\n }\n }\n httpRequest.body = requestForm_1;\n httpRequest.formData = undefined;\n contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm_1.getBoundary === \"function\") {\n httpRequest.headers.set(\"Content-Type\", \"multipart/form-data; boundary=\" + requestForm_1.getBoundary());\n }\n else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n onUploadProgress = httpRequest.onUploadProgress;\n uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n }\n else {\n uploadReportStream.end(body);\n }\n body = uploadReportStream;\n }\n return [4 /*yield*/, this.prepareRequest(httpRequest)];\n case 1:\n platformSpecificRequestInit = _e.sent();\n requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: \"manual\" }, platformSpecificRequestInit);\n _e.label = 2;\n case 2:\n _e.trys.push([2, 8, 9, 10]);\n return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)];\n case 3:\n response = _e.sent();\n headers = parseHeaders(response.headers);\n streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) ||\n httpRequest.streamResponseBody;\n _d = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? 
response.body\n : undefined\n };\n if (!!streaming) return [3 /*break*/, 5];\n return [4 /*yield*/, response.text()];\n case 4:\n _c = _e.sent();\n return [3 /*break*/, 6];\n case 5:\n _c = undefined;\n _e.label = 6;\n case 6:\n operationResponse = (_d.bodyAsText = _c,\n _d);\n onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n responseBody = response.body || undefined;\n if (isReadableStream(responseBody)) {\n downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n }\n else {\n length_1 = parseInt(headers.get(\"Content-Length\")) || undefined;\n if (length_1) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length_1 });\n }\n }\n }\n return [4 /*yield*/, this.processRequest(operationResponse)];\n case 7:\n _e.sent();\n return [2 /*return*/, operationResponse];\n case 8:\n error_1 = _e.sent();\n fetchError = error_1;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest);\n }\n else if (fetchError.type === \"aborted\") {\n throw new abortController.AbortError(\"The operation was aborted.\");\n }\n throw fetchError;\n case 9:\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);\n }\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(function () {\n var _a;\n (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener(\"abort\", abortListener);\n return;\n })\n .catch(function (e) {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n return [7 /*endfinally*/];\n case 10: return [2 /*return*/];\n }\n });\n });\n };\n return FetchHttpClient;\n}());\nfunction isReadableStream(body) {\n return body && typeof body.pipe === \"function\";\n}\nfunction isStreamComplete(stream) {\n return new Promise(function (resolve) {\n stream.on(\"close\", resolve);\n stream.on(\"end\", resolve);\n stream.on(\"error\", resolve);\n });\n}\nfunction parseHeaders(headers) {\n var httpHeaders = new HttpHeaders();\n headers.forEach(function (value, key) {\n httpHeaders.set(key, value);\n });\n return httpHeaders;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction createProxyAgent(requestUrl, proxySettings, headers) {\n var host = URLBuilder.parse(proxySettings.host).getHost();\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n var tunnelOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {}\n }\n };\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy.proxyAuth = proxySettings.username + \":\" + proxySettings.password;\n }\n var isRequestHttps = isUrlHttps(requestUrl);\n var isProxyHttps = isUrlHttps(proxySettings.host);\n var proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions)\n };\n return proxyAgent;\n}\nfunction isUrlHttps(url) {\n var urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\nfunction createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n }\n else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n }\n else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n }\n else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\nfunction isValidPort(port) {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction getCachedAgent(isHttps, agentCache) {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\nvar NodeFetchHttpClient = /** @class */ (function (_super) {\n tslib.__extends(NodeFetchHttpClient, _super);\n function NodeFetchHttpClient() {\n var _this = _super !== null && _super.apply(this, arguments) || this;\n _this.proxyAgents = {};\n _this.keepAliveAgents = {};\n _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n return _this;\n }\n NodeFetchHttpClient.prototype.getOrCreateAgent = function (httpRequest) {\n var isHttps = isUrlHttps(httpRequest.url);\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n var agent = getCachedAgent(isHttps, this.proxyAgents);\n if (agent) {\n return agent;\n }\n var tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n this.proxyAgents.httpsAgent = tunnel.agent;\n }\n else {\n this.proxyAgents.httpAgent = tunnel.agent;\n }\n return agent;\n }\n else if (httpRequest.keepAlive) {\n var agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n var agentOptions = {\n keepAlive: httpRequest.keepAlive\n };\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n }\n else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n return agent;\n }\n else {\n return isHttps ? https.globalAgent : http.globalAgent;\n }\n };\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n NodeFetchHttpClient.prototype.fetch = function (input, init) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, node_fetch(input, init)];\n });\n });\n };\n NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var requestInit, cookieString;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n requestInit = {};\n if (!(this.cookieJar && !httpRequest.headers.get(\"Cookie\"))) return [3 /*break*/, 2];\n return [4 /*yield*/, new Promise(function (resolve, reject) {\n _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) {\n if (err) {\n reject(err);\n }\n else {\n resolve(cookie);\n }\n });\n })];\n case 1:\n cookieString = _a.sent();\n httpRequest.headers.set(\"Cookie\", cookieString);\n _a.label = 2;\n case 2:\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n requestInit.compress = httpRequest.decompressResponse;\n return [2 /*return*/, requestInit];\n }\n });\n });\n };\n NodeFetchHttpClient.prototype.processRequest = function (operationResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var setCookieHeader_1;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!this.cookieJar) return [3 /*break*/, 2];\n setCookieHeader_1 = operationResponse.headers.get(\"Set-Cookie\");\n if (!(setCookieHeader_1 !== undefined)) return [3 /*break*/, 2];\n return [4 /*yield*/, new Promise(function (resolve, reject) {\n _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) {\n if (err) {\n reject(err);\n }\n else {\n resolve();\n }\n });\n })];\n case 1:\n 
_a.sent();\n _a.label = 2;\n case 2: return [2 /*return*/];\n }\n });\n });\n };\n return NodeFetchHttpClient;\n}(FetchHttpClient));\n\n// Copyright (c) Microsoft Corporation.\n(function (HttpPipelineLogLevel) {\n /**\n * A log level that indicates that no logs will be logged.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"OFF\"] = 0] = \"OFF\";\n /**\n * An error log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"ERROR\"] = 1] = \"ERROR\";\n /**\n * A warning log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"WARNING\"] = 2] = \"WARNING\";\n /**\n * An information log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"INFO\"] = 3] = \"INFO\";\n})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {}));\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nfunction operationOptionsToRequestOptionsBase(opts) {\n var requestOptions = opts.requestOptions, tracingOptions = opts.tracingOptions, additionalOptions = tslib.__rest(opts, [\"requestOptions\", \"tracingOptions\"]);\n var result = additionalOptions;\n if (requestOptions) {\n result = tslib.__assign(tslib.__assign({}, result), requestOptions);\n }\n if (tracingOptions) {\n result.spanOptions = tracingOptions.spanOptions;\n result.tracingContext = tracingOptions.tracingContext;\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar BaseRequestPolicy = /** @class */ (function () {\n function BaseRequestPolicy(_nextPolicy, _options) {\n this._nextPolicy = _nextPolicy;\n this._options = _options;\n }\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n BaseRequestPolicy.prototype.shouldLog = function (logLevel) {\n return this._options.shouldLog(logLevel);\n };\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n BaseRequestPolicy.prototype.log = function (logLevel, message) {\n this._options.log(logLevel, message);\n };\n return BaseRequestPolicy;\n}());\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nvar RequestPolicyOptions = /** @class */ (function () {\n function RequestPolicyOptions(_logger) {\n this._logger = _logger;\n }\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n RequestPolicyOptions.prototype.shouldLog = function (logLevel) {\n return (!!this._logger &&\n logLevel !== exports.HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel);\n };\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n RequestPolicyOptions.prototype.log = function (logLevel, message) {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n };\n return RequestPolicyOptions;\n}());\n\n// Copyright (c) Microsoft Corporation.\nfunction logPolicy(loggingOptions) {\n if (loggingOptions === void 0) { loggingOptions = {}; }\n return {\n create: function (nextPolicy, options) {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n }\n };\n}\nvar LogPolicy = /** @class */ (function (_super) {\n tslib.__extends(LogPolicy, _super);\n function LogPolicy(nextPolicy, options, _a) {\n var _b = _a === void 0 ? {} : _a, _c = _b.logger, logger$1 = _c === void 0 ? logger.info : _c, _d = _b.allowedHeaderNames, allowedHeaderNames = _d === void 0 ? [] : _d, _e = _b.allowedQueryParameters, allowedQueryParameters = _e === void 0 ? [] : _e;\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.logger = logger$1;\n _this.sanitizer = new Sanitizer({ allowedHeaderNames: allowedHeaderNames, allowedQueryParameters: allowedQueryParameters });\n return _this;\n }\n Object.defineProperty(LogPolicy.prototype, \"allowedHeaderNames\", {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n get: function () {\n return this.sanitizer.allowedHeaderNames;\n },\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n set: function (allowedHeaderNames) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(LogPolicy.prototype, \"allowedQueryParameters\", {\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n get: function () {\n return this.sanitizer.allowedQueryParameters;\n },\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n set: function (allowedQueryParameters) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n },\n enumerable: false,\n configurable: true\n });\n LogPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n if (!this.logger.enabled)\n return this._nextPolicy.sendRequest(request);\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then(function (response) { return _this.logResponse(response); });\n };\n LogPolicy.prototype.logRequest = function (request) {\n this.logger(\"Request: \" + this.sanitizer.sanitize(request));\n };\n LogPolicy.prototype.logResponse = function (response) {\n this.logger(\"Response status code: \" + response.status);\n this.logger(\"Headers: \" + this.sanitizer.sanitize(response.headers));\n return response;\n };\n return LogPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nfunction getPathStringFromParameter(parameter) {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\nfunction getPathStringFromParameterPath(parameterPath, mapper) {\n var result;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n }\n else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n }\n else {\n result = mapper.serializedName;\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nfunction getStreamResponseStatusCodes(operationSpec) {\n var result = new Set();\n for (var statusCode in operationSpec.responses) {\n var operationResponse = operationSpec.responses[statusCode];\n if (operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xml2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nvar xml2jsDefaultOptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\"\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false\n};\n// The xml2js settings for general XML parsing operations.\nvar xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n// The xml2js settings for general XML building operations.\nvar xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false\n};\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nfunction stringifyXML(obj, opts) {\n var _a;\n if (opts === void 0) { opts = {}; }\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;\n var builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nfunction parseXML(str, opts) {\n var _a;\n if (opts === void 0) { opts = {}; }\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY;\n var xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise(function (resolve, reject) {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n }\n else {\n xmlParser.parseString(str, function (err, res) {\n if (err) {\n reject(err);\n }\n else {\n resolve(res);\n }\n });\n }\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Create a new deserialization RequestPolicyCreator that will deserialize HTTP response bodies as they\n * pass through the HTTP pipeline.\n */\nfunction deserializationPolicy(deserializationContentTypes, parsingOptions) {\n return {\n create: function (nextPolicy, options) {\n return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions);\n }\n };\n}\nvar defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nvar defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\nvar DefaultDeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes\n }\n};\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nvar DeserializationPolicy = /** @class */ (function (_super) {\n tslib.__extends(DeserializationPolicy, _super);\n function DeserializationPolicy(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions) {\n if (parsingOptions === void 0) { parsingOptions = {}; }\n var _a;\n var _this = _super.call(this, nextPolicy, requestPolicyOptions) || this;\n _this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n _this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n _this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;\n return _this;\n }\n DeserializationPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this._nextPolicy.sendRequest(request).then(function (response) {\n return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response, {\n xmlCharKey: _this.xmlCharKey\n });\n })];\n });\n });\n };\n return DeserializationPolicy;\n}(BaseRequestPolicy));\nfunction getOperationResponse(parsedResponse) {\n var result;\n var request = parsedResponse.request;\n var operationSpec = request.operationSpec;\n if (operationSpec) {\n var operationResponseGetter = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n }\n else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\nfunction shouldDeserializeResponse(parsedResponse) {\n var shouldDeserialize = parsedResponse.request.shouldDeserialize;\n var result;\n if (shouldDeserialize === undefined) {\n result = true;\n }\n else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n }\n else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\nfunction deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? 
_b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(function (parsedResponse) {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n var operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n var responseSpec = getOperationResponse(parsedResponse);\n var _a = handleErrorResponse(parsedResponse, operationSpec, responseSpec), error = _a.error, shouldReturnResponse = _a.shouldReturnResponse;\n if (error) {\n throw error;\n }\n else if (shouldReturnResponse) {\n return parsedResponse;\n }\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n var valueToDeserialize = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, \"operationRes.parsedBody\", options);\n }\n catch (innerError) {\n var restError = new RestError(\"Error \" + innerError + \" occurred in deserializing the responseBody - \" + parsedResponse.bodyAsText, undefined, parsedResponse.status, parsedResponse.request, parsedResponse);\n throw restError;\n }\n }\n else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), \"operationRes.parsedHeaders\", options);\n }\n }\n return parsedResponse;\n });\n}\nfunction isOperationSpecEmpty(operationSpec) {\n var expectedStatusCodes = Object.keys(operationSpec.responses);\n return (expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\"));\n}\nfunction handleErrorResponse(parsedResponse, operationSpec, responseSpec) {\n var _a;\n var isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n var isExpectedStatusCode = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n }\n else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n var errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default;\n var streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) ||\n parsedResponse.request.streamResponseBody;\n var initialErrorMessage = streaming\n ? 
\"Unexpected status code: \" + parsedResponse.status\n : parsedResponse.bodyAsText;\n var error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse);\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n var defaultBodyMapper = errorResponseSpec.bodyMapper;\n var defaultHeadersMapper = errorResponseSpec.headersMapper;\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n var parsedBody = parsedResponse.parsedBody;\n var parsedError = void 0;\n if (defaultBodyMapper) {\n var valueToDeserialize = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName] : [];\n }\n parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, \"error.response.parsedBody\");\n }\n var internalError = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n if (defaultBodyMapper) {\n error.response.parsedBody = parsedError;\n }\n }\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.rawHeaders(), \"operationRes.parsedHeaders\");\n }\n }\n catch (defaultError) {\n error.message = \"Error \\\"\" + defaultError.message + \"\\\" occurred in deserializing the responseBody - \\\"\" + parsedResponse.bodyAsText + \"\\\" for the default response.\";\n }\n return { error: error, shouldReturnResponse: false };\n}\nfunction parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) {\n var _a;\n var errorHandler = function (err) {\n var msg = \"Error \\\"\" + err + \"\\\" occurred while parsing the response body - \" + operationResponse.bodyAsText + \".\";\n var errCode = err.code || RestError.PARSE_ERROR;\n var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse);\n return Promise.reject(e);\n };\n var streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n var text_1 = operationResponse.bodyAsText;\n var contentType = operationResponse.headers.get(\"Content-Type\") || \"\";\n var contentComponents = !contentType\n ? 
[]\n : contentType.split(\";\").map(function (component) { return component.toLowerCase(); });\n if (contentComponents.length === 0 ||\n contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) {\n return new Promise(function (resolve) {\n operationResponse.parsedBody = JSON.parse(text_1);\n resolve(operationResponse);\n }).catch(errorHandler);\n }\n else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) {\n return parseXML(text_1, opts)\n .then(function (body) {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n return Promise.resolve(operationResponse);\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nvar DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nvar DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nvar DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\nfunction isNumber(n) {\n return typeof n === \"number\";\n}\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial check on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(retryLimit, predicate, retryData, response, error) {\n if (!predicate(response, error)) {\n return false;\n }\n return retryData.retryCount < retryLimit;\n}\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation's error, if any.\n */\nfunction updateRetryData(retryOptions, retryData, err) {\n if (retryData === void 0) { retryData = { retryCount: 0, retryInterval: 0 }; }\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n retryData.error = err;\n }\n // Adjust retry count\n retryData.retryCount++;\n // Adjust retry interval\n var incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n var boundedRandDelta = retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval);\n return retryData;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) {\n return {\n create: function (nextPolicy, options) {\n return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval);\n }\n };\n}\n(function (RetryMode) {\n RetryMode[RetryMode[\"Exponential\"] = 0] = \"Exponential\";\n})(exports.RetryMode || (exports.RetryMode = {}));\nvar DefaultRetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL\n};\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nvar ExponentialRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(ExponentialRetryPolicy, _super);\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - 
The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n _this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n return _this;\n }\n ExponentialRetryPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .then(function (response) { return retry(_this, request, response); })\n .catch(function (error) { return retry(_this, request, error.response, undefined, error); });\n };\n return ExponentialRetryPolicy;\n}(BaseRequestPolicy));\nfunction retry(policy, request, response, retryData, requestError) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n function shouldPolicyRetry(responseParam) {\n var statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status;\n if (statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505) {\n return false;\n }\n return true;\n }\n var isAborted, res, err_1, err;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n retryData = updateRetryData({\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval\n }, retryData, requestError);\n isAborted = request.abortSignal && request.abortSignal.aborted;\n if (!(!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response))) return [3 /*break*/, 6];\n logger.info(\"Retrying request in \" + retryData.retryInterval);\n _a.label = 1;\n case 1:\n _a.trys.push([1, 4, , 5]);\n return [4 /*yield*/, delay(retryData.retryInterval)];\n case 2:\n _a.sent();\n return [4 /*yield*/, policy._nextPolicy.sendRequest(request.clone())];\n case 3:\n res = _a.sent();\n return [2 /*return*/, retry(policy, request, res, retryData)];\n case 4:\n err_1 = _a.sent();\n return [2 /*return*/, retry(policy, request, response, retryData, err_1)];\n case 5: return [3 /*break*/, 7];\n case 6:\n if (isAborted || requestError || !response) {\n err = retryData.error ||\n new RestError(\"Failed to send the request.\", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response);\n throw err;\n }\n else {\n return [2 /*return*/, response];\n }\n case 7: return [2 /*return*/];\n }\n });\n });\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction generateClientRequestIdPolicy(requestIdHeaderName) {\n if (requestIdHeaderName === void 0) { requestIdHeaderName = \"x-ms-client-request-id\"; }\n return {\n create: function (nextPolicy, options) {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n }\n };\n}\nvar GenerateClientRequestIdPolicy = /** @class */ (function (_super) {\n tslib.__extends(GenerateClientRequestIdPolicy, _super);\n function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._requestIdHeaderName = _requestIdHeaderName;\n return _this;\n }\n 
GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n };\n return GenerateClientRequestIdPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction getDefaultUserAgentKey() {\n return Constants.HeaderConstants.USER_AGENT;\n}\nfunction getPlatformSpecificData() {\n var runtimeInfo = {\n key: \"Node\",\n value: process.version\n };\n var osInfo = {\n key: \"OS\",\n value: \"(\" + os.arch() + \"-\" + os.type() + \"-\" + os.release() + \")\"\n };\n return [runtimeInfo, osInfo];\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction getRuntimeInfo() {\n var msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion\n };\n return [msRestRuntime];\n}\nfunction getUserAgentString(telemetryInfo, keySeparator, valueSeparator) {\n if (keySeparator === void 0) { keySeparator = \" \"; }\n if (valueSeparator === void 0) { valueSeparator = \"/\"; }\n return telemetryInfo\n .map(function (info) {\n var value = info.value ? \"\" + valueSeparator + info.value : \"\";\n return \"\" + info.key + value;\n })\n .join(keySeparator);\n}\nvar getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\nfunction getDefaultUserAgentValue() {\n var runtimeInfo = getRuntimeInfo();\n var platformSpecificData = getPlatformSpecificData();\n var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\nfunction userAgentPolicy(userAgentData) {\n var key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n var value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n return {\n create: function (nextPolicy, options) {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n }\n };\n}\nvar UserAgentPolicy = /** @class */ (function (_super) {\n tslib.__extends(UserAgentPolicy, _super);\n function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) {\n var _this = _super.call(this, _nextPolicy, _options) || this;\n _this._nextPolicy = _nextPolicy;\n _this._options = _options;\n _this.headerKey = headerKey;\n _this.headerValue = headerValue;\n return _this;\n }\n UserAgentPolicy.prototype.sendRequest = function (request) {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n };\n UserAgentPolicy.prototype.addUserAgentHeader = function (request) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n };\n return UserAgentPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nvar allowedRedirect = [\"GET\", \"HEAD\"];\nvar DefaultRedirectOptions = {\n handleRedirects: true,\n maxRetries: 20\n};\nfunction redirectPolicy(maximumRetries) {\n if (maximumRetries === void 0) { maximumRetries = 20; }\n return {\n create: function (nextPolicy, options) {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n }\n };\n}\nvar RedirectPolicy = /** @class */ (function (_super) {\n tslib.__extends(RedirectPolicy, _super);\n function RedirectPolicy(nextPolicy, options, maxRetries) {\n if (maxRetries === void 0) { maxRetries = 20; }\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.maxRetries = maxRetries;\n return _this;\n }\n RedirectPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request)\n .then(function (response) { return handleRedirect(_this, response, 0); });\n };\n return RedirectPolicy;\n}(BaseRequestPolicy));\nfunction handleRedirect(policy, response, currentRetries) {\n var request = response.request, status = response.status;\n var locationHeader = response.headers.get(\"location\");\n if (locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)) {\n var builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n return policy._nextPolicy\n .sendRequest(request)\n .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); });\n }\n return Promise.resolve(response);\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction rpRegistrationPolicy(retryTimeout) {\n if (retryTimeout === void 0) { retryTimeout = 30; }\n return {\n create: function (nextPolicy, options) {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n }\n };\n}\nvar RPRegistrationPolicy = /** @class */ (function (_super) {\n tslib.__extends(RPRegistrationPolicy, _super);\n function 
RPRegistrationPolicy(nextPolicy, options, _retryTimeout) {\n if (_retryTimeout === void 0) { _retryTimeout = 30; }\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._retryTimeout = _retryTimeout;\n return _this;\n }\n RPRegistrationPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .then(function (response) { return registerIfNeeded(_this, request, response); });\n };\n return RPRegistrationPolicy;\n}(BaseRequestPolicy));\nfunction registerIfNeeded(policy, request, response) {\n if (response.status === 409) {\n var rpName = checkRPNotRegisteredError(response.bodyAsText);\n if (rpName) {\n var urlPrefix = extractSubscriptionUrl(request.url);\n return (registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(function () { return false; })\n .then(function (registrationStatus) {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n }));\n }\n }\n return Promise.resolve(response);\n}\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(originalRequest, reuseUrlToo) {\n if (reuseUrlToo === void 0) { reuseUrlToo = false; }\n var reqOptions = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", generateUuid());\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n return reqOptions;\n}\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body) {\n var result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n }\n catch (err) {\n // do nothing;\n }\n if (responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\") {\n var matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url) {\n var result;\n var matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n }\n else {\n throw new Error(\"Unable to extract subscriptionId from the given url - \" + url + \".\");\n }\n return result;\n}\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param callback - The callback that handles the RP registration\n */\nfunction registerRP(policy, urlPrefix, provider, originalRequest) {\n var postUrl = urlPrefix + \"providers/\" + provider + \"/register?api-version=2016-02-01\";\n var getUrl = urlPrefix + \"providers/\" + provider + \"?api-version=2016-02-01\";\n var reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n return policy._nextPolicy.sendRequest(reqOptions).then(function (response) {\n if (response.status !== 200) {\n throw new Error(\"Autoregistration of \" + provider + \" failed. Please try registering manually.\");\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nfunction getRegistrationStatus(policy, url, originalRequest) {\n var reqOptions = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n return policy._nextPolicy.sendRequest(reqOptions).then(function (res) {\n var obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n }\n else {\n return delay(policy._retryTimeout * 1000)\n .then(function () { return getRegistrationStatus(policy, url, originalRequest); });\n }\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n// Default options for the cycler if none are provided\nvar DEFAULT_CYCLER_OPTIONS = {\n forcedRefreshWindowInMs: 1000,\n retryIntervalInMs: 3000,\n refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry\n};\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nfunction beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n function tryGetAccessToken() {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, finalToken;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!(Date.now() < timeoutInMs)) return [3 /*break*/, 5];\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, , 4]);\n return [4 /*yield*/, getAccessToken()];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n _a = _b.sent();\n return [2 /*return*/, null];\n case 4: return [3 /*break*/, 7];\n case 5: return [4 /*yield*/, getAccessToken()];\n case 6:\n finalToken = _b.sent();\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n return [2 /*return*/, finalToken];\n case 7: return [2 /*return*/];\n }\n });\n });\n }\n var token;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, tryGetAccessToken()];\n case 1:\n token = _a.sent();\n _a.label = 2;\n case 2:\n if (!(token === null)) return [3 /*break*/, 5];\n return [4 /*yield*/, delay(retryIntervalInMs)];\n case 3:\n _a.sent();\n return [4 /*yield*/, tryGetAccessToken()];\n case 4:\n token = _a.sent();\n return [3 /*break*/, 2];\n case 5: return [2 /*return*/, token];\n }\n });\n });\n}\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access 
token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(credential, scopes, tokenCyclerOptions) {\n var _this = this;\n var refreshWorker = null;\n var token = null;\n var options = tslib.__assign(tslib.__assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions);\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n var cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing() {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh() {\n var _a;\n return (!cycler.isRefreshing &&\n ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now());\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh() {\n return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now());\n }\n };\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions) {\n var _a;\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n var tryGetAccessToken = function () {\n return credential.getToken(scopes, getTokenOptions);\n };\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, \n // If we don't have a token, then we should timeout immediately\n (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now())\n .then(function (_token) {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch(function (reason) {\n // We also should reset the refresher if we enter a failed state. 
All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n return refreshWorker;\n }\n return function (tokenOptions) { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n if (cycler.mustRefresh)\n return [2 /*return*/, refresh(tokenOptions)];\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n return [2 /*return*/, token];\n });\n }); };\n}\n// #endregion\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nfunction bearerTokenAuthenticationPolicy(credential, scopes) {\n // This simple function encapsulates the entire process of reliably retrieving the token\n var getToken = createTokenCycler(credential, scopes /* , options */);\n var BearerTokenAuthenticationPolicy = /** @class */ (function (_super) {\n tslib.__extends(BearerTokenAuthenticationPolicy, _super);\n function BearerTokenAuthenticationPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n BearerTokenAuthenticationPolicy.prototype.sendRequest = function (webResource) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var token;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n spanOptions: webResource.spanOptions,\n tracingContext: webResource.tracingContext\n }\n })];\n case 1:\n token = (_a.sent()).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, \"Bearer \" + token);\n return [2 /*return*/, this._nextPolicy.sendRequest(webResource)];\n }\n });\n });\n };\n return BearerTokenAuthenticationPolicy;\n }(BaseRequestPolicy));\n return {\n create: function (nextPolicy, options) {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n }\n };\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) {\n return {\n create: function (nextPolicy, options) {\n return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval);\n }\n };\n}\n/**\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nvar SystemErrorRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(SystemErrorRetryPolicy, _super);\n function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n _this.retryInterval = isNumber(retryInterval) ? 
retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n _this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n _this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n return _this;\n }\n SystemErrorRetryPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch(function (error) { return retry$1(_this, request, error.response, error); });\n };\n return SystemErrorRetryPolicy;\n}(BaseRequestPolicy));\nfunction retry$1(policy, request, operationResponse, err, retryData) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n function shouldPolicyRetry(_response, error) {\n if (error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")) {\n return true;\n }\n return false;\n }\n var nestedErr_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n retryData = updateRetryData(policy, retryData, err);\n if (!shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) return [3 /*break*/, 5];\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n return [4 /*yield*/, delay(retryData.retryInterval)];\n case 2:\n _a.sent();\n return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())];\n case 3:\n nestedErr_1 = _a.sent();\n return [2 /*return*/, retry$1(policy, request, operationResponse, nestedErr_1, retryData)];\n case 4: return [3 /*break*/, 6];\n case 5:\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return [2 /*return*/, Promise.reject(retryData.error)];\n }\n return [2 /*return*/, operationResponse];\n case 6: return [2 /*return*/];\n }\n });\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n(function (QueryCollectionFormat) {\n QueryCollectionFormat[\"Csv\"] = \",\";\n QueryCollectionFormat[\"Ssv\"] = \" \";\n QueryCollectionFormat[\"Tsv\"] = \"\\t\";\n QueryCollectionFormat[\"Pipes\"] = \"|\";\n QueryCollectionFormat[\"Multi\"] = \"Multi\";\n})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {}));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * @internal\n */\nvar noProxyList = loadNoProxy();\nvar byPassedList = new Map();\nfunction loadEnvironmentProxyValue() {\n if (!process) {\n return undefined;\n }\n var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n var allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n return httpsProxy || allProxy || httpProxy;\n}\n// Check whether the host of a given `uri` is in the noProxyList.\n// If there's a match, any request sent to the same host won't have the proxy settings set.\n// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\nfunction isBypassed(uri) {\n if (noProxyList.length === 0) {\n return false;\n }\n var host = URLBuilder.parse(uri).getHost();\n if (byPassedList.has(host)) {\n return byPassedList.get(host);\n }\n var isBypassedFlag = false;\n for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) {\n var pattern = noProxyList_1[_i];\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain 
or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n }\n else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n }\n else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n byPassedList.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n/**\n * @internal\n */\nfunction loadNoProxy() {\n var noProxy = getEnvironmentValue(Constants.NO_PROXY);\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map(function (item) { return item.trim(); })\n .filter(function (item) { return item.length; });\n }\n return [];\n}\nfunction getDefaultProxySettings(proxyUrl) {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth;\n var parsedUrl = URLBuilder.parse(urlWithoutAuth);\n var schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username: username,\n password: password\n };\n}\nfunction proxyPolicy(proxySettings) {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n return {\n create: function (nextPolicy, options) {\n return new ProxyPolicy(nextPolicy, options, proxySettings);\n }\n };\n}\nfunction extractAuthFromUrl(url) {\n var atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n var schemeIndex = url.indexOf(\"://\");\n var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n var auth = url.substring(authStart, atIndex);\n var colonIndex = auth.indexOf(\":\");\n var hasPassword = colonIndex !== -1;\n var username = hasPassword ? auth.substring(0, colonIndex) : auth;\n var password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined;\n var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username: username,\n password: password,\n urlWithoutAuth: urlWithoutAuth\n };\n}\nvar ProxyPolicy = /** @class */ (function (_super) {\n tslib.__extends(ProxyPolicy, _super);\n function ProxyPolicy(nextPolicy, options, proxySettings) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.proxySettings = proxySettings;\n return _this;\n }\n ProxyPolicy.prototype.sendRequest = function (request) {\n if (!request.proxySettings && !isBypassed(request.url)) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n };\n return ProxyPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar StatusCodes = Constants.HttpConstants.StatusCodes;\nfunction throttlingRetryPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nvar ThrottlingRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(ThrottlingRetryPolicy, _super);\n function ThrottlingRetryPolicy(nextPolicy, options, _handleResponse) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._handleResponse = _handleResponse || _this._defaultResponseHandler;\n return _this;\n }\n ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) {\n if (response.status !== StatusCodes.TooManyRequests) {\n return response;\n }\n else {\n return _this._handleResponse(httpRequest, response);\n }\n })];\n });\n });\n };\n ThrottlingRetryPolicy.prototype._defaultResponseHandler = function (httpRequest, httpResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var retryAfterHeader, delayInMs;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER);\n if (retryAfterHeader) {\n delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n return [2 /*return*/, delay(delayInMs).then(function (_) { return _this._nextPolicy.sendRequest(httpRequest); })];\n }\n }\n return [2 /*return*/, httpResponse];\n });\n });\n };\n ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) {\n var retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n }\n else {\n return retryAfterInSeconds * 1000;\n }\n };\n ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) {\n try {\n var now = Date.now();\n var date = Date.parse(headerValue);\n var diff = date - now;\n return Number.isNaN(diff) ? 
undefined : diff;\n }\n catch (error) {\n return undefined;\n }\n };\n return ThrottlingRetryPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction signingPolicy(authenticationProvider) {\n return {\n create: function (nextPolicy, options) {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n }\n };\n}\nvar SigningPolicy = /** @class */ (function (_super) {\n tslib.__extends(SigningPolicy, _super);\n function SigningPolicy(nextPolicy, options, authenticationProvider) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.authenticationProvider = authenticationProvider;\n return _this;\n }\n SigningPolicy.prototype.signRequest = function (request) {\n return this.authenticationProvider.signRequest(request);\n };\n SigningPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this.signRequest(request).then(function (nextRequest) {\n return _this._nextPolicy.sendRequest(nextRequest);\n });\n };\n return SigningPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar DefaultKeepAliveOptions = {\n enable: true\n};\nfunction keepAlivePolicy(keepAliveOptions) {\n return {\n create: function (nextPolicy, options) {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n }\n };\n}\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nvar KeepAlivePolicy = /** @class */ (function (_super) {\n tslib.__extends(KeepAlivePolicy, _super);\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n function KeepAlivePolicy(nextPolicy, options, keepAliveOptions) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.keepAliveOptions = keepAliveOptions;\n return _this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n KeepAlivePolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n request.keepAlive = this.keepAliveOptions.enable;\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return KeepAlivePolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar createSpan = coreTracing.createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\"\n});\nfunction tracingPolicy(tracingOptions) {\n if (tracingOptions === void 0) { tracingOptions = {}; }\n return {\n create: function (nextPolicy, options) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n }\n };\n}\nvar TracingPolicy = /** @class */ (function (_super) {\n tslib.__extends(TracingPolicy, _super);\n function TracingPolicy(nextPolicy, options, tracingOptions) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.userAgent = tracingOptions.userAgent;\n return _this;\n }\n TracingPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var path, span, spanContext, traceParentHeader, traceState, response, serviceRequestId, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!request.tracingContext) {\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n }\n path = URLBuilder.parse(request.url).getPath() || \"/\";\n span = createSpan(path, {\n tracingOptions: {\n spanOptions: tslib.__assign(tslib.__assign({}, request.spanOptions), { 
kind: coreTracing.SpanKind.CLIENT }),\n tracingContext: request.tracingContext\n }\n }).span;\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId\n });\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n spanContext = span.context();\n traceParentHeader = coreTracing.getTraceParentHeader(spanContext);\n if (traceParentHeader) {\n request.headers.set(\"traceparent\", traceParentHeader);\n traceState = spanContext.traceState && spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return [4 /*yield*/, this._nextPolicy.sendRequest(request)];\n case 2:\n response = _a.sent();\n span.setAttribute(\"http.status_code\", response.status);\n serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.end();\n return [2 /*return*/, response];\n case 3:\n err_1 = _a.sent();\n span.end();\n throw err_1;\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n return TracingPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nfunction disableResponseDecompressionPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nvar DisableResponseDecompressionPolicy = /** @class */ (function (_super) {\n tslib.__extends(DisableResponseDecompressionPolicy, _super);\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n function DisableResponseDecompressionPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n DisableResponseDecompressionPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n request.decompressResponse = false;\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return DisableResponseDecompressionPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction ndJsonPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new NdJsonPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nvar NdJsonPolicy = /** @class */ (function (_super) {\n tslib.__extends(NdJsonPolicy, _super);\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n function NdJsonPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends a request.\n */\n NdJsonPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var body;\n return tslib.__generator(this, function (_a) {\n 
// There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map(function (item) { return JSON.stringify(item) + \"\\n\"; }).join(\"\");\n }\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return NdJsonPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar cachedHttpClient;\nfunction getCachedDefaultHttpClient() {\n if (!cachedHttpClient) {\n cachedHttpClient = new NodeFetchHttpClient();\n }\n return cachedHttpClient;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ServiceClient sends service requests and receives responses.\n */\nvar ServiceClient = /** @class */ (function () {\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n function ServiceClient(credentials, \n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options) {\n var _this = this;\n if (!options) {\n options = {};\n }\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n var requestPolicyFactories;\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n }\n else {\n var authPolicyFactory = undefined;\n if (coreAuth.isTokenCredential(credentials)) {\n logger.info(\"ServiceClient: creating bearer token authentication policy from provided credentials\");\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n var wrappedPolicyFactory = function () {\n var bearerTokenPolicyFactory = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n var serviceClient = _this;\n var serviceClientOptions = options;\n return {\n create: function (nextPolicy, createOptions) {\n var credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri);\n if (!credentialScopes) {\n throw new Error(\"When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. 
Unable to create a bearerTokenAuthenticationPolicy\");\n }\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes);\n }\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n }\n };\n };\n authPolicyFactory = wrappedPolicyFactory();\n }\n else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n }\n else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n /**\n * Send the provided httpRequest.\n */\n ServiceClient.prototype.sendRequest = function (options) {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n var httpRequest;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n }\n else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n }\n catch (error) {\n return Promise.reject(error);\n }\n var httpPipeline = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions);\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n };\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call when the response is received.\n */\n ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) {\n var _a;\n return tslib.__awaiter(this, void 0, void 0, function () {\n var serializerOptions, httpRequest, result, baseUri, requestUrl, _i, _b, urlParameter, urlParameterValue, _c, _d, queryParameter, queryParameterValue, index, item, index, contentType, _e, _f, headerParameter, headerValue, headerCollectionPrefix, _g, _h, key, options, customHeaderName, rawResponse, sendRequestError, error_1, error_2, cb;\n return tslib.__generator(this, function (_j) {\n switch (_j.label) {\n case 0:\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions;\n httpRequest = new WebResource();\n _j.label = 1;\n case 1:\n _j.trys.push([1, 6, , 7]);\n baseUri = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\");\n }\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n requestUrl = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (_i = 0, _b = operationSpec.urlParameters; _i < _b.length; _i++) {\n urlParameter = _b[_i];\n urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer);\n urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions);\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\"{\" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + \"}\", urlParameterValue);\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (_c = 0, _d = operationSpec.queryParameters; _c < _d.length; _c++) {\n queryParameter = _d[_c];\n queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer);\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions);\n if (queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null) {\n if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n }\n else {\n for (index in queryParameterValue) {\n item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n }\n else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (index in queryParameterValue) {\n if (queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n }\n else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue);\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n if (operationSpec.headerParameters) {\n for (_e = 0, _f = operationSpec.headerParameters; _e < _f.length; _e++) {\n headerParameter = _f[_e];\n headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer);\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions);\n headerCollectionPrefix = headerParameter.mapper\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (_g = 0, _h = Object.keys(headerValue); _g < _h.length; _g++) {\n key = _h[_g];\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n }\n else {\n httpRequest.headers.set(headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter), headerValue);\n }\n }\n }\n }\n options = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n if (options.spanOptions) {\n httpRequest.spanOptions = options.spanOptions;\n }\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n httpRequest.withCredentials = this._withCredentials;\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = 
getStreamResponseStatusCodes(operationSpec);\n }\n rawResponse = void 0;\n sendRequestError = void 0;\n _j.label = 2;\n case 2:\n _j.trys.push([2, 4, , 5]);\n return [4 /*yield*/, this.sendRequest(httpRequest)];\n case 3:\n rawResponse = _j.sent();\n return [3 /*break*/, 5];\n case 4:\n error_1 = _j.sent();\n sendRequestError = error_1;\n return [3 /*break*/, 5];\n case 5:\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]);\n }\n result = Promise.reject(sendRequestError);\n }\n else {\n result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status]));\n }\n return [3 /*break*/, 7];\n case 6:\n error_2 = _j.sent();\n result = Promise.reject(error_2);\n return [3 /*break*/, 7];\n case 7:\n cb = callback;\n if (cb) {\n result\n .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); })\n .catch(function (err) { return cb(err); });\n }\n return [2 /*return*/, result];\n }\n });\n });\n };\n return ServiceClient;\n}());\nfunction serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) {\n var _a, _b, _c, _d, _e, _f;\n var serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {};\n var updatedOptions = {\n rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : \"\",\n includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false,\n xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY\n };\n var xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer);\n var bodyMapper = operationSpec.requestBody.mapper;\n var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix;\n var typeName = bodyMapper.type.name;\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody);\n httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions);\n var isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n var xmlnsKey = xmlNamespacePrefix ? \"xmlns:\" + xmlNamespacePrefix : \"xmlns\";\n var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions);\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), {\n rootName: xmlName || serializedName,\n xmlCharKey: xmlCharKey\n });\n }\n else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey: xmlCharKey\n });\n }\n }\n else if (typeName === MapperType.String &&\n (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match(\"text/plain\")) || operationSpec.mediaType === \"text\")) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n }\n else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n }\n catch (error) {\n throw new Error(\"Error \\\"\" + error.message + \"\\\" occurred in serializing the payload - \" + JSON.stringify(serializedName, undefined, \" \") + \".\");\n }\n }\n else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (var _i = 0, _g = operationSpec.formDataParameters; _i < _g.length; _i++) {\n var formDataParameter = _g[_i];\n var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer);\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions);\n }\n }\n }\n}\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) {\n var _a;\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n var result = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = xmlNamespace, _a);\n return result;\n }\n return serializedValue;\n}\nfunction getValueOrFunctionResult(value, defaultValueCreator) {\n var result;\n if (typeof value === \"string\") {\n result = value;\n }\n else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\nfunction createDefaultRequestPolicyFactories(authPolicyFactory, options) {\n var factories = [];\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName);\n var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue);\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n {\n factories.push(proxyPolicy(options.proxySettings));\n }\n factories.push(logPolicy({ logger: logger.info }));\n return factories;\n}\nfunction createPipelineFromOptions(pipelineOptions, authPolicyFactory) {\n var requestPolicyFactories = [];\n if (pipelineOptions.sendStreamingJson) {\n 
requestPolicyFactories.push(ndJsonPolicy());\n }\n var userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n var userAgentInfo = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n var defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n userAgentValue = userAgentInfo.join(\" \");\n }\n var keepAliveOptions = tslib.__assign(tslib.__assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);\n var retryOptions = tslib.__assign(tslib.__assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);\n var redirectOptions = tslib.__assign(tslib.__assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);\n {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n var deserializationOptions = tslib.__assign(tslib.__assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);\n var loggingOptions = tslib.__assign({}, pipelineOptions.loggingOptions);\n requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs));\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n requestPolicyFactories.push(logPolicy(loggingOptions));\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n return {\n httpClient: pipelineOptions.httpClient,\n requestPolicyFactories: requestPolicyFactories\n };\n}\nfunction getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) {\n return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer);\n}\nfunction getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) {\n var _a;\n var value;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n var serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n }\n else {\n var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath);\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n var useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n // Serialize just for validation purposes.\n var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper);\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n }\n else {\n if (parameterMapper.required) {\n value = {};\n }\n for (var propertyName in parameterPath) {\n var propertyMapper = parameterMapper.type.modelProperties[propertyName];\n var propertyPath = parameterPath[propertyName];\n var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer);\n // Serialize just for validation purposes.\n var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper);\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\nfunction getPropertyFromParameterPath(parent, parameterPath) {\n var result = { propertyFound: false };\n var i = 0;\n for (; i < parameterPath.length; ++i) {\n var parameterPathPart = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n }\n else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\nfunction flattenResponse(_response, responseSpec) {\n var parsedHeaders = _response.parsedHeaders;\n var bodyMapper = responseSpec && responseSpec.bodyMapper;\n var addOperationResponse = function (obj) {\n return Object.defineProperty(obj, \"_response\", {\n value: _response\n });\n };\n if (bodyMapper) {\n var typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody }));\n }\n var modelProperties_1 = (typeName === \"Composite\" && bodyMapper.type.modelProperties) || {};\n var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === \"\"; });\n if (typeName === \"Sequence\" || isPageableResponse) {\n var arrayResponse = tslib.__spreadArray([], (_response.parsedBody || []));\n for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) {\n var key = _a[_i];\n if (modelProperties_1[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n if (parsedHeaders) {\n for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) {\n var key = _c[_b];\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody));\n }\n }\n if (bodyMapper ||\n _response.request.method === \"HEAD\" ||\n isPrimitiveType(_response.parsedBody)) {\n // primitive body types and HEAD booleans\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody }));\n }\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody));\n}\nfunction 
getCredentialScopes(options, baseUri) {\n if (options === null || options === void 0 ? void 0 : options.credentialScopes) {\n var scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map(function (scope) { return new url.URL(scope).toString(); })\n : new url.URL(scopes).toString();\n }\n if (baseUri) {\n return baseUri + \"/.default\";\n }\n return undefined;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nfunction createSpanFunction(args) {\n return coreTracing.createSpanFunction(args);\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Defines the default token refresh buffer duration.\n */\nvar TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nvar ExpiringAccessTokenCache = /** @class */ (function () {\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n function ExpiringAccessTokenCache(tokenRefreshBufferMs) {\n if (tokenRefreshBufferMs === void 0) { tokenRefreshBufferMs = TokenRefreshBufferMs; }\n this.cachedToken = undefined;\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n ExpiringAccessTokenCache.prototype.setCachedToken = function (accessToken) {\n this.cachedToken = accessToken;\n };\n ExpiringAccessTokenCache.prototype.getCachedToken = function () {\n if (this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) {\n this.cachedToken = undefined;\n }\n return this.cachedToken;\n };\n return ExpiringAccessTokenCache;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nvar AccessTokenRefresher = /** @class */ (function () {\n function AccessTokenRefresher(credential, scopes, requiredMillisecondsBeforeNewRefresh) {\n if (requiredMillisecondsBeforeNewRefresh === void 0) { requiredMillisecondsBeforeNewRefresh = 30000; }\n this.credential = credential;\n this.scopes = scopes;\n this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh;\n this.lastCalled = 0;\n }\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n AccessTokenRefresher.prototype.isReady = function () {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh);\n };\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns the token.\n */\n AccessTokenRefresher.prototype.getToken = function (options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var token;\n return tslib.__generator(this, function (_a) {\n switch 
(_a.label) {\n case 0:\n this.lastCalled = Date.now();\n return [4 /*yield*/, this.credential.getToken(this.scopes, options)];\n case 1:\n token = _a.sent();\n this.promise = undefined;\n return [2 /*return*/, token || undefined];\n }\n });\n });\n };\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n AccessTokenRefresher.prototype.refresh = function (options) {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n return this.promise;\n };\n return AccessTokenRefresher;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar HeaderConstants = Constants.HeaderConstants;\nvar DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\nvar BasicAuthenticationCredentials = /** @class */ (function () {\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n function BasicAuthenticationCredentials(userName, password, authorizationScheme) {\n if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; }\n this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME;\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n BasicAuthenticationCredentials.prototype.signRequest = function (webResource) {\n var credentials = this.userName + \":\" + this.password;\n var encodedCredentials = this.authorizationScheme + \" \" + encodeString(credentials);\n if (!webResource.headers)\n webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n };\n return BasicAuthenticationCredentials;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Authenticates to a service using an API key.\n */\nvar ApiKeyCredentials = /** @class */ (function () {\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n function ApiKeyCredentials(options) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\"options cannot be null or undefined. 
Either \\\"inHeader\\\" or \\\"inQuery\\\" property of the options object needs to be provided.\");\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n ApiKeyCredentials.prototype.signRequest = function (webResource) {\n if (!webResource) {\n return Promise.reject(new Error(\"webResource cannot be null or undefined and must be of type \\\"object\\\".\"));\n }\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (var headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(\"url cannot be null in the request object.\"));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (var key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += key + \"=\" + this.inQuery[key];\n }\n }\n return Promise.resolve(webResource);\n };\n return ApiKeyCredentials;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar TopicCredentials = /** @class */ (function (_super) {\n tslib.__extends(TopicCredentials, _super);\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n function TopicCredentials(topicKey) {\n var _this = this;\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n var options = {\n inHeader: {\n \"aeg-sas-key\": topicKey\n }\n };\n _this = _super.call(this, options) || this;\n return _this;\n }\n return TopicCredentials;\n}(ApiKeyCredentials));\n\nObject.defineProperty(exports, 'isTokenCredential', {\n enumerable: true,\n get: function () {\n return coreAuth.isTokenCredential;\n }\n});\nexports.AccessTokenRefresher = AccessTokenRefresher;\nexports.ApiKeyCredentials = ApiKeyCredentials;\nexports.BaseRequestPolicy = BaseRequestPolicy;\nexports.BasicAuthenticationCredentials = BasicAuthenticationCredentials;\nexports.Constants = Constants;\nexports.DefaultHttpClient = NodeFetchHttpClient;\nexports.ExpiringAccessTokenCache = ExpiringAccessTokenCache;\nexports.HttpHeaders = HttpHeaders;\nexports.MapperType = MapperType;\nexports.RequestPolicyOptions = RequestPolicyOptions;\nexports.RestError = RestError;\nexports.Serializer = Serializer;\nexports.ServiceClient = ServiceClient;\nexports.TopicCredentials = TopicCredentials;\nexports.URLBuilder = URLBuilder;\nexports.URLQuery = URLQuery;\nexports.WebResource = WebResource;\nexports.XML_ATTRKEY = XML_ATTRKEY;\nexports.XML_CHARKEY = XML_CHARKEY;\nexports.applyMixins = applyMixins;\nexports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy;\nexports.createPipelineFromOptions = createPipelineFromOptions;\nexports.createSpanFunction = createSpanFunction;\nexports.delay = delay;\nexports.deserializationPolicy = deserializationPolicy;\nexports.deserializeResponseBody = deserializeResponseBody;\nexports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy;\nexports.encodeUri = encodeUri;\nexports.executePromisesSequentially = executePromisesSequentially;\nexports.exponentialRetryPolicy = exponentialRetryPolicy;\nexports.flattenResponse = 
flattenResponse;\nexports.generateClientRequestIdPolicy = generateClientRequestIdPolicy;\nexports.generateUuid = generateUuid;\nexports.getDefaultProxySettings = getDefaultProxySettings;\nexports.getDefaultUserAgentValue = getDefaultUserAgentValue;\nexports.isDuration = isDuration;\nexports.isNode = isNode;\nexports.isValidUuid = isValidUuid;\nexports.keepAlivePolicy = keepAlivePolicy;\nexports.logPolicy = logPolicy;\nexports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase;\nexports.parseXML = parseXML;\nexports.promiseToCallback = promiseToCallback;\nexports.promiseToServiceCallback = promiseToServiceCallback;\nexports.proxyPolicy = proxyPolicy;\nexports.redirectPolicy = redirectPolicy;\nexports.serializeObject = serializeObject;\nexports.signingPolicy = signingPolicy;\nexports.stringifyXML = stringifyXML;\nexports.stripRequest = stripRequest;\nexports.stripResponse = stripResponse;\nexports.systemErrorRetryPolicy = systemErrorRetryPolicy;\nexports.throttlingRetryPolicy = throttlingRetryPolicy;\nexports.tracingPolicy = tracingPolicy;\nexports.userAgentPolicy = userAgentPolicy;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar api = require('@opentelemetry/api');\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A no-op implementation of Span that can safely be used without side-effects.\n */\nvar NoOpSpan = /** @class */ (function () {\n function NoOpSpan() {\n }\n /**\n * Returns the SpanContext associated with this Span.\n */\n NoOpSpan.prototype.context = function () {\n return {\n spanId: \"\",\n traceId: \"\",\n traceFlags: 0 /* NONE */\n };\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n NoOpSpan.prototype.end = function (_endTime) {\n /* Noop */\n };\n /**\n * Sets an attribute on the Span\n * @param _key - The attribute key\n * @param _value - The attribute value\n */\n NoOpSpan.prototype.setAttribute = function (_key, _value) {\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param _attributes - The attributes to add\n */\n NoOpSpan.prototype.setAttributes = function (_attributes) {\n return this;\n };\n /**\n * Adds an event to the Span\n * @param _name - The name of the event\n * @param _attributes - The associated attributes to add for this event\n */\n NoOpSpan.prototype.addEvent = function (_name, _attributes) {\n return this;\n };\n /**\n * Sets a status on the span. Overrides the default of SpanStatusCode.OK.\n * @param _status - The status to set.\n */\n NoOpSpan.prototype.setStatus = function (_status) {\n return this;\n };\n /**\n * Updates the name of the Span\n * @param _name - the new Span name\n */\n NoOpSpan.prototype.updateName = function (_name) {\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n NoOpSpan.prototype.isRecording = function () {\n return false;\n };\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n NoOpSpan.prototype.recordException = function (_exception, _time) {\n /* do nothing */\n };\n return NoOpSpan;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A no-op implementation of Tracer that can be used when tracing\n * is disabled.\n */\nvar NoOpTracer = /** @class */ (function () {\n function NoOpTracer() {\n }\n /**\n * Starts a new Span.\n * @param _name - The name of the span.\n * @param _options - The SpanOptions used during Span creation.\n */\n NoOpTracer.prototype.startSpan = function (_name, _options) {\n return new NoOpSpan();\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n NoOpTracer.prototype.getCurrentSpan = function () {\n return new NoOpSpan();\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span - The span that provides the context.\n * @param fn - The function to be executed.\n */\n NoOpTracer.prototype.withSpan = function (_span, fn) {\n return fn();\n };\n /**\n * Bind a Span as the target's scope\n * @param target - An object to bind the scope.\n * @param _span - A specific Span to use. Otherwise, use the current one.\n */\n NoOpTracer.prototype.bind = function (target, _span) {\n return target;\n };\n return NoOpTracer;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nfunction getGlobalObject() {\n return global;\n}\n\n// Copyright (c) Microsoft Corporation.\n// V1 = OpenTelemetry 0.1\n// V2 = OpenTelemetry 0.2\n// V3 = OpenTelemetry 0.6.1\n// V4 = OpenTelemetry 1.0.0-rc.0\nvar GLOBAL_TRACER_VERSION = 4;\n// preview5 shipped with @azure/core-tracing.tracerCache\n// and didn't have smart detection for collisions\nvar GLOBAL_TRACER_SYMBOL = Symbol.for(\"@azure/core-tracing.tracerCache3\");\nvar cache;\nfunction loadTracerCache() {\n var globalObj = getGlobalObject();\n var existingCache = globalObj[GLOBAL_TRACER_SYMBOL];\n var setGlobalCache = true;\n if (existingCache) {\n if (existingCache.version === GLOBAL_TRACER_VERSION) {\n cache = existingCache;\n }\n else {\n setGlobalCache = false;\n if (existingCache.tracer) {\n throw new Error(\"Two incompatible versions of @azure/core-tracing have been loaded.\\n This library is \" + GLOBAL_TRACER_VERSION + \", existing is \" + existingCache.version + \".\");\n }\n }\n }\n if (!cache) {\n cache = {\n tracer: undefined,\n version: GLOBAL_TRACER_VERSION\n };\n }\n if (setGlobalCache) {\n globalObj[GLOBAL_TRACER_SYMBOL] = cache;\n }\n}\nfunction getCache() {\n if (!cache) {\n loadTracerCache();\n }\n return cache;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar defaultTracer;\nfunction getDefaultTracer() {\n if (!defaultTracer) {\n defaultTracer = new NoOpTracer();\n }\n return defaultTracer;\n}\n/**\n * Sets the global tracer, enabling tracing for the Azure SDK.\n * @param tracer - An OpenTelemetry Tracer instance.\n */\nfunction setTracer(tracer) {\n var cache = getCache();\n cache.tracer = tracer;\n}\n/**\n * Retrieves the active tracer, or returns a\n * no-op implementation if one is not set.\n */\nfunction getTracer() {\n var cache = getCache();\n if (!cache.tracer) {\n return getDefaultTracer();\n }\n return cache.tracer;\n}\n\n// Copyright (c) Microsoft Corporation.\n(function (SpanKind) {\n /** Default value. Indicates that the span is used internally. 
*/\n SpanKind[SpanKind[\"INTERNAL\"] = 0] = \"INTERNAL\";\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SpanKind[SpanKind[\"SERVER\"] = 1] = \"SERVER\";\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n SpanKind[SpanKind[\"CLIENT\"] = 2] = \"CLIENT\";\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"PRODUCER\"] = 3] = \"PRODUCER\";\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"CONSUMER\"] = 4] = \"CONSUMER\";\n})(exports.SpanKind || (exports.SpanKind = {}));\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nfunction getSpan(context) {\n return api.getSpan(context);\n}\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nfunction setSpan(context, span) {\n return api.setSpan(context, span);\n}\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nfunction setSpanContext(context, spanContext) {\n return api.setSpanContext(context, spanContext);\n}\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nfunction getSpanContext(context) {\n return api.getSpanContext(context);\n}\n/** Entrypoint for context API */\nvar context = api.context;\n(function (SpanStatusCode) {\n /**\n * The default status.\n */\n SpanStatusCode[SpanStatusCode[\"UNSET\"] = 0] = \"UNSET\";\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n SpanStatusCode[SpanStatusCode[\"OK\"] = 1] = \"OK\";\n /**\n * The operation contains an error.\n */\n SpanStatusCode[SpanStatusCode[\"ERROR\"] = 2] = \"ERROR\";\n})(exports.SpanStatusCode || (exports.SpanStatusCode = {}));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * @internal\n */\nvar OpenCensusTraceStateWrapper = /** @class */ (function () {\n function OpenCensusTraceStateWrapper(state) {\n this._state = state;\n }\n OpenCensusTraceStateWrapper.prototype.get = function (_key) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.unset = function (_key) {\n throw new Error(\"Method not implemented\");\n };\n OpenCensusTraceStateWrapper.prototype.serialize = function () {\n return this._state || \"\";\n };\n return OpenCensusTraceStateWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/** An enumeration of canonical status codes. */\nvar CanonicalCode;\n(function (CanonicalCode) {\n /**\n * Not an error; returned on success\n */\n CanonicalCode[CanonicalCode[\"OK\"] = 0] = \"OK\";\n /**\n * Internal errors. Means some invariants expected by underlying\n * system has been broken. 
If you see one of these errors,\n * something is very broken.\n */\n CanonicalCode[CanonicalCode[\"INTERNAL\"] = 13] = \"INTERNAL\";\n})(CanonicalCode || (CanonicalCode = {}));\nfunction isWrappedSpan(span) {\n return !!span && span.getWrappedSpan !== undefined;\n}\nfunction isTracer(tracerOrSpan) {\n return tracerOrSpan.getWrappedTracer !== undefined;\n}\n/**\n * An implementation of OpenTelemetry Span that wraps an OpenCensus Span.\n */\nvar OpenCensusSpanWrapper = /** @class */ (function () {\n function OpenCensusSpanWrapper(tracerOrSpan, name, options, context$1) {\n if (name === void 0) { name = \"\"; }\n if (options === void 0) { options = {}; }\n if (isTracer(tracerOrSpan)) {\n var span = getSpan(context$1 !== null && context$1 !== void 0 ? context$1 : context.active());\n var parent = isWrappedSpan(span) ? span.getWrappedSpan() : undefined;\n this._span = tracerOrSpan.getWrappedTracer().startChildSpan({\n name: name,\n childOf: parent\n });\n this._span.start();\n if (options.links) {\n for (var _i = 0, _a = options.links; _i < _a.length; _i++) {\n var link = _a[_i];\n // Since there is no way to set the link relationship, leave it as Unspecified.\n this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes);\n }\n }\n }\n else {\n this._span = tracerOrSpan;\n }\n }\n /**\n * The underlying OpenCensus Span\n */\n OpenCensusSpanWrapper.prototype.getWrappedSpan = function () {\n return this._span;\n };\n /**\n * Marks the end of Span execution.\n * @param endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n OpenCensusSpanWrapper.prototype.end = function (_endTime) {\n this._span.end();\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n OpenCensusSpanWrapper.prototype.context = function () {\n var openCensusSpanContext = this._span.spanContext;\n return {\n spanId: openCensusSpanContext.spanId,\n traceId: openCensusSpanContext.traceId,\n traceFlags: openCensusSpanContext.options,\n traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState)\n };\n };\n /**\n * Sets an attribute on the Span\n * @param key - The attribute key\n * @param value - The attribute value\n */\n OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) {\n this._span.addAttribute(key, value);\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes - The attributes to add\n */\n OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) {\n this._span.attributes = attributes;\n return this;\n };\n /**\n * Adds an event to the Span\n * @param name - The name of the event\n * @param attributes - The associated attributes to add for this event\n */\n OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Sets a status on the span. 
Overrides the default of SpanStatusCode.OK.\n * @param status - The status to set.\n */\n OpenCensusSpanWrapper.prototype.setStatus = function (status) {\n switch (status.code) {\n case exports.SpanStatusCode.ERROR: {\n this._span.setStatus(CanonicalCode.INTERNAL, status.message);\n break;\n }\n case exports.SpanStatusCode.OK: {\n this._span.setStatus(CanonicalCode.OK, status.message);\n break;\n }\n case exports.SpanStatusCode.UNSET: {\n break;\n }\n }\n return this;\n };\n /**\n * Updates the name of the Span\n * @param name - The new Span name\n */\n OpenCensusSpanWrapper.prototype.updateName = function (name) {\n this._span.name = name;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n OpenCensusSpanWrapper.prototype.isRecording = function () {\n // NoRecordSpans have an empty traceId\n return !!this._span.traceId;\n };\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n OpenCensusSpanWrapper.prototype.recordException = function (_exception, _time) {\n throw new Error(\"Method not implemented\");\n };\n return OpenCensusSpanWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer.\n */\nvar OpenCensusTracerWrapper = /** @class */ (function () {\n /**\n * Create a new wrapper around a given OpenCensus Tracer.\n * @param tracer - The OpenCensus Tracer to wrap.\n */\n function OpenCensusTracerWrapper(tracer) {\n this._tracer = tracer;\n }\n /**\n * The wrapped OpenCensus Tracer\n */\n OpenCensusTracerWrapper.prototype.getWrappedTracer = function () {\n return this._tracer;\n };\n /**\n * Starts a new Span.\n * @param name - The name of the span.\n * @param options - The SpanOptions used during Span creation.\n */\n OpenCensusTracerWrapper.prototype.startSpan = function (name, options) {\n return new OpenCensusSpanWrapper(this, name, options);\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n OpenCensusTracerWrapper.prototype.getCurrentSpan = function () {\n return undefined;\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span - The span that provides the context.\n * @param _fn - The function to be executed.\n */\n OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Bind a Span as the target's scope\n * @param target - An object to bind the scope.\n * @param _span - A specific Span to use. 
Otherwise, use the current one.\n */\n OpenCensusTracerWrapper.prototype.bind = function (_target, _span) {\n throw new Error(\"Method not implemented.\");\n };\n return OpenCensusTracerWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock span useful for testing.\n */\nvar TestSpan = /** @class */ (function (_super) {\n tslib.__extends(TestSpan, _super);\n /**\n * Starts a new Span.\n * @param parentTracer- The tracer that created this Span\n * @param name - The name of the span.\n * @param context - The SpanContext this span belongs to\n * @param kind - The SpanKind of this Span\n * @param parentSpanId - The identifier of the parent Span\n * @param startTime - The startTime of the event (defaults to now)\n */\n function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) {\n if (startTime === void 0) { startTime = Date.now(); }\n var _this = _super.call(this) || this;\n _this._tracer = parentTracer;\n _this.name = name;\n _this.kind = kind;\n _this.startTime = startTime;\n _this.parentSpanId = parentSpanId;\n _this.status = {\n code: exports.SpanStatusCode.OK\n };\n _this.endCalled = false;\n _this._context = context;\n _this.attributes = {};\n return _this;\n }\n /**\n * Returns the Tracer that created this Span\n */\n TestSpan.prototype.tracer = function () {\n return this._tracer;\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n TestSpan.prototype.context = function () {\n return this._context;\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n TestSpan.prototype.end = function (_endTime) {\n this.endCalled = true;\n };\n /**\n * Sets a status on the span. Overrides the default of SpanStatusCode.OK.\n * @param status - The status to set.\n */\n TestSpan.prototype.setStatus = function (status) {\n this.status = status;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n TestSpan.prototype.isRecording = function () {\n return true;\n };\n /**\n * Sets an attribute on the Span\n * @param key - The attribute key\n * @param value - The attribute value\n */\n TestSpan.prototype.setAttribute = function (key, value) {\n this.attributes[key] = value;\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes - The attributes to add\n */\n TestSpan.prototype.setAttributes = function (attributes) {\n for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) {\n var key = _a[_i];\n this.attributes[key] = attributes[key];\n }\n return this;\n };\n return TestSpan;\n}(NoOpSpan));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock tracer useful for testing\n */\nvar TestTracer = /** @class */ (function (_super) {\n tslib.__extends(TestTracer, _super);\n function TestTracer() {\n var _this = _super !== null && _super.apply(this, arguments) || this;\n _this.traceIdCounter = 0;\n _this.spanIdCounter = 0;\n _this.rootSpans = [];\n _this.knownSpans = [];\n return _this;\n }\n TestTracer.prototype.getNextTraceId = function () {\n this.traceIdCounter++;\n return String(this.traceIdCounter);\n };\n TestTracer.prototype.getNextSpanId = function () {\n this.spanIdCounter++;\n return String(this.spanIdCounter);\n };\n /**\n * Returns all Spans that were created without a parent\n */\n TestTracer.prototype.getRootSpans = function () {\n return this.rootSpans;\n };\n /**\n * Returns all Spans this Tracer knows about\n */\n TestTracer.prototype.getKnownSpans = function () {\n return 
this.knownSpans;\n };\n /**\n * Returns all Spans where end() has not been called\n */\n TestTracer.prototype.getActiveSpans = function () {\n return this.knownSpans.filter(function (span) {\n return !span.endCalled;\n });\n };\n /**\n * Return all Spans for a particular trace, grouped by their\n * parent Span in a tree-like structure\n * @param traceId - The traceId to return the graph for\n */\n TestTracer.prototype.getSpanGraph = function (traceId) {\n var traceSpans = this.knownSpans.filter(function (span) {\n return span.context().traceId === traceId;\n });\n var roots = [];\n var nodeMap = new Map();\n for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) {\n var span = traceSpans_1[_i];\n var spanId = span.context().spanId;\n var node = {\n name: span.name,\n children: []\n };\n nodeMap.set(spanId, node);\n if (span.parentSpanId) {\n var parent = nodeMap.get(span.parentSpanId);\n if (!parent) {\n throw new Error(\"Span with name \" + node.name + \" has an unknown parentSpan with id \" + span.parentSpanId);\n }\n parent.children.push(node);\n }\n else {\n roots.push(node);\n }\n }\n return {\n roots: roots\n };\n };\n /**\n * Starts a new Span.\n * @param name - The name of the span.\n * @param options - The SpanOptions used during Span creation.\n */\n TestTracer.prototype.startSpan = function (name, options, context$1) {\n var parentContext = getSpanContext(context$1 || context.active());\n var traceId;\n var isRootSpan = false;\n if (parentContext && parentContext.traceId) {\n traceId = parentContext.traceId;\n }\n else {\n traceId = this.getNextTraceId();\n isRootSpan = true;\n }\n var spanContext = {\n traceId: traceId,\n spanId: this.getNextSpanId(),\n traceFlags: 0 /* NONE */\n };\n var span = new TestSpan(this, name, spanContext, (options === null || options === void 0 ? void 0 : options.kind) || exports.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options === null || options === void 0 ? void 0 : options.startTime);\n this.knownSpans.push(span);\n if (isRootSpan) {\n this.rootSpans.push(span);\n }\n return span;\n };\n return TestTracer;\n}(NoOpTracer));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nfunction createSpanFunction(args) {\n return function (operationName, operationOptions) {\n var tracer = getTracer();\n var tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {};\n var spanOptions = tslib.__assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions);\n var spanName = args.packagePrefix ? 
args.packagePrefix + \".\" + operationName : operationName;\n var span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n var newSpanOptions = tracingOptions.spanOptions || {};\n if (span.isRecording() && args.namespace) {\n newSpanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { \"az.namespace\": args.namespace }) });\n }\n var newTracingOptions = tslib.__assign(tslib.__assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) });\n var newOperationOptions = tslib.__assign(tslib.__assign({}, operationOptions), { tracingOptions: newTracingOptions });\n return {\n span: span,\n updatedOptions: newOperationOptions\n };\n };\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar VERSION = \"00\";\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nfunction extractSpanContextFromTraceParentHeader(traceParentHeader) {\n var parts = traceParentHeader.split(\"-\");\n if (parts.length !== 4) {\n return;\n }\n var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3];\n if (version !== VERSION) {\n return;\n }\n var traceFlags = parseInt(traceOptions, 16);\n var spanContext = {\n spanId: spanId,\n traceId: traceId,\n traceFlags: traceFlags\n };\n return spanContext;\n}\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nfunction getTraceParentHeader(spanContext) {\n var missingFields = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n if (missingFields.length) {\n return;\n }\n var flags = spanContext.traceFlags || 0 /* NONE */;\n var hexFlags = flags.toString(16);\n var traceFlags = hexFlags.length === 1 ? 
\"0\" + hexFlags : hexFlags;\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return VERSION + \"-\" + spanContext.traceId + \"-\" + spanContext.spanId + \"-\" + traceFlags;\n}\n\nexports.NoOpSpan = NoOpSpan;\nexports.NoOpTracer = NoOpTracer;\nexports.OpenCensusSpanWrapper = OpenCensusSpanWrapper;\nexports.OpenCensusTracerWrapper = OpenCensusTracerWrapper;\nexports.TestSpan = TestSpan;\nexports.TestTracer = TestTracer;\nexports.context = context;\nexports.createSpanFunction = createSpanFunction;\nexports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader;\nexports.getSpan = getSpan;\nexports.getSpanContext = getSpanContext;\nexports.getTraceParentHeader = getTraceParentHeader;\nexports.getTracer = getTracer;\nexports.setSpan = setSpan;\nexports.setSpanContext = setSpanContext;\nexports.setTracer = setTracer;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __spreadArrays = (this && this.__spreadArrays) || function () {\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\n r[k] = a[j];\n return r;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ContextAPI = void 0;\nvar NoopContextManager_1 = require(\"../context/NoopContextManager\");\nvar global_utils_1 = require(\"../internal/global-utils\");\nvar API_NAME = 'context';\nvar NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager();\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nvar ContextAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function ContextAPI() {\n }\n /** Get the singleton instance of the Context API */\n ContextAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n return this._instance;\n };\n /**\n * Set the current context manager. 
Returns the initialized context manager\n */\n ContextAPI.prototype.setGlobalContextManager = function (contextManager) {\n global_utils_1.registerGlobal(API_NAME, contextManager);\n return contextManager;\n };\n /**\n * Get the currently active context\n */\n ContextAPI.prototype.active = function () {\n return this._getContextManager().active();\n };\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n ContextAPI.prototype.with = function (context, fn, thisArg) {\n var _a;\n var args = [];\n for (var _i = 3; _i < arguments.length; _i++) {\n args[_i - 3] = arguments[_i];\n }\n return (_a = this._getContextManager()).with.apply(_a, __spreadArrays([context, fn, thisArg], args));\n };\n /**\n * Bind a context to a target function or event emitter\n *\n * @param target function or event emitter to bind\n * @param context context to bind to the event emitter or function. Defaults to the currently active context\n */\n ContextAPI.prototype.bind = function (target, context) {\n if (context === void 0) { context = this.active(); }\n return this._getContextManager().bind(target, context);\n };\n ContextAPI.prototype._getContextManager = function () {\n return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n };\n /** Disable and remove the global context manager */\n ContextAPI.prototype.disable = function () {\n this._getContextManager().disable();\n global_utils_1.unregisterGlobal(API_NAME);\n };\n return ContextAPI;\n}());\nexports.ContextAPI = ContextAPI;\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DiagAPI = void 0;\nvar logLevelLogger_1 = require(\"../diag/internal/logLevelLogger\");\nvar types_1 = require(\"../diag/types\");\nvar global_utils_1 = require(\"../internal/global-utils\");\nvar API_NAME = 'diag';\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nvar DiagAPI = /** @class */ (function () {\n /**\n * Private internal constructor\n * @private\n */\n function DiagAPI() {\n function _logProxy(funcName) {\n return function () {\n var logger = global_utils_1.getGlobal('diag');\n // shortcut if logger not set\n if (!logger)\n return;\n return logger[funcName].apply(logger, \n // work around Function.prototype.apply types\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n arguments);\n };\n }\n // Using self local variable for minification purposes as 'this' cannot be minified\n var self = this;\n // DiagAPI specific functions\n self.setLogger = function (logger, logLevel) {\n var _a;\n if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; }\n if (logger === self) 
{\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation');\n self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message);\n return;\n }\n global_utils_1.registerGlobal('diag', logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger), true);\n };\n self.disable = function () {\n global_utils_1.unregisterGlobal(API_NAME);\n };\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n /** Get the singleton instance of the DiagAPI API */\n DiagAPI.instance = function () {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n return this._instance;\n };\n return DiagAPI;\n}());\nexports.DiagAPI = DiagAPI;\n//# sourceMappingURL=diag.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PropagationAPI = void 0;\nvar NoopTextMapPropagator_1 = require(\"../propagation/NoopTextMapPropagator\");\nvar TextMapPropagator_1 = require(\"../propagation/TextMapPropagator\");\nvar global_utils_1 = require(\"../internal/global-utils\");\nvar API_NAME = 'propagation';\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nvar PropagationAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function PropagationAPI() {\n }\n /** Get the singleton instance of the Propagator API */\n PropagationAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n return this._instance;\n };\n /**\n * Set the current propagator. 
Returns the initialized propagator\n */\n PropagationAPI.prototype.setGlobalPropagator = function (propagator) {\n global_utils_1.registerGlobal(API_NAME, propagator);\n return propagator;\n };\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n PropagationAPI.prototype.inject = function (context, carrier, setter) {\n if (setter === void 0) { setter = TextMapPropagator_1.defaultTextMapSetter; }\n return this._getGlobalPropagator().inject(context, carrier, setter);\n };\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n PropagationAPI.prototype.extract = function (context, carrier, getter) {\n if (getter === void 0) { getter = TextMapPropagator_1.defaultTextMapGetter; }\n return this._getGlobalPropagator().extract(context, carrier, getter);\n };\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n PropagationAPI.prototype.fields = function () {\n return this._getGlobalPropagator().fields();\n };\n /** Remove the global propagator */\n PropagationAPI.prototype.disable = function () {\n global_utils_1.unregisterGlobal(API_NAME);\n };\n PropagationAPI.prototype._getGlobalPropagator = function () {\n return global_utils_1.getGlobal(API_NAME) || NoopTextMapPropagator_1.NOOP_TEXT_MAP_PROPAGATOR;\n };\n return PropagationAPI;\n}());\nexports.PropagationAPI = PropagationAPI;\n//# sourceMappingURL=propagation.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceAPI = void 0;\nvar ProxyTracerProvider_1 = require(\"../trace/ProxyTracerProvider\");\nvar spancontext_utils_1 = require(\"../trace/spancontext-utils\");\nvar global_utils_1 = require(\"../internal/global-utils\");\nvar API_NAME = 'trace';\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nvar TraceAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function TraceAPI() {\n this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider();\n this.isSpanContextValid = spancontext_utils_1.isSpanContextValid;\n }\n /** Get the singleton instance of the Trace API */\n TraceAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n return this._instance;\n };\n /**\n * Set the current global tracer. 
Returns the initialized global tracer provider\n */\n TraceAPI.prototype.setGlobalTracerProvider = function (provider) {\n this._proxyTracerProvider.setDelegate(provider);\n global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider);\n return this._proxyTracerProvider;\n };\n /**\n * Returns the global tracer provider.\n */\n TraceAPI.prototype.getTracerProvider = function () {\n return global_utils_1.getGlobal(API_NAME) || this._proxyTracerProvider;\n };\n /**\n * Returns a tracer from the global tracer provider.\n */\n TraceAPI.prototype.getTracer = function (name, version) {\n return this.getTracerProvider().getTracer(name, version);\n };\n /** Remove the global tracer provider */\n TraceAPI.prototype.disable = function () {\n global_utils_1.unregisterGlobal(API_NAME);\n this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider();\n };\n return TraceAPI;\n}());\nexports.TraceAPI = TraceAPI;\n//# sourceMappingURL=trace.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Baggage.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Entry.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.baggageEntryMetadataFromString = exports.createBaggage = void 0;\nvar baggage_1 = require(\"./internal/baggage\");\nvar symbol_1 = require(\"./internal/symbol\");\n__exportStar(require(\"./Baggage\"), exports);\n__exportStar(require(\"./Entry\"), exports);\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nfunction createBaggage(entries) {\n if (entries === void 0) { entries = {}; }\n return new baggage_1.BaggageImpl(new Map(Object.entries(entries)));\n}\nexports.createBaggage = createBaggage;\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. Format is currently not defined by the spec and has no special meaning.\n *\n */\nfunction baggageEntryMetadataFromString(str) {\n if (typeof str !== 'string') {\n // @TODO log diagnostic\n str = '';\n }\n return {\n __TYPE__: symbol_1.baggageEntryMetadataSymbol,\n toString: function () {\n return str;\n },\n };\n}\nexports.baggageEntryMetadataFromString = baggageEntryMetadataFromString;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BaggageImpl = void 0;\nvar BaggageImpl = /** @class */ (function () {\n function BaggageImpl(entries) {\n this._entries = entries ? 
new Map(entries) : new Map();\n }\n BaggageImpl.prototype.getEntry = function (key) {\n var entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n return Object.assign({}, entry);\n };\n BaggageImpl.prototype.getAllEntries = function () {\n return Array.from(this._entries.entries()).map(function (_a) {\n var k = _a[0], v = _a[1];\n return [k, v];\n });\n };\n BaggageImpl.prototype.setEntry = function (key, entry) {\n var newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n };\n BaggageImpl.prototype.removeEntry = function (key) {\n var newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n };\n BaggageImpl.prototype.removeEntries = function () {\n var keys = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n keys[_i] = arguments[_i];\n }\n var newBaggage = new BaggageImpl(this._entries);\n for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) {\n var key = keys_1[_a];\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n };\n BaggageImpl.prototype.clear = function () {\n return new BaggageImpl();\n };\n return BaggageImpl;\n}());\nexports.BaggageImpl = BaggageImpl;\n//# sourceMappingURL=baggage.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.baggageEntryMetadataSymbol = void 0;\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n//# sourceMappingURL=symbol.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Exception.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Time.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __spreadArrays = (this && this.__spreadArrays) || function () {\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\n r[k] = a[j];\n return r;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NoopContextManager = void 0;\nvar context_1 = require(\"./context\");\nvar NoopContextManager = /** @class */ (function () {\n function NoopContextManager() {\n }\n NoopContextManager.prototype.active = function () {\n return context_1.ROOT_CONTEXT;\n };\n NoopContextManager.prototype.with = function (_context, fn, thisArg) {\n var args = [];\n for (var _i = 3; _i < arguments.length; _i++) {\n args[_i - 3] = arguments[_i];\n }\n return fn.call.apply(fn, __spreadArrays([thisArg], args));\n };\n NoopContextManager.prototype.bind = function (target, _context) {\n return target;\n };\n NoopContextManager.prototype.enable = function () {\n return this;\n };\n NoopContextManager.prototype.disable = function () {\n return this;\n };\n return NoopContextManager;\n}());\nexports.NoopContextManager = NoopContextManager;\n//# sourceMappingURL=NoopContextManager.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ROOT_CONTEXT = exports.createContextKey = exports.setBaggage = exports.getBaggage = exports.isInstrumentationSuppressed = exports.unsuppressInstrumentation = exports.suppressInstrumentation = exports.getSpanContext = exports.setSpanContext = exports.setSpan = exports.getSpan = void 0;\nvar NoopSpan_1 = require(\"../trace/NoopSpan\");\n/**\n * span key\n */\nvar SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n/**\n * Shared key for indicating if instrumentation should be suppressed beyond\n * this current scope.\n */\nvar SUPPRESS_INSTRUMENTATION_KEY = createContextKey('OpenTelemetry Context Key SUPPRESS_INSTRUMENTATION');\n/**\n * Baggage key\n */\nvar BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nfunction getSpan(context) {\n return context.getValue(SPAN_KEY) || undefined;\n}\nexports.getSpan = getSpan;\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nfunction setSpan(context, span) {\n return context.setValue(SPAN_KEY, span);\n}\nexports.setSpan = setSpan;\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nfunction setSpanContext(context, spanContext) {\n return setSpan(context, new 
NoopSpan_1.NoopSpan(spanContext));\n}\nexports.setSpanContext = setSpanContext;\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nfunction getSpanContext(context) {\n var _a;\n return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.context();\n}\nexports.getSpanContext = getSpanContext;\n/**\n * Sets value on context to indicate that instrumentation should\n * be suppressed beyond this current scope.\n *\n * @param context context to set the suppress instrumentation value on.\n */\nfunction suppressInstrumentation(context) {\n return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, true);\n}\nexports.suppressInstrumentation = suppressInstrumentation;\n/**\n * Sets value on context to indicate that instrumentation should\n * no-longer be suppressed beyond this current scope.\n *\n * @param context context to set the suppress instrumentation value on.\n */\nfunction unsuppressInstrumentation(context) {\n return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, false);\n}\nexports.unsuppressInstrumentation = unsuppressInstrumentation;\n/**\n * Return current suppress instrumentation value for the given context,\n * if it exists.\n *\n * @param context context check for the suppress instrumentation value.\n */\nfunction isInstrumentationSuppressed(context) {\n return Boolean(context.getValue(SUPPRESS_INSTRUMENTATION_KEY));\n}\nexports.isInstrumentationSuppressed = isInstrumentationSuppressed;\n/**\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nfunction getBaggage(context) {\n return context.getValue(BAGGAGE_KEY) || undefined;\n}\nexports.getBaggage = getBaggage;\n/**\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nfunction setBaggage(context, baggage) {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\nexports.setBaggage = setBaggage;\n/** Get a key to uniquely identify a context value */\nfunction createContextKey(description) {\n return Symbol.for(description);\n}\nexports.createContextKey = createContextKey;\nvar BaseContext = /** @class */ (function () {\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n function BaseContext(parentContext) {\n // for minification\n var self = this;\n self._currentContext = parentContext ? 
new Map(parentContext) : new Map();\n self.getValue = function (key) { return self._currentContext.get(key); };\n self.setValue = function (key, value) {\n var context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n self.deleteValue = function (key) {\n var context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n return BaseContext;\n}());\n/** The root context is used as the default parent context when there is no active context */\nexports.ROOT_CONTEXT = new BaseContext();\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=types.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DiagConsoleLogger = void 0;\nvar consoleMap = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n/**\n * A simple Immutable Console based diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nvar DiagConsoleLogger = /** @class */ (function () {\n function DiagConsoleLogger() {\n function _consoleFunc(funcName) {\n return function () {\n var orgArguments = arguments;\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n var theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n theFunc = console.log;\n }\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, orgArguments);\n }\n }\n };\n }\n for (var i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n return DiagConsoleLogger;\n}());\nexports.DiagConsoleLogger = DiagConsoleLogger;\n//# sourceMappingURL=consoleLogger.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy 
of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./consoleLogger\"), exports);\n__exportStar(require(\"./types\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createLogLevelDiagLogger = void 0;\nvar types_1 = require(\"../types\");\nfunction createLogLevelDiagLogger(maxLevel, logger) {\n if (maxLevel < types_1.DiagLogLevel.NONE) {\n maxLevel = types_1.DiagLogLevel.NONE;\n }\n else if (maxLevel > types_1.DiagLogLevel.ALL) {\n maxLevel = types_1.DiagLogLevel.ALL;\n }\n // In case the logger is null or undefined\n logger = logger || {};\n function _filterFunc(funcName, theLevel) {\n var theFunc = logger[funcName];\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () { };\n }\n return {\n error: _filterFunc('error', types_1.DiagLogLevel.ERROR),\n warn: _filterFunc('warn', types_1.DiagLogLevel.WARN),\n info: _filterFunc('info', types_1.DiagLogLevel.INFO),\n debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE),\n };\n}\nexports.createLogLevelDiagLogger = createLogLevelDiagLogger;\n//# sourceMappingURL=logLevelLogger.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true 
});\nexports.DiagLogLevel = void 0;\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nvar DiagLogLevel;\n(function (DiagLogLevel) {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n DiagLogLevel[DiagLogLevel[\"NONE\"] = 0] = \"NONE\";\n /** Identifies an error scenario */\n DiagLogLevel[DiagLogLevel[\"ERROR\"] = 30] = \"ERROR\";\n /** Identifies a warning scenario */\n DiagLogLevel[DiagLogLevel[\"WARN\"] = 50] = \"WARN\";\n /** General informational log message */\n DiagLogLevel[DiagLogLevel[\"INFO\"] = 60] = \"INFO\";\n /** General debug log message */\n DiagLogLevel[DiagLogLevel[\"DEBUG\"] = 70] = \"DEBUG\";\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n DiagLogLevel[DiagLogLevel[\"VERBOSE\"] = 80] = \"VERBOSE\";\n /** Used to set the logging level to include all logging */\n DiagLogLevel[DiagLogLevel[\"ALL\"] = 9999] = \"ALL\";\n})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {}));\n//# sourceMappingURL=types.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.diag = exports.propagation = exports.trace = exports.context = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0;\n__exportStar(require(\"./baggage\"), exports);\n__exportStar(require(\"./common/Exception\"), exports);\n__exportStar(require(\"./common/Time\"), exports);\n__exportStar(require(\"./diag\"), exports);\n__exportStar(require(\"./propagation/NoopTextMapPropagator\"), exports);\n__exportStar(require(\"./propagation/TextMapPropagator\"), exports);\n__exportStar(require(\"./trace/attributes\"), exports);\n__exportStar(require(\"./trace/Event\"), exports);\n__exportStar(require(\"./trace/link_context\"), exports);\n__exportStar(require(\"./trace/link\"), exports);\n__exportStar(require(\"./trace/NoopTracer\"), exports);\n__exportStar(require(\"./trace/NoopTracerProvider\"), exports);\n__exportStar(require(\"./trace/ProxyTracer\"), exports);\n__exportStar(require(\"./trace/ProxyTracerProvider\"), exports);\n__exportStar(require(\"./trace/Sampler\"), exports);\n__exportStar(require(\"./trace/SamplingResult\"), exports);\n__exportStar(require(\"./trace/span_context\"), exports);\n__exportStar(require(\"./trace/span_kind\"), exports);\n__exportStar(require(\"./trace/span\"), exports);\n__exportStar(require(\"./trace/SpanOptions\"), exports);\n__exportStar(require(\"./trace/status\"), exports);\n__exportStar(require(\"./trace/TimedEvent\"), exports);\n__exportStar(require(\"./trace/trace_flags\"), exports);\n__exportStar(require(\"./trace/trace_state\"), exports);\n__exportStar(require(\"./trace/tracer_provider\"), exports);\n__exportStar(require(\"./trace/tracer\"), exports);\nvar spancontext_utils_1 = require(\"./trace/spancontext-utils\");\nObject.defineProperty(exports, \"INVALID_SPANID\", { enumerable: true, get: function () { return spancontext_utils_1.INVALID_SPANID; } });\nObject.defineProperty(exports, \"INVALID_TRACEID\", { enumerable: true, get: function () { return spancontext_utils_1.INVALID_TRACEID; } });\nObject.defineProperty(exports, \"INVALID_SPAN_CONTEXT\", { enumerable: true, get: function () { return spancontext_utils_1.INVALID_SPAN_CONTEXT; } });\nObject.defineProperty(exports, \"isSpanContextValid\", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } });\nObject.defineProperty(exports, \"isValidTraceId\", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } });\nObject.defineProperty(exports, \"isValidSpanId\", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } });\n__exportStar(require(\"./context/context\"), exports);\n__exportStar(require(\"./context/NoopContextManager\"), exports);\n__exportStar(require(\"./context/types\"), exports);\nvar context_1 = require(\"./api/context\");\n/** Entrypoint for context API */\nexports.context = context_1.ContextAPI.getInstance();\nvar trace_1 = require(\"./api/trace\");\n/** Entrypoint for trace API 
*/\nexports.trace = trace_1.TraceAPI.getInstance();\nvar propagation_1 = require(\"./api/propagation\");\n/** Entrypoint for propagation API */\nexports.propagation = propagation_1.PropagationAPI.getInstance();\nvar diag_1 = require(\"./api/diag\");\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexports.diag = diag_1.DiagAPI.instance();\nexports.default = {\n trace: exports.trace,\n context: exports.context,\n propagation: exports.propagation,\n diag: exports.diag,\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0;\nvar __1 = require(\"..\");\nvar platform_1 = require(\"../platform\");\nvar version_1 = require(\"../version\");\nvar semver_1 = require(\"./semver\");\nvar major = version_1.VERSION.split('.')[0];\nvar GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\"io.opentelemetry.js.api.\" + major);\nvar _global = platform_1._globalThis;\nfunction registerGlobal(type, instance, allowOverride) {\n var _a;\n if (allowOverride === void 0) { allowOverride = false; }\n _global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : {\n version: version_1.VERSION,\n };\n var api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n var err = new Error(\"@opentelemetry/api: Attempted duplicate registration of API: \" + type);\n __1.diag.error(err.stack || err.message);\n return;\n }\n if (api.version !== version_1.VERSION) {\n // All registered APIs must be of the same version exactly\n var err = new Error('@opentelemetry/api: All API registration versions must match');\n __1.diag.error(err.stack || err.message);\n return;\n }\n api[type] = instance;\n}\nexports.registerGlobal = registerGlobal;\nfunction getGlobal(type) {\n var _a, _b;\n var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version;\n if (!globalVersion || !semver_1.isCompatible(globalVersion)) {\n return;\n }\n return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? 
void 0 : _b[type];\n}\nexports.getGlobal = getGlobal;\nfunction unregisterGlobal(type) {\n var api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n if (api) {\n delete api[type];\n }\n}\nexports.unregisterGlobal = unregisterGlobal;\n//# sourceMappingURL=global-utils.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isCompatible = exports._makeCompatibilityCheck = void 0;\nvar version_1 = require(\"../version\");\nvar re = /^(\\d+)\\.(\\d+)\\.(\\d+)(?:-(.*))?$/;\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nfunction _makeCompatibilityCheck(ownVersion) {\n var acceptedVersions = new Set([ownVersion]);\n var rejectedVersions = new Set();\n var myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return function () { return false; };\n }\n var ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n };\n function _reject(v) {\n rejectedVersions.add(v);\n return false;\n }\n function _accept(v) {\n acceptedVersions.add(v);\n return true;\n }\n return function isCompatible(globalVersion) {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n var globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n var globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n };\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n if (ownVersionParsed.major === 0) {\n if (ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch) {\n return _accept(globalVersion);\n }\n return _reject(globalVersion);\n }\n if (ownVersionParsed.minor <= 
globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n return _reject(globalVersion);\n };\n}\nexports._makeCompatibilityCheck = _makeCompatibilityCheck;\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexports.isCompatible = _makeCompatibilityCheck(version_1.VERSION);\n//# sourceMappingURL=semver.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./node\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports._globalThis = void 0;\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexports._globalThis = typeof globalThis === 'object' ? 
globalThis : global;\n//# sourceMappingURL=globalThis.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./globalThis\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TEXT_MAP_PROPAGATOR = exports.NoopTextMapPropagator = void 0;\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nvar NoopTextMapPropagator = /** @class */ (function () {\n function NoopTextMapPropagator() {\n }\n /** Noop inject function does nothing */\n NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { };\n /** Noop extract function does nothing and returns the input context */\n NoopTextMapPropagator.prototype.extract = function (context, _carrier) {\n return context;\n };\n NoopTextMapPropagator.prototype.fields = function () {\n return [];\n };\n return NoopTextMapPropagator;\n}());\nexports.NoopTextMapPropagator = NoopTextMapPropagator;\nexports.NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator();\n//# sourceMappingURL=NoopTextMapPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, 
\"__esModule\", { value: true });\nexports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0;\nexports.defaultTextMapGetter = {\n get: function (carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n keys: function (carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\nexports.defaultTextMapSetter = {\n set: function (carrier, key, value) {\n if (carrier == null) {\n return;\n }\n carrier[key] = value;\n },\n};\n//# sourceMappingURL=TextMapPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Event.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NoopSpan = void 0;\nvar spancontext_utils_1 = require(\"./spancontext-utils\");\n/**\n * The NoopSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nvar NoopSpan = /** @class */ (function () {\n function NoopSpan(_spanContext) {\n if (_spanContext === void 0) { _spanContext = spancontext_utils_1.INVALID_SPAN_CONTEXT; }\n this._spanContext = _spanContext;\n }\n // Returns a SpanContext.\n NoopSpan.prototype.context = function () {\n return this._spanContext;\n };\n // By default does nothing\n NoopSpan.prototype.setAttribute = function (_key, _value) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setAttributes = function (_attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.addEvent = function (_name, _attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setStatus = function (_status) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.updateName = function (_name) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.end = function (_endTime) { };\n // isRecording always returns false for noopSpan.\n NoopSpan.prototype.isRecording = function () {\n return false;\n };\n // By default does nothing\n NoopSpan.prototype.recordException = function (_exception, _time) { };\n return NoopSpan;\n}());\nexports.NoopSpan = NoopSpan;\n//# sourceMappingURL=NoopSpan.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER = exports.NoopTracer = void 0;\nvar context_1 = require(\"../context/context\");\nvar NoopSpan_1 = require(\"./NoopSpan\");\nvar spancontext_utils_1 = require(\"./spancontext-utils\");\n/**\n * No-op implementations of {@link Tracer}.\n */\nvar NoopTracer = /** @class */ (function () {\n function NoopTracer() {\n }\n // startSpan starts a noop span.\n NoopTracer.prototype.startSpan = function (name, options, context) {\n var root = Boolean(options === null || options === void 0 ? 
void 0 : options.root);\n if (root) {\n return new NoopSpan_1.NoopSpan();\n }\n var parentFromContext = context && context_1.getSpanContext(context);\n if (isSpanContext(parentFromContext) &&\n spancontext_utils_1.isSpanContextValid(parentFromContext)) {\n return new NoopSpan_1.NoopSpan(parentFromContext);\n }\n else {\n return new NoopSpan_1.NoopSpan();\n }\n };\n return NoopTracer;\n}());\nexports.NoopTracer = NoopTracer;\nfunction isSpanContext(spanContext) {\n return (typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number');\n}\nexports.NOOP_TRACER = new NoopTracer();\n//# sourceMappingURL=NoopTracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0;\nvar NoopTracer_1 = require(\"./NoopTracer\");\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nvar NoopTracerProvider = /** @class */ (function () {\n function NoopTracerProvider() {\n }\n NoopTracerProvider.prototype.getTracer = function (_name, _version) {\n return NoopTracer_1.NOOP_TRACER;\n };\n return NoopTracerProvider;\n}());\nexports.NoopTracerProvider = NoopTracerProvider;\nexports.NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n//# sourceMappingURL=NoopTracerProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ProxyTracer = void 0;\nvar NoopTracer_1 = require(\"./NoopTracer\");\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nvar ProxyTracer = /** @class */ (function () {\n function ProxyTracer(_provider, name, version) {\n this._provider = _provider;\n this.name = name;\n this.version = version;\n }\n ProxyTracer.prototype.startSpan = function (name, options, context) {\n return this._getTracer().startSpan(name, options, context);\n };\n /**\n * Try to get a tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n ProxyTracer.prototype._getTracer = function () {\n if (this._delegate) {\n return this._delegate;\n }\n var tracer = 
this._provider.getDelegateTracer(this.name, this.version);\n if (!tracer) {\n return NoopTracer_1.NOOP_TRACER;\n }\n this._delegate = tracer;\n return this._delegate;\n };\n return ProxyTracer;\n}());\nexports.ProxyTracer = ProxyTracer;\n//# sourceMappingURL=ProxyTracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ProxyTracerProvider = void 0;\nvar ProxyTracer_1 = require(\"./ProxyTracer\");\nvar NoopTracerProvider_1 = require(\"./NoopTracerProvider\");\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nvar ProxyTracerProvider = /** @class */ (function () {\n function ProxyTracerProvider() {\n }\n /**\n * Get a {@link ProxyTracer}\n */\n ProxyTracerProvider.prototype.getTracer = function (name, version) {\n var _a;\n return ((_a = this.getDelegateTracer(name, version)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version));\n };\n ProxyTracerProvider.prototype.getDelegate = function () {\n var _a;\n return (_a = this._delegate) !== null && _a !== void 0 ? _a : NoopTracerProvider_1.NOOP_TRACER_PROVIDER;\n };\n /**\n * Set the delegate tracer provider\n */\n ProxyTracerProvider.prototype.setDelegate = function (delegate) {\n this._delegate = delegate;\n };\n ProxyTracerProvider.prototype.getDelegateTracer = function (name, version) {\n var _a;\n return (_a = this._delegate) === null || _a === void 0 ? 
void 0 : _a.getTracer(name, version);\n };\n return ProxyTracerProvider;\n}());\nexports.ProxyTracerProvider = ProxyTracerProvider;\n//# sourceMappingURL=ProxyTracerProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Sampler.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SamplingDecision = void 0;\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nvar SamplingDecision;\n(function (SamplingDecision) {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n SamplingDecision[SamplingDecision[\"NOT_RECORD\"] = 0] = \"NOT_RECORD\";\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD\"] = 1] = \"RECORD\";\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD_AND_SAMPLED\"] = 2] = \"RECORD_AND_SAMPLED\";\n})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));\n//# sourceMappingURL=SamplingResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=SpanOptions.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=TimedEvent.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=attributes.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link_context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache 
License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span_context.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SpanKind = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar SpanKind;\n(function (SpanKind) {\n /** Default value. Indicates that the span is used internally. */\n SpanKind[SpanKind[\"INTERNAL\"] = 0] = \"INTERNAL\";\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SpanKind[SpanKind[\"SERVER\"] = 1] = \"SERVER\";\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n SpanKind[SpanKind[\"CLIENT\"] = 2] = \"CLIENT\";\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"PRODUCER\"] = 3] = \"PRODUCER\";\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. 
Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"CONSUMER\"] = 4] = \"CONSUMER\";\n})(SpanKind = exports.SpanKind || (exports.SpanKind = {}));\n//# sourceMappingURL=span_kind.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0;\nvar trace_flags_1 = require(\"./trace_flags\");\nvar VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nvar VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\nexports.INVALID_SPANID = '0000000000000000';\nexports.INVALID_TRACEID = '00000000000000000000000000000000';\nexports.INVALID_SPAN_CONTEXT = {\n traceId: exports.INVALID_TRACEID,\n spanId: exports.INVALID_SPANID,\n traceFlags: trace_flags_1.TraceFlags.NONE,\n};\nfunction isValidTraceId(traceId) {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== exports.INVALID_TRACEID;\n}\nexports.isValidTraceId = isValidTraceId;\nfunction isValidSpanId(spanId) {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== exports.INVALID_SPANID;\n}\nexports.isValidSpanId = isValidSpanId;\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nfunction isSpanContextValid(spanContext) {\n return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId));\n}\nexports.isSpanContextValid = isSpanContextValid;\n//# sourceMappingURL=spancontext-utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SpanStatusCode = void 0;\n/**\n * An enumeration of status codes.\n */\nvar SpanStatusCode;\n(function (SpanStatusCode) {\n /**\n * The default status.\n */\n SpanStatusCode[SpanStatusCode[\"UNSET\"] = 0] = \"UNSET\";\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n SpanStatusCode[SpanStatusCode[\"OK\"] = 1] = \"OK\";\n /**\n * The operation contains an error.\n */\n SpanStatusCode[SpanStatusCode[\"ERROR\"] = 2] = \"ERROR\";\n})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {}));\n//# sourceMappingURL=status.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceFlags = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar TraceFlags;\n(function (TraceFlags) {\n /** Represents no flag set. */\n TraceFlags[TraceFlags[\"NONE\"] = 0] = \"NONE\";\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n TraceFlags[TraceFlags[\"SAMPLED\"] = 1] = \"SAMPLED\";\n})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {}));\n//# sourceMappingURL=trace_flags.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=trace_state.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer_provider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.VERSION = void 0;\n// this is autogenerated file, see scripts/version-update.js\nexports.VERSION = '1.0.0-rc.0';\n//# sourceMappingURL=version.js.map","var CombinedStream = require('combined-stream');\nvar util = require('util');\nvar path = require('path');\nvar http = require('http');\nvar https = require('https');\nvar parseUrl = require('url').parse;\nvar fs = require('fs');\nvar mime = require('mime-types');\nvar asynckit = 
require('asynckit');\nvar populate = require('./populate.js');\n\n// Public API\nmodule.exports = FormData;\n\n// make it a Stream\nutil.inherits(FormData, CombinedStream);\n\n/**\n * Create readable \"multipart/form-data\" streams.\n * Can be used to submit forms\n * and file uploads to other web applications.\n *\n * @constructor\n * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream\n */\nfunction FormData(options) {\n if (!(this instanceof FormData)) {\n return new FormData(options);\n }\n\n this._overheadLength = 0;\n this._valueLength = 0;\n this._valuesToMeasure = [];\n\n CombinedStream.call(this);\n\n options = options || {};\n for (var option in options) {\n this[option] = options[option];\n }\n}\n\nFormData.LINE_BREAK = '\\r\\n';\nFormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';\n\nFormData.prototype.append = function(field, value, options) {\n\n options = options || {};\n\n // allow filename as single option\n if (typeof options == 'string') {\n options = {filename: options};\n }\n\n var append = CombinedStream.prototype.append.bind(this);\n\n // all that streamy business can't handle numbers\n if (typeof value == 'number') {\n value = '' + value;\n }\n\n // https://github.com/felixge/node-form-data/issues/38\n if (util.isArray(value)) {\n // Please convert your array into string\n // the way web server expects it\n this._error(new Error('Arrays are not supported.'));\n return;\n }\n\n var header = this._multiPartHeader(field, value, options);\n var footer = this._multiPartFooter();\n\n append(header);\n append(value);\n append(footer);\n\n // pass along options.knownLength\n this._trackLength(header, value, options);\n};\n\nFormData.prototype._trackLength = function(header, value, options) {\n var valueLength = 0;\n\n // used w/ getLengthSync(), when length is known.\n // e.g. for streaming directly from a remote server,\n // w/ a known file a size, and not wanting to wait for\n // incoming file to finish to get its size.\n if (options.knownLength != null) {\n valueLength += +options.knownLength;\n } else if (Buffer.isBuffer(value)) {\n valueLength = value.length;\n } else if (typeof value === 'string') {\n valueLength = Buffer.byteLength(value);\n }\n\n this._valueLength += valueLength;\n\n // @check why add CRLF? does this account for custom/multiple CRLFs?\n this._overheadLength +=\n Buffer.byteLength(header) +\n FormData.LINE_BREAK.length;\n\n // empty or either doesn't have path or not an http response\n if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) {\n return;\n }\n\n // no need to bother with the length\n if (!options.knownLength) {\n this._valuesToMeasure.push(value);\n }\n};\n\nFormData.prototype._lengthRetriever = function(value, callback) {\n\n if (value.hasOwnProperty('fd')) {\n\n // take read range into a account\n // `end` = Infinity –> read file till the end\n //\n // TODO: Looks like there is bug in Node fs.createReadStream\n // it doesn't respect `end` options without `start` options\n // Fix it when node fixes it.\n // https://github.com/joyent/node/issues/7819\n if (value.end != undefined && value.end != Infinity && value.start != undefined) {\n\n // when end specified\n // no need to calculate range\n // inclusive, starts with 0\n callback(null, value.end + 1 - (value.start ? 
value.start : 0));\n\n // not that fast snoopy\n } else {\n // still need to fetch file size from fs\n fs.stat(value.path, function(err, stat) {\n\n var fileSize;\n\n if (err) {\n callback(err);\n return;\n }\n\n // update final size based on the range options\n fileSize = stat.size - (value.start ? value.start : 0);\n callback(null, fileSize);\n });\n }\n\n // or http response\n } else if (value.hasOwnProperty('httpVersion')) {\n callback(null, +value.headers['content-length']);\n\n // or request stream http://github.com/mikeal/request\n } else if (value.hasOwnProperty('httpModule')) {\n // wait till response come back\n value.on('response', function(response) {\n value.pause();\n callback(null, +response.headers['content-length']);\n });\n value.resume();\n\n // something else\n } else {\n callback('Unknown stream');\n }\n};\n\nFormData.prototype._multiPartHeader = function(field, value, options) {\n // custom header specified (as string)?\n // it becomes responsible for boundary\n // (e.g. to handle extra CRLFs on .NET servers)\n if (typeof options.header == 'string') {\n return options.header;\n }\n\n var contentDisposition = this._getContentDisposition(value, options);\n var contentType = this._getContentType(value, options);\n\n var contents = '';\n var headers = {\n // add custom disposition as third element or keep it two elements if not\n 'Content-Disposition': ['form-data', 'name=\"' + field + '\"'].concat(contentDisposition || []),\n // if no content type. allow it to be empty array\n 'Content-Type': [].concat(contentType || [])\n };\n\n // allow custom headers.\n if (typeof options.header == 'object') {\n populate(headers, options.header);\n }\n\n var header;\n for (var prop in headers) {\n if (!headers.hasOwnProperty(prop)) continue;\n header = headers[prop];\n\n // skip nullish headers.\n if (header == null) {\n continue;\n }\n\n // convert all headers to arrays.\n if (!Array.isArray(header)) {\n header = [header];\n }\n\n // add non-empty headers.\n if (header.length) {\n contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;\n }\n }\n\n return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;\n};\n\nFormData.prototype._getContentDisposition = function(value, options) {\n\n var filename\n , contentDisposition\n ;\n\n if (typeof options.filepath === 'string') {\n // custom filepath for relative paths\n filename = path.normalize(options.filepath).replace(/\\\\/g, '/');\n } else if (options.filename || value.name || value.path) {\n // custom filename take precedence\n // formidable and the browser add a name property\n // fs- and request- streams have path property\n filename = path.basename(options.filename || value.name || value.path);\n } else if (value.readable && value.hasOwnProperty('httpVersion')) {\n // or try http response\n filename = path.basename(value.client._httpMessage.path || '');\n }\n\n if (filename) {\n contentDisposition = 'filename=\"' + filename + '\"';\n }\n\n return contentDisposition;\n};\n\nFormData.prototype._getContentType = function(value, options) {\n\n // use custom content-type above all\n var contentType = options.contentType;\n\n // or try `name` from formidable, browser\n if (!contentType && value.name) {\n contentType = mime.lookup(value.name);\n }\n\n // or try `path` from fs-, request- streams\n if (!contentType && value.path) {\n contentType = mime.lookup(value.path);\n }\n\n // or if it's http-reponse\n if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {\n contentType = 
value.headers['content-type'];\n }\n\n // or guess it from the filepath or filename\n if (!contentType && (options.filepath || options.filename)) {\n contentType = mime.lookup(options.filepath || options.filename);\n }\n\n // fallback to the default content type if `value` is not simple value\n if (!contentType && typeof value == 'object') {\n contentType = FormData.DEFAULT_CONTENT_TYPE;\n }\n\n return contentType;\n};\n\nFormData.prototype._multiPartFooter = function() {\n return function(next) {\n var footer = FormData.LINE_BREAK;\n\n var lastPart = (this._streams.length === 0);\n if (lastPart) {\n footer += this._lastBoundary();\n }\n\n next(footer);\n }.bind(this);\n};\n\nFormData.prototype._lastBoundary = function() {\n return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;\n};\n\nFormData.prototype.getHeaders = function(userHeaders) {\n var header;\n var formHeaders = {\n 'content-type': 'multipart/form-data; boundary=' + this.getBoundary()\n };\n\n for (header in userHeaders) {\n if (userHeaders.hasOwnProperty(header)) {\n formHeaders[header.toLowerCase()] = userHeaders[header];\n }\n }\n\n return formHeaders;\n};\n\nFormData.prototype.setBoundary = function(boundary) {\n this._boundary = boundary;\n};\n\nFormData.prototype.getBoundary = function() {\n if (!this._boundary) {\n this._generateBoundary();\n }\n\n return this._boundary;\n};\n\nFormData.prototype.getBuffer = function() {\n var dataBuffer = new Buffer.alloc( 0 );\n var boundary = this.getBoundary();\n\n // Create the form content. Add Line breaks to the end of data.\n for (var i = 0, len = this._streams.length; i < len; i++) {\n if (typeof this._streams[i] !== 'function') {\n\n // Add content to the buffer.\n if(Buffer.isBuffer(this._streams[i])) {\n dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]);\n }else {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]);\n }\n\n // Add break after content.\n if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] );\n }\n }\n }\n\n // Add the footer and return the Buffer object.\n return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] );\n};\n\nFormData.prototype._generateBoundary = function() {\n // This generates a 50 character boundary similar to those used by Firefox.\n // They are optimized for boyer-moore parsing.\n var boundary = '--------------------------';\n for (var i = 0; i < 24; i++) {\n boundary += Math.floor(Math.random() * 10).toString(16);\n }\n\n this._boundary = boundary;\n};\n\n// Note: getLengthSync DOESN'T calculate streams length\n// As workaround one can calculate file size manually\n// and add it as knownLength option\nFormData.prototype.getLengthSync = function() {\n var knownLength = this._overheadLength + this._valueLength;\n\n // Don't get confused, there are 3 \"internal\" streams for each keyval pair\n // so it basically checks if there is any value added to the form\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n // https://github.com/form-data/form-data/issues/40\n if (!this.hasKnownLength()) {\n // Some async length retrievers are present\n // therefore synchronous length calculation is false.\n // Please use getLength(callback) to get proper length\n this._error(new Error('Cannot calculate proper length in synchronous way.'));\n }\n\n return knownLength;\n};\n\n// Public API to check if length of added values is known\n// 
https://github.com/form-data/form-data/issues/196\n// https://github.com/form-data/form-data/issues/262\nFormData.prototype.hasKnownLength = function() {\n var hasKnownLength = true;\n\n if (this._valuesToMeasure.length) {\n hasKnownLength = false;\n }\n\n return hasKnownLength;\n};\n\nFormData.prototype.getLength = function(cb) {\n var knownLength = this._overheadLength + this._valueLength;\n\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n if (!this._valuesToMeasure.length) {\n process.nextTick(cb.bind(this, null, knownLength));\n return;\n }\n\n asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) {\n if (err) {\n cb(err);\n return;\n }\n\n values.forEach(function(length) {\n knownLength += length;\n });\n\n cb(null, knownLength);\n });\n};\n\nFormData.prototype.submit = function(params, cb) {\n var request\n , options\n , defaults = {method: 'post'}\n ;\n\n // parse provided url if it's string\n // or treat it as options object\n if (typeof params == 'string') {\n\n params = parseUrl(params);\n options = populate({\n port: params.port,\n path: params.pathname,\n host: params.hostname,\n protocol: params.protocol\n }, defaults);\n\n // use custom params\n } else {\n\n options = populate(params, defaults);\n // if no port provided use default one\n if (!options.port) {\n options.port = options.protocol == 'https:' ? 443 : 80;\n }\n }\n\n // put that good code in getHeaders to some use\n options.headers = this.getHeaders(params.headers);\n\n // https if specified, fallback to http in any other case\n if (options.protocol == 'https:') {\n request = https.request(options);\n } else {\n request = http.request(options);\n }\n\n // get content length and fire away\n this.getLength(function(err, length) {\n if (err) {\n this._error(err);\n return;\n }\n\n // add content length\n request.setHeader('Content-Length', length);\n\n this.pipe(request);\n if (cb) {\n var onResponse;\n\n var callback = function (error, responce) {\n request.removeListener('error', callback);\n request.removeListener('response', onResponse);\n\n return cb.call(this, error, responce);\n };\n\n onResponse = callback.bind(this, null);\n\n request.on('error', callback);\n request.on('response', onResponse);\n }\n }.bind(this));\n\n return request;\n};\n\nFormData.prototype._error = function(err) {\n if (!this.error) {\n this.error = err;\n this.pause();\n this.emit('error', err);\n }\n};\n\nFormData.prototype.toString = function () {\n return '[object FormData]';\n};\n","// populates missing values\nmodule.exports = function(dst, src) {\n\n Object.keys(src).forEach(function(prop)\n {\n dst[prop] = dst[prop] || src[prop];\n });\n\n return dst;\n};\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. 
Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst punycode = require(\"punycode\");\nconst urlParse = require(\"url\").parse;\nconst util = require(\"util\");\nconst pubsuffix = require(\"./pubsuffix-psl\");\nconst Store = require(\"./store\").Store;\nconst MemoryCookieStore = require(\"./memstore\").MemoryCookieStore;\nconst pathMatch = require(\"./pathMatch\").pathMatch;\nconst VERSION = require(\"./version\");\nconst { fromCallback } = require(\"universalify\");\n\n// From RFC6265 S4.1.1\n// note that it excludes \\x3B \";\"\nconst COOKIE_OCTETS = /^[\\x21\\x23-\\x2B\\x2D-\\x3A\\x3C-\\x5B\\x5D-\\x7E]+$/;\n\nconst CONTROL_CHARS = /[\\x00-\\x1F]/;\n\n// From Chromium // '\\r', '\\n' and '\\0' should be treated as a terminator in\n// the \"relaxed\" mode, see:\n// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60\nconst TERMINATORS = [\"\\n\", \"\\r\", \"\\0\"];\n\n// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or \";\"'\n// Note ';' is \\x3B\nconst PATH_VALUE = /[\\x20-\\x3A\\x3C-\\x7E]+/;\n\n// date-time parsing constants (RFC6265 S5.1.1)\n\nconst DATE_DELIM = /[\\x09\\x20-\\x2F\\x3B-\\x40\\x5B-\\x60\\x7B-\\x7E]/;\n\nconst MONTH_TO_NUM = {\n jan: 0,\n feb: 1,\n mar: 2,\n apr: 3,\n may: 4,\n jun: 5,\n jul: 6,\n aug: 7,\n sep: 8,\n oct: 9,\n nov: 10,\n dec: 11\n};\n\nconst MAX_TIME = 2147483647000; // 31-bit max\nconst MIN_TIME = 0; // 31-bit min\nconst SAME_SITE_CONTEXT_VAL_ERR =\n 'Invalid sameSiteContext option for getCookies(); expected one of \"strict\", \"lax\", or \"none\"';\n\nfunction checkSameSiteContext(value) {\n const context = String(value).toLowerCase();\n if (context === \"none\" || context === \"lax\" || context === \"strict\") {\n return context;\n } else {\n return null;\n }\n}\n\nconst PrefixSecurityEnum = Object.freeze({\n SILENT: \"silent\",\n STRICT: \"strict\",\n DISABLED: \"unsafe-disabled\"\n});\n\n// Dumped from ip-regex@4.0.0, with the following changes:\n// * all capturing groups converted to non-capturing -- \"(?:)\"\n// * support for IPv6 Scoped Literal (\"%eth1\") removed\n// * lowercase hexadecimal only\nvar IP_REGEX_LOWERCASE 
=/(?:^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$)|(?:^(?:(?:[a-f\\d]{1,4}:){7}(?:[a-f\\d]{1,4}|:)|(?:[a-f\\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|:[a-f\\d]{1,4}|:)|(?:[a-f\\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,2}|:)|(?:[a-f\\d]{1,4}:){4}(?:(?::[a-f\\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,3}|:)|(?:[a-f\\d]{1,4}:){3}(?:(?::[a-f\\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,4}|:)|(?:[a-f\\d]{1,4}:){2}(?:(?::[a-f\\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,5}|:)|(?:[a-f\\d]{1,4}:){1}(?:(?::[a-f\\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,7}|:)))$)/;\n\n/*\n * Parses a Natural number (i.e., non-negative integer) with either the\n * *DIGIT ( non-digit *OCTET )\n * or\n * *DIGIT\n * grammar (RFC6265 S5.1.1).\n *\n * The \"trailingOK\" boolean controls if the grammar accepts a\n * \"( non-digit *OCTET )\" trailer.\n */\nfunction parseDigits(token, minDigits, maxDigits, trailingOK) {\n let count = 0;\n while (count < token.length) {\n const c = token.charCodeAt(count);\n // \"non-digit = %x00-2F / %x3A-FF\"\n if (c <= 0x2f || c >= 0x3a) {\n break;\n }\n count++;\n }\n\n // constrain to a minimum and maximum number of digits.\n if (count < minDigits || count > maxDigits) {\n return null;\n }\n\n if (!trailingOK && count != token.length) {\n return null;\n }\n\n return parseInt(token.substr(0, count), 10);\n}\n\nfunction parseTime(token) {\n const parts = token.split(\":\");\n const result = [0, 0, 0];\n\n /* RF6256 S5.1.1:\n * time = hms-time ( non-digit *OCTET )\n * hms-time = time-field \":\" time-field \":\" time-field\n * time-field = 1*2DIGIT\n */\n\n if (parts.length !== 3) {\n return null;\n }\n\n for (let i = 0; i < 3; i++) {\n // \"time-field\" must be strictly \"1*2DIGIT\", HOWEVER, \"hms-time\" can be\n // followed by \"( non-digit *OCTET )\" so therefore the last time-field can\n // have a trailer\n const trailingOK = i == 2;\n const num = parseDigits(parts[i], 1, 2, trailingOK);\n if (num === null) {\n return null;\n }\n result[i] = num;\n }\n\n return result;\n}\n\nfunction parseMonth(token) {\n token = String(token)\n .substr(0, 3)\n .toLowerCase();\n const num = MONTH_TO_NUM[token];\n return num >= 0 ? num : null;\n}\n\n/*\n * RFC6265 S5.1.1 date parser (see RFC for full grammar)\n */\nfunction parseDate(str) {\n if (!str) {\n return;\n }\n\n /* RFC6265 S5.1.1:\n * 2. Process each date-token sequentially in the order the date-tokens\n * appear in the cookie-date\n */\n const tokens = str.split(DATE_DELIM);\n if (!tokens) {\n return;\n }\n\n let hour = null;\n let minute = null;\n let second = null;\n let dayOfMonth = null;\n let month = null;\n let year = null;\n\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i].trim();\n if (!token.length) {\n continue;\n }\n\n let result;\n\n /* 2.1. 
If the found-time flag is not set and the token matches the time\n * production, set the found-time flag and set the hour- value,\n * minute-value, and second-value to the numbers denoted by the digits in\n * the date-token, respectively. Skip the remaining sub-steps and continue\n * to the next date-token.\n */\n if (second === null) {\n result = parseTime(token);\n if (result) {\n hour = result[0];\n minute = result[1];\n second = result[2];\n continue;\n }\n }\n\n /* 2.2. If the found-day-of-month flag is not set and the date-token matches\n * the day-of-month production, set the found-day-of- month flag and set\n * the day-of-month-value to the number denoted by the date-token. Skip\n * the remaining sub-steps and continue to the next date-token.\n */\n if (dayOfMonth === null) {\n // \"day-of-month = 1*2DIGIT ( non-digit *OCTET )\"\n result = parseDigits(token, 1, 2, true);\n if (result !== null) {\n dayOfMonth = result;\n continue;\n }\n }\n\n /* 2.3. If the found-month flag is not set and the date-token matches the\n * month production, set the found-month flag and set the month-value to\n * the month denoted by the date-token. Skip the remaining sub-steps and\n * continue to the next date-token.\n */\n if (month === null) {\n result = parseMonth(token);\n if (result !== null) {\n month = result;\n continue;\n }\n }\n\n /* 2.4. If the found-year flag is not set and the date-token matches the\n * year production, set the found-year flag and set the year-value to the\n * number denoted by the date-token. Skip the remaining sub-steps and\n * continue to the next date-token.\n */\n if (year === null) {\n // \"year = 2*4DIGIT ( non-digit *OCTET )\"\n result = parseDigits(token, 2, 4, true);\n if (result !== null) {\n year = result;\n /* From S5.1.1:\n * 3. If the year-value is greater than or equal to 70 and less\n * than or equal to 99, increment the year-value by 1900.\n * 4. If the year-value is greater than or equal to 0 and less\n * than or equal to 69, increment the year-value by 2000.\n */\n if (year >= 70 && year <= 99) {\n year += 1900;\n } else if (year >= 0 && year <= 69) {\n year += 2000;\n }\n }\n }\n }\n\n /* RFC 6265 S5.1.1\n * \"5. 
Abort these steps and fail to parse the cookie-date if:\n * * at least one of the found-day-of-month, found-month, found-\n * year, or found-time flags is not set,\n * * the day-of-month-value is less than 1 or greater than 31,\n * * the year-value is less than 1601,\n * * the hour-value is greater than 23,\n * * the minute-value is greater than 59, or\n * * the second-value is greater than 59.\n * (Note that leap seconds cannot be represented in this syntax.)\"\n *\n * So, in order as above:\n */\n if (\n dayOfMonth === null ||\n month === null ||\n year === null ||\n second === null ||\n dayOfMonth < 1 ||\n dayOfMonth > 31 ||\n year < 1601 ||\n hour > 23 ||\n minute > 59 ||\n second > 59\n ) {\n return;\n }\n\n return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second));\n}\n\nfunction formatDate(date) {\n return date.toUTCString();\n}\n\n// S5.1.2 Canonicalized Host Names\nfunction canonicalDomain(str) {\n if (str == null) {\n return null;\n }\n str = str.trim().replace(/^\\./, \"\"); // S4.1.2.3 & S5.2.3: ignore leading .\n\n // convert to IDN if any non-ASCII characters\n if (punycode && /[^\\u0001-\\u007f]/.test(str)) {\n str = punycode.toASCII(str);\n }\n\n return str.toLowerCase();\n}\n\n// S5.1.3 Domain Matching\nfunction domainMatch(str, domStr, canonicalize) {\n if (str == null || domStr == null) {\n return null;\n }\n if (canonicalize !== false) {\n str = canonicalDomain(str);\n domStr = canonicalDomain(domStr);\n }\n\n /*\n * S5.1.3:\n * \"A string domain-matches a given domain string if at least one of the\n * following conditions hold:\"\n *\n * \" o The domain string and the string are identical. (Note that both the\n * domain string and the string will have been canonicalized to lower case at\n * this point)\"\n */\n if (str == domStr) {\n return true;\n }\n\n /* \" o All of the following [three] conditions hold:\" */\n\n /* \"* The domain string is a suffix of the string\" */\n const idx = str.indexOf(domStr);\n if (idx <= 0) {\n return false; // it's a non-match (-1) or prefix (0)\n }\n\n // next, check it's a proper suffix\n // e.g., \"a.b.c\".indexOf(\"b.c\") === 2\n // 5 === 3+2\n if (str.length !== domStr.length + idx) {\n return false; // it's not a suffix\n }\n\n /* \" * The last character of the string that is not included in the\n * domain string is a %x2E (\".\") character.\" */\n if (str.substr(idx-1,1) !== '.') {\n return false; // doesn't align on \".\"\n }\n\n /* \" * The string is a host name (i.e., not an IP address).\" */\n if (IP_REGEX_LOWERCASE.test(str)) {\n return false; // it's an IP address\n }\n\n return true;\n}\n\n// RFC6265 S5.1.4 Paths and Path-Match\n\n/*\n * \"The user agent MUST use an algorithm equivalent to the following algorithm\n * to compute the default-path of a cookie:\"\n *\n * Assumption: the path (and not query part or absolute uri) is passed in.\n */\nfunction defaultPath(path) {\n // \"2. If the uri-path is empty or if the first character of the uri-path is not\n // a %x2F (\"/\") character, output %x2F (\"/\") and skip the remaining steps.\n if (!path || path.substr(0, 1) !== \"/\") {\n return \"/\";\n }\n\n // \"3. If the uri-path contains no more than one %x2F (\"/\") character, output\n // %x2F (\"/\") and skip the remaining step.\"\n if (path === \"/\") {\n return path;\n }\n\n const rightSlash = path.lastIndexOf(\"/\");\n if (rightSlash === 0) {\n return \"/\";\n }\n\n // \"4. 
Output the characters of the uri-path from the first character up to,\n // but not including, the right-most %x2F (\"/\").\"\n return path.slice(0, rightSlash);\n}\n\nfunction trimTerminator(str) {\n for (let t = 0; t < TERMINATORS.length; t++) {\n const terminatorIdx = str.indexOf(TERMINATORS[t]);\n if (terminatorIdx !== -1) {\n str = str.substr(0, terminatorIdx);\n }\n }\n\n return str;\n}\n\nfunction parseCookiePair(cookiePair, looseMode) {\n cookiePair = trimTerminator(cookiePair);\n\n let firstEq = cookiePair.indexOf(\"=\");\n if (looseMode) {\n if (firstEq === 0) {\n // '=' is immediately at start\n cookiePair = cookiePair.substr(1);\n firstEq = cookiePair.indexOf(\"=\"); // might still need to split on '='\n }\n } else {\n // non-loose mode\n if (firstEq <= 0) {\n // no '=' or is at start\n return; // needs to have non-empty \"cookie-name\"\n }\n }\n\n let cookieName, cookieValue;\n if (firstEq <= 0) {\n cookieName = \"\";\n cookieValue = cookiePair.trim();\n } else {\n cookieName = cookiePair.substr(0, firstEq).trim();\n cookieValue = cookiePair.substr(firstEq + 1).trim();\n }\n\n if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) {\n return;\n }\n\n const c = new Cookie();\n c.key = cookieName;\n c.value = cookieValue;\n return c;\n}\n\nfunction parse(str, options) {\n if (!options || typeof options !== \"object\") {\n options = {};\n }\n str = str.trim();\n\n // We use a regex to parse the \"name-value-pair\" part of S5.2\n const firstSemi = str.indexOf(\";\"); // S5.2 step 1\n const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi);\n const c = parseCookiePair(cookiePair, !!options.loose);\n if (!c) {\n return;\n }\n\n if (firstSemi === -1) {\n return c;\n }\n\n // S5.2.3 \"unparsed-attributes consist of the remainder of the set-cookie-string\n // (including the %x3B (\";\") in question).\" plus later on in the same section\n // \"discard the first \";\" and trim\".\n const unparsed = str.slice(firstSemi + 1).trim();\n\n // \"If the unparsed-attributes string is empty, skip the rest of these\n // steps.\"\n if (unparsed.length === 0) {\n return c;\n }\n\n /*\n * S5.2 says that when looping over the items \"[p]rocess the attribute-name\n * and attribute-value according to the requirements in the following\n * subsections\" for every item. Plus, for many of the individual attributes\n * in S5.3 it says to use the \"attribute-value of the last attribute in the\n * cookie-attribute-list\". 
Therefore, in this implementation, we overwrite\n * the previous value.\n */\n const cookie_avs = unparsed.split(\";\");\n while (cookie_avs.length) {\n const av = cookie_avs.shift().trim();\n if (av.length === 0) {\n // happens if \";;\" appears\n continue;\n }\n const av_sep = av.indexOf(\"=\");\n let av_key, av_value;\n\n if (av_sep === -1) {\n av_key = av;\n av_value = null;\n } else {\n av_key = av.substr(0, av_sep);\n av_value = av.substr(av_sep + 1);\n }\n\n av_key = av_key.trim().toLowerCase();\n\n if (av_value) {\n av_value = av_value.trim();\n }\n\n switch (av_key) {\n case \"expires\": // S5.2.1\n if (av_value) {\n const exp = parseDate(av_value);\n // \"If the attribute-value failed to parse as a cookie date, ignore the\n // cookie-av.\"\n if (exp) {\n // over and underflow not realistically a concern: V8's getTime() seems to\n // store something larger than a 32-bit time_t (even with 32-bit node)\n c.expires = exp;\n }\n }\n break;\n\n case \"max-age\": // S5.2.2\n if (av_value) {\n // \"If the first character of the attribute-value is not a DIGIT or a \"-\"\n // character ...[or]... If the remainder of attribute-value contains a\n // non-DIGIT character, ignore the cookie-av.\"\n if (/^-?[0-9]+$/.test(av_value)) {\n const delta = parseInt(av_value, 10);\n // \"If delta-seconds is less than or equal to zero (0), let expiry-time\n // be the earliest representable date and time.\"\n c.setMaxAge(delta);\n }\n }\n break;\n\n case \"domain\": // S5.2.3\n // \"If the attribute-value is empty, the behavior is undefined. However,\n // the user agent SHOULD ignore the cookie-av entirely.\"\n if (av_value) {\n // S5.2.3 \"Let cookie-domain be the attribute-value without the leading %x2E\n // (\".\") character.\"\n const domain = av_value.trim().replace(/^\\./, \"\");\n if (domain) {\n // \"Convert the cookie-domain to lower case.\"\n c.domain = domain.toLowerCase();\n }\n }\n break;\n\n case \"path\": // S5.2.4\n /*\n * \"If the attribute-value is empty or if the first character of the\n * attribute-value is not %x2F (\"/\"):\n * Let cookie-path be the default-path.\n * Otherwise:\n * Let cookie-path be the attribute-value.\"\n *\n * We'll represent the default-path as null since it depends on the\n * context of the parsing.\n */\n c.path = av_value && av_value[0] === \"/\" ? av_value : null;\n break;\n\n case \"secure\": // S5.2.5\n /*\n * \"If the attribute-name case-insensitively matches the string \"Secure\",\n * the user agent MUST append an attribute to the cookie-attribute-list\n * with an attribute-name of Secure and an empty attribute-value.\"\n */\n c.secure = true;\n break;\n\n case \"httponly\": // S5.2.6 -- effectively the same as 'secure'\n c.httpOnly = true;\n break;\n\n case \"samesite\": // RFC6265bis-02 S5.3.7\n const enforcement = av_value ? 
av_value.toLowerCase() : \"\";\n switch (enforcement) {\n case \"strict\":\n c.sameSite = \"strict\";\n break;\n case \"lax\":\n c.sameSite = \"lax\";\n break;\n default:\n // RFC6265bis-02 S5.3.7 step 1:\n // \"If cookie-av's attribute-value is not a case-insensitive match\n // for \"Strict\" or \"Lax\", ignore the \"cookie-av\".\"\n // This effectively sets it to 'none' from the prototype.\n break;\n }\n break;\n\n default:\n c.extensions = c.extensions || [];\n c.extensions.push(av);\n break;\n }\n }\n\n return c;\n}\n\n/**\n * If the cookie-name begins with a case-sensitive match for the\n * string \"__Secure-\", abort these steps and ignore the cookie\n * entirely unless the cookie's secure-only-flag is true.\n * @param cookie\n * @returns boolean\n */\nfunction isSecurePrefixConditionMet(cookie) {\n return !cookie.key.startsWith(\"__Secure-\") || cookie.secure;\n}\n\n/**\n * If the cookie-name begins with a case-sensitive match for the\n * string \"__Host-\", abort these steps and ignore the cookie\n * entirely unless the cookie meets all the following criteria:\n * 1. The cookie's secure-only-flag is true.\n * 2. The cookie's host-only-flag is true.\n * 3. The cookie-attribute-list contains an attribute with an\n * attribute-name of \"Path\", and the cookie's path is \"/\".\n * @param cookie\n * @returns boolean\n */\nfunction isHostPrefixConditionMet(cookie) {\n return (\n !cookie.key.startsWith(\"__Host-\") ||\n (cookie.secure &&\n cookie.hostOnly &&\n cookie.path != null &&\n cookie.path === \"/\")\n );\n}\n\n// avoid the V8 deoptimization monster!\nfunction jsonParse(str) {\n let obj;\n try {\n obj = JSON.parse(str);\n } catch (e) {\n return e;\n }\n return obj;\n}\n\nfunction fromJSON(str) {\n if (!str) {\n return null;\n }\n\n let obj;\n if (typeof str === \"string\") {\n obj = jsonParse(str);\n if (obj instanceof Error) {\n return null;\n }\n } else {\n // assume it's an Object\n obj = str;\n }\n\n const c = new Cookie();\n for (let i = 0; i < Cookie.serializableProperties.length; i++) {\n const prop = Cookie.serializableProperties[i];\n if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) {\n continue; // leave as prototype default\n }\n\n if (prop === \"expires\" || prop === \"creation\" || prop === \"lastAccessed\") {\n if (obj[prop] === null) {\n c[prop] = null;\n } else {\n c[prop] = obj[prop] == \"Infinity\" ? \"Infinity\" : new Date(obj[prop]);\n }\n } else {\n c[prop] = obj[prop];\n }\n }\n\n return c;\n}\n\n/* Section 5.4 part 2:\n * \"* Cookies with longer paths are listed before cookies with\n * shorter paths.\n *\n * * Among cookies that have equal-length path fields, cookies with\n * earlier creation-times are listed before cookies with later\n * creation-times.\"\n */\n\nfunction cookieCompare(a, b) {\n let cmp = 0;\n\n // descending for length: b CMP a\n const aPathLen = a.path ? a.path.length : 0;\n const bPathLen = b.path ? b.path.length : 0;\n cmp = bPathLen - aPathLen;\n if (cmp !== 0) {\n return cmp;\n }\n\n // ascending for time: a CMP b\n const aTime = a.creation ? a.creation.getTime() : MAX_TIME;\n const bTime = b.creation ? b.creation.getTime() : MAX_TIME;\n cmp = aTime - bTime;\n if (cmp !== 0) {\n return cmp;\n }\n\n // break ties for the same millisecond (precision of JavaScript's clock)\n cmp = a.creationIndex - b.creationIndex;\n\n return cmp;\n}\n\n// Gives the permutation of all possible pathMatch()es of a given path. The\n// array is in longest-to-shortest order. 
Handy for indexing.\nfunction permutePath(path) {\n if (path === \"/\") {\n return [\"/\"];\n }\n const permutations = [path];\n while (path.length > 1) {\n const lindex = path.lastIndexOf(\"/\");\n if (lindex === 0) {\n break;\n }\n path = path.substr(0, lindex);\n permutations.push(path);\n }\n permutations.push(\"/\");\n return permutations;\n}\n\nfunction getCookieContext(url) {\n if (url instanceof Object) {\n return url;\n }\n // NOTE: decodeURI will throw on malformed URIs (see GH-32).\n // Therefore, we will just skip decoding for such URIs.\n try {\n url = decodeURI(url);\n } catch (err) {\n // Silently swallow error\n }\n\n return urlParse(url);\n}\n\nconst cookieDefaults = {\n // the order in which the RFC has them:\n key: \"\",\n value: \"\",\n expires: \"Infinity\",\n maxAge: null,\n domain: null,\n path: null,\n secure: false,\n httpOnly: false,\n extensions: null,\n // set by the CookieJar:\n hostOnly: null,\n pathIsDefault: null,\n creation: null,\n lastAccessed: null,\n sameSite: \"none\"\n};\n\nclass Cookie {\n constructor(options = {}) {\n if (util.inspect.custom) {\n this[util.inspect.custom] = this.inspect;\n }\n\n Object.assign(this, cookieDefaults, options);\n this.creation = this.creation || new Date();\n\n // used to break creation ties in cookieCompare():\n Object.defineProperty(this, \"creationIndex\", {\n configurable: false,\n enumerable: false, // important for assert.deepEqual checks\n writable: true,\n value: ++Cookie.cookiesCreated\n });\n }\n\n inspect() {\n const now = Date.now();\n const hostOnly = this.hostOnly != null ? this.hostOnly : \"?\";\n const createAge = this.creation\n ? `${now - this.creation.getTime()}ms`\n : \"?\";\n const accessAge = this.lastAccessed\n ? `${now - this.lastAccessed.getTime()}ms`\n : \"?\";\n return `Cookie=\"${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}\"`;\n }\n\n toJSON() {\n const obj = {};\n\n for (const prop of Cookie.serializableProperties) {\n if (this[prop] === cookieDefaults[prop]) {\n continue; // leave as prototype default\n }\n\n if (\n prop === \"expires\" ||\n prop === \"creation\" ||\n prop === \"lastAccessed\"\n ) {\n if (this[prop] === null) {\n obj[prop] = null;\n } else {\n obj[prop] =\n this[prop] == \"Infinity\" // intentionally not ===\n ? \"Infinity\"\n : this[prop].toISOString();\n }\n } else if (prop === \"maxAge\") {\n if (this[prop] !== null) {\n // again, intentionally not ===\n obj[prop] =\n this[prop] == Infinity || this[prop] == -Infinity\n ? this[prop].toString()\n : this[prop];\n }\n } else {\n if (this[prop] !== cookieDefaults[prop]) {\n obj[prop] = this[prop];\n }\n }\n }\n\n return obj;\n }\n\n clone() {\n return fromJSON(this.toJSON());\n }\n\n validate() {\n if (!COOKIE_OCTETS.test(this.value)) {\n return false;\n }\n if (\n this.expires != Infinity &&\n !(this.expires instanceof Date) &&\n !parseDate(this.expires)\n ) {\n return false;\n }\n if (this.maxAge != null && this.maxAge <= 0) {\n return false; // \"Max-Age=\" non-zero-digit *DIGIT\n }\n if (this.path != null && !PATH_VALUE.test(this.path)) {\n return false;\n }\n\n const cdomain = this.cdomain();\n if (cdomain) {\n if (cdomain.match(/\\.$/)) {\n return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this\n }\n const suffix = pubsuffix.getPublicSuffix(cdomain);\n if (suffix == null) {\n // it's a public suffix\n return false;\n }\n }\n return true;\n }\n\n setExpires(exp) {\n if (exp instanceof Date) {\n this.expires = exp;\n } else {\n this.expires = parseDate(exp) || \"Infinity\";\n }\n }\n\n setMaxAge(age) {\n if (age === Infinity || age === -Infinity) {\n this.maxAge = age.toString(); // so JSON.stringify() works\n } else {\n this.maxAge = age;\n }\n }\n\n cookieString() {\n let val = this.value;\n if (val == null) {\n val = \"\";\n }\n if (this.key === \"\") {\n return val;\n }\n return `${this.key}=${val}`;\n }\n\n // gives Set-Cookie header format\n toString() {\n let str = this.cookieString();\n\n if (this.expires != Infinity) {\n if (this.expires instanceof Date) {\n str += `; Expires=${formatDate(this.expires)}`;\n } else {\n str += `; Expires=${this.expires}`;\n }\n }\n\n if (this.maxAge != null && this.maxAge != Infinity) {\n str += `; Max-Age=${this.maxAge}`;\n }\n\n if (this.domain && !this.hostOnly) {\n str += `; Domain=${this.domain}`;\n }\n if (this.path) {\n str += `; Path=${this.path}`;\n }\n\n if (this.secure) {\n str += \"; Secure\";\n }\n if (this.httpOnly) {\n str += \"; HttpOnly\";\n }\n if (this.sameSite && this.sameSite !== \"none\") {\n const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()];\n str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`;\n }\n if (this.extensions) {\n this.extensions.forEach(ext => {\n str += `; ${ext}`;\n });\n }\n\n return str;\n }\n\n // TTL() partially replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere)\n // S5.3 says to give the \"latest representable date\" for which we use Infinity\n // For \"expired\" we use 0\n TTL(now) {\n /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires\n * attribute, the Max-Age attribute has precedence and controls the\n * expiration date of the cookie.\n * (Concurs with S5.3 step 3)\n */\n if (this.maxAge != null) {\n return this.maxAge <= 0 ? 0 : this.maxAge * 1000;\n }\n\n let expires = this.expires;\n if (expires != Infinity) {\n if (!(expires instanceof Date)) {\n expires = parseDate(expires) || Infinity;\n }\n\n if (expires == Infinity) {\n return Infinity;\n }\n\n return expires.getTime() - (now || Date.now());\n }\n\n return Infinity;\n }\n\n // expiryTime() replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere)\n expiryTime(now) {\n if (this.maxAge != null) {\n const relativeTo = now || this.creation || new Date();\n const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000;\n return relativeTo.getTime() + age;\n }\n\n if (this.expires == Infinity) {\n return Infinity;\n }\n return this.expires.getTime();\n }\n\n // expiryDate() replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere), except it returns a Date\n expiryDate(now) {\n const millisec = this.expiryTime(now);\n if (millisec == Infinity) {\n return new Date(MAX_TIME);\n } else if (millisec == -Infinity) {\n return new Date(MIN_TIME);\n } else {\n return new Date(millisec);\n }\n }\n\n // This replaces the \"persistent-flag\" parts of S5.3 step 3\n isPersistent() {\n return this.maxAge != null || this.expires != Infinity;\n }\n\n // Mostly S5.1.2 and S5.2.3:\n canonicalizedDomain() {\n if (this.domain == null) {\n return null;\n }\n return canonicalDomain(this.domain);\n }\n\n cdomain() {\n return this.canonicalizedDomain();\n }\n}\n\nCookie.cookiesCreated = 0;\nCookie.parse = parse;\nCookie.fromJSON = fromJSON;\nCookie.serializableProperties = Object.keys(cookieDefaults);\nCookie.sameSiteLevel = {\n strict: 3,\n lax: 2,\n none: 1\n};\n\nCookie.sameSiteCanonical = {\n strict: \"Strict\",\n lax: \"Lax\"\n};\n\nfunction getNormalizedPrefixSecurity(prefixSecurity) {\n if (prefixSecurity != null) {\n const normalizedPrefixSecurity = prefixSecurity.toLowerCase();\n /* The three supported options */\n switch (normalizedPrefixSecurity) {\n case PrefixSecurityEnum.STRICT:\n case PrefixSecurityEnum.SILENT:\n case PrefixSecurityEnum.DISABLED:\n return normalizedPrefixSecurity;\n }\n }\n /* Default is SILENT */\n return PrefixSecurityEnum.SILENT;\n}\n\nclass CookieJar {\n constructor(store, options = { rejectPublicSuffixes: true }) {\n if (typeof options === \"boolean\") {\n options = { rejectPublicSuffixes: options };\n }\n this.rejectPublicSuffixes = options.rejectPublicSuffixes;\n this.enableLooseMode = !!options.looseMode;\n this.allowSpecialUseDomain = !!options.allowSpecialUseDomain;\n this.store = store || new MemoryCookieStore();\n this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity);\n this._cloneSync = syncWrap(\"clone\");\n this._importCookiesSync = syncWrap(\"_importCookies\");\n this.getCookiesSync = syncWrap(\"getCookies\");\n this.getCookieStringSync = syncWrap(\"getCookieString\");\n this.getSetCookieStringsSync = syncWrap(\"getSetCookieStrings\");\n this.removeAllCookiesSync = syncWrap(\"removeAllCookies\");\n this.setCookieSync = syncWrap(\"setCookie\");\n this.serializeSync = syncWrap(\"serialize\");\n }\n\n setCookie(cookie, url, options, cb) {\n let err;\n const context = getCookieContext(url);\n if (typeof options === \"function\") {\n cb = options;\n options = {};\n }\n\n const host = canonicalDomain(context.hostname);\n const loose = options.loose || this.enableLooseMode;\n\n let sameSiteContext = null;\n if (options.sameSiteContext) {\n sameSiteContext = checkSameSiteContext(options.sameSiteContext);\n if (!sameSiteContext) {\n return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));\n }\n }\n\n // S5.3 step 1\n if (typeof cookie === \"string\" || cookie instanceof String) {\n cookie = Cookie.parse(cookie, { loose: loose });\n if (!cookie) {\n err = new Error(\"Cookie failed to parse\");\n return cb(options.ignoreError ? 
null : err);\n }\n } else if (!(cookie instanceof Cookie)) {\n // If you're seeing this error, and are passing in a Cookie object,\n // it *might* be a Cookie object from another loaded version of tough-cookie.\n err = new Error(\n \"First argument to setCookie must be a Cookie object or string\"\n );\n return cb(options.ignoreError ? null : err);\n }\n\n // S5.3 step 2\n const now = options.now || new Date(); // will assign later to save effort in the face of errors\n\n // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie()\n\n // S5.3 step 4: NOOP; domain is null by default\n\n // S5.3 step 5: public suffixes\n if (this.rejectPublicSuffixes && cookie.domain) {\n const suffix = pubsuffix.getPublicSuffix(cookie.cdomain());\n if (suffix == null) {\n // e.g. \"com\"\n err = new Error(\"Cookie has domain set to a public suffix\");\n return cb(options.ignoreError ? null : err);\n }\n }\n\n // S5.3 step 6:\n if (cookie.domain) {\n if (!domainMatch(host, cookie.cdomain(), false)) {\n err = new Error(\n `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}`\n );\n return cb(options.ignoreError ? null : err);\n }\n\n if (cookie.hostOnly == null) {\n // don't reset if already set\n cookie.hostOnly = false;\n }\n } else {\n cookie.hostOnly = true;\n cookie.domain = host;\n }\n\n //S5.2.4 If the attribute-value is empty or if the first character of the\n //attribute-value is not %x2F (\"/\"):\n //Let cookie-path be the default-path.\n if (!cookie.path || cookie.path[0] !== \"/\") {\n cookie.path = defaultPath(context.pathname);\n cookie.pathIsDefault = true;\n }\n\n // S5.3 step 8: NOOP; secure attribute\n // S5.3 step 9: NOOP; httpOnly attribute\n\n // S5.3 step 10\n if (options.http === false && cookie.httpOnly) {\n err = new Error(\"Cookie is HttpOnly and this isn't an HTTP API\");\n return cb(options.ignoreError ? null : err);\n }\n\n // 6252bis-02 S5.4 Step 13 & 14:\n if (cookie.sameSite !== \"none\" && sameSiteContext) {\n // \"If the cookie's \"same-site-flag\" is not \"None\", and the cookie\n // is being set from a context whose \"site for cookies\" is not an\n // exact match for request-uri's host's registered domain, then\n // abort these steps and ignore the newly created cookie entirely.\"\n if (sameSiteContext === \"none\") {\n err = new Error(\n \"Cookie is SameSite but this is a cross-origin request\"\n );\n return cb(options.ignoreError ? null : err);\n }\n }\n\n /* 6265bis-02 S5.4 Steps 15 & 16 */\n const ignoreErrorForPrefixSecurity =\n this.prefixSecurity === PrefixSecurityEnum.SILENT;\n const prefixSecurityDisabled =\n this.prefixSecurity === PrefixSecurityEnum.DISABLED;\n /* If prefix checking is not disabled ...*/\n if (!prefixSecurityDisabled) {\n let errorFound = false;\n let errorMsg;\n /* Check secure prefix condition */\n if (!isSecurePrefixConditionMet(cookie)) {\n errorFound = true;\n errorMsg = \"Cookie has __Secure prefix but Secure attribute is not set\";\n } else if (!isHostPrefixConditionMet(cookie)) {\n /* Check host prefix condition */\n errorFound = true;\n errorMsg =\n \"Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'\";\n }\n if (errorFound) {\n return cb(\n options.ignoreError || ignoreErrorForPrefixSecurity\n ? 
null\n : new Error(errorMsg)\n );\n }\n }\n\n const store = this.store;\n\n if (!store.updateCookie) {\n store.updateCookie = function(oldCookie, newCookie, cb) {\n this.putCookie(newCookie, cb);\n };\n }\n\n function withCookie(err, oldCookie) {\n if (err) {\n return cb(err);\n }\n\n const next = function(err) {\n if (err) {\n return cb(err);\n } else {\n cb(null, cookie);\n }\n };\n\n if (oldCookie) {\n // S5.3 step 11 - \"If the cookie store contains a cookie with the same name,\n // domain, and path as the newly created cookie:\"\n if (options.http === false && oldCookie.httpOnly) {\n // step 11.2\n err = new Error(\"old Cookie is HttpOnly and this isn't an HTTP API\");\n return cb(options.ignoreError ? null : err);\n }\n cookie.creation = oldCookie.creation; // step 11.3\n cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker\n cookie.lastAccessed = now;\n // Step 11.4 (delete cookie) is implied by just setting the new one:\n store.updateCookie(oldCookie, cookie, next); // step 12\n } else {\n cookie.creation = cookie.lastAccessed = now;\n store.putCookie(cookie, next); // step 12\n }\n }\n\n store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie);\n }\n\n // RFC6365 S5.4\n getCookies(url, options, cb) {\n const context = getCookieContext(url);\n if (typeof options === \"function\") {\n cb = options;\n options = {};\n }\n\n const host = canonicalDomain(context.hostname);\n const path = context.pathname || \"/\";\n\n let secure = options.secure;\n if (\n secure == null &&\n context.protocol &&\n (context.protocol == \"https:\" || context.protocol == \"wss:\")\n ) {\n secure = true;\n }\n\n let sameSiteLevel = 0;\n if (options.sameSiteContext) {\n const sameSiteContext = checkSameSiteContext(options.sameSiteContext);\n sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext];\n if (!sameSiteLevel) {\n return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));\n }\n }\n\n let http = options.http;\n if (http == null) {\n http = true;\n }\n\n const now = options.now || Date.now();\n const expireCheck = options.expire !== false;\n const allPaths = !!options.allPaths;\n const store = this.store;\n\n function matchingCookie(c) {\n // \"Either:\n // The cookie's host-only-flag is true and the canonicalized\n // request-host is identical to the cookie's domain.\n // Or:\n // The cookie's host-only-flag is false and the canonicalized\n // request-host domain-matches the cookie's domain.\"\n if (c.hostOnly) {\n if (c.domain != host) {\n return false;\n }\n } else {\n if (!domainMatch(host, c.domain, false)) {\n return false;\n }\n }\n\n // \"The request-uri's path path-matches the cookie's path.\"\n if (!allPaths && !pathMatch(path, c.path)) {\n return false;\n }\n\n // \"If the cookie's secure-only-flag is true, then the request-uri's\n // scheme must denote a \"secure\" protocol\"\n if (c.secure && !secure) {\n return false;\n }\n\n // \"If the cookie's http-only-flag is true, then exclude the cookie if the\n // cookie-string is being generated for a \"non-HTTP\" API\"\n if (c.httpOnly && !http) {\n return false;\n }\n\n // RFC6265bis-02 S5.3.7\n if (sameSiteLevel) {\n const cookieLevel = Cookie.sameSiteLevel[c.sameSite || \"none\"];\n if (cookieLevel > sameSiteLevel) {\n // only allow cookies at or below the request level\n return false;\n }\n }\n\n // deferred from S5.3\n // non-RFC: allow retention of expired cookies by choice\n if (expireCheck && c.expiryTime() <= now) {\n store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored\n return false;\n }\n\n 
return true;\n }\n\n store.findCookies(\n host,\n allPaths ? null : path,\n this.allowSpecialUseDomain,\n (err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n cookies = cookies.filter(matchingCookie);\n\n // sorting of S5.4 part 2\n if (options.sort !== false) {\n cookies = cookies.sort(cookieCompare);\n }\n\n // S5.4 part 3\n const now = new Date();\n for (const cookie of cookies) {\n cookie.lastAccessed = now;\n }\n // TODO persist lastAccessed\n\n cb(null, cookies);\n }\n );\n }\n\n getCookieString(...args) {\n const cb = args.pop();\n const next = function(err, cookies) {\n if (err) {\n cb(err);\n } else {\n cb(\n null,\n cookies\n .sort(cookieCompare)\n .map(c => c.cookieString())\n .join(\"; \")\n );\n }\n };\n args.push(next);\n this.getCookies.apply(this, args);\n }\n\n getSetCookieStrings(...args) {\n const cb = args.pop();\n const next = function(err, cookies) {\n if (err) {\n cb(err);\n } else {\n cb(\n null,\n cookies.map(c => {\n return c.toString();\n })\n );\n }\n };\n args.push(next);\n this.getCookies.apply(this, args);\n }\n\n serialize(cb) {\n let type = this.store.constructor.name;\n if (type === \"Object\") {\n type = null;\n }\n\n // update README.md \"Serialization Format\" if you change this, please!\n const serialized = {\n // The version of tough-cookie that serialized this jar. Generally a good\n // practice since future versions can make data import decisions based on\n // known past behavior. When/if this matters, use `semver`.\n version: `tough-cookie@${VERSION}`,\n\n // add the store type, to make humans happy:\n storeType: type,\n\n // CookieJar configuration:\n rejectPublicSuffixes: !!this.rejectPublicSuffixes,\n\n // this gets filled from getAllCookies:\n cookies: []\n };\n\n if (\n !(\n this.store.getAllCookies &&\n typeof this.store.getAllCookies === \"function\"\n )\n ) {\n return cb(\n new Error(\n \"store does not support getAllCookies and cannot be serialized\"\n )\n );\n }\n\n this.store.getAllCookies((err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n serialized.cookies = cookies.map(cookie => {\n // convert to serialized 'raw' cookies\n cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie;\n\n // Remove the index so new ones get assigned during deserialization\n delete cookie.creationIndex;\n\n return cookie;\n });\n\n return cb(null, serialized);\n });\n }\n\n toJSON() {\n return this.serializeSync();\n }\n\n // use the class method CookieJar.deserialize instead of calling this directly\n _importCookies(serialized, cb) {\n let cookies = serialized.cookies;\n if (!cookies || !Array.isArray(cookies)) {\n return cb(new Error(\"serialized jar has no cookies array\"));\n }\n cookies = cookies.slice(); // do not modify the original\n\n const putNext = err => {\n if (err) {\n return cb(err);\n }\n\n if (!cookies.length) {\n return cb(err, this);\n }\n\n let cookie;\n try {\n cookie = fromJSON(cookies.shift());\n } catch (e) {\n return cb(e);\n }\n\n if (cookie === null) {\n return putNext(null); // skip this cookie\n }\n\n this.store.putCookie(cookie, putNext);\n };\n\n putNext();\n }\n\n clone(newStore, cb) {\n if (arguments.length === 1) {\n cb = newStore;\n newStore = null;\n }\n\n this.serialize((err, serialized) => {\n if (err) {\n return cb(err);\n }\n CookieJar.deserialize(serialized, newStore, cb);\n });\n }\n\n cloneSync(newStore) {\n if (arguments.length === 0) {\n return this._cloneSync();\n }\n if (!newStore.synchronous) {\n throw new Error(\n \"CookieJar clone destination store is not synchronous; use async API instead.\"\n );\n }\n return this._cloneSync(newStore);\n }\n\n removeAllCookies(cb) {\n const store = this.store;\n\n // Check that the store implements its own removeAllCookies(). The default\n // implementation in Store will immediately call the callback with a \"not\n // implemented\" Error.\n if (\n typeof store.removeAllCookies === \"function\" &&\n store.removeAllCookies !== Store.prototype.removeAllCookies\n ) {\n return store.removeAllCookies(cb);\n }\n\n store.getAllCookies((err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n if (cookies.length === 0) {\n return cb(null);\n }\n\n let completedCount = 0;\n const removeErrors = [];\n\n function removeCookieCb(removeErr) {\n if (removeErr) {\n removeErrors.push(removeErr);\n }\n\n completedCount++;\n\n if (completedCount === cookies.length) {\n return cb(removeErrors.length ? removeErrors[0] : null);\n }\n }\n\n cookies.forEach(cookie => {\n store.removeCookie(\n cookie.domain,\n cookie.path,\n cookie.key,\n removeCookieCb\n );\n });\n });\n }\n\n static deserialize(strOrObj, store, cb) {\n if (arguments.length !== 3) {\n // store is optional\n cb = store;\n store = null;\n }\n\n let serialized;\n if (typeof strOrObj === \"string\") {\n serialized = jsonParse(strOrObj);\n if (serialized instanceof Error) {\n return cb(serialized);\n }\n } else {\n serialized = strOrObj;\n }\n\n const jar = new CookieJar(store, serialized.rejectPublicSuffixes);\n jar._importCookies(serialized, err => {\n if (err) {\n return cb(err);\n }\n cb(null, jar);\n });\n }\n\n static deserializeSync(strOrObj, store) {\n const serialized =\n typeof strOrObj === \"string\" ? 
JSON.parse(strOrObj) : strOrObj;\n const jar = new CookieJar(store, serialized.rejectPublicSuffixes);\n\n // catch this mistake early:\n if (!jar.store.synchronous) {\n throw new Error(\n \"CookieJar store is not synchronous; use async API instead.\"\n );\n }\n\n jar._importCookiesSync(serialized);\n return jar;\n }\n}\nCookieJar.fromJSON = CookieJar.deserializeSync;\n\n[\n \"_importCookies\",\n \"clone\",\n \"getCookies\",\n \"getCookieString\",\n \"getSetCookieStrings\",\n \"removeAllCookies\",\n \"serialize\",\n \"setCookie\"\n].forEach(name => {\n CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]);\n});\nCookieJar.deserialize = fromCallback(CookieJar.deserialize);\n\n// Use a closure to provide a true imperative API for synchronous stores.\nfunction syncWrap(method) {\n return function(...args) {\n if (!this.store.synchronous) {\n throw new Error(\n \"CookieJar store is not synchronous; use async API instead.\"\n );\n }\n\n let syncErr, syncResult;\n this[method](...args, (err, result) => {\n syncErr = err;\n syncResult = result;\n });\n\n if (syncErr) {\n throw syncErr;\n }\n return syncResult;\n };\n}\n\nexports.version = VERSION;\nexports.CookieJar = CookieJar;\nexports.Cookie = Cookie;\nexports.Store = Store;\nexports.MemoryCookieStore = MemoryCookieStore;\nexports.parseDate = parseDate;\nexports.formatDate = formatDate;\nexports.parse = parse;\nexports.fromJSON = fromJSON;\nexports.domainMatch = domainMatch;\nexports.defaultPath = defaultPath;\nexports.pathMatch = pathMatch;\nexports.getPublicSuffix = pubsuffix.getPublicSuffix;\nexports.cookieCompare = cookieCompare;\nexports.permuteDomain = require(\"./permuteDomain\").permuteDomain;\nexports.permutePath = permutePath;\nexports.canonicalDomain = canonicalDomain;\nexports.PrefixSecurityEnum = PrefixSecurityEnum;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst { fromCallback } = require(\"universalify\");\nconst Store = require(\"./store\").Store;\nconst permuteDomain = require(\"./permuteDomain\").permuteDomain;\nconst pathMatch = require(\"./pathMatch\").pathMatch;\nconst util = require(\"util\");\n\nclass MemoryCookieStore extends Store {\n constructor() {\n super();\n this.synchronous = true;\n this.idx = {};\n if (util.inspect.custom) {\n this[util.inspect.custom] = this.inspect;\n }\n }\n\n inspect() {\n return `{ idx: ${util.inspect(this.idx, false, 2)} }`;\n }\n\n findCookie(domain, path, key, cb) {\n if (!this.idx[domain]) {\n return cb(null, undefined);\n }\n if (!this.idx[domain][path]) {\n return cb(null, undefined);\n }\n return cb(null, this.idx[domain][path][key] || null);\n }\n findCookies(domain, path, allowSpecialUseDomain, cb) {\n const results = [];\n if (typeof allowSpecialUseDomain === \"function\") {\n cb = allowSpecialUseDomain;\n allowSpecialUseDomain = false;\n }\n if (!domain) {\n return cb(null, []);\n }\n\n let pathMatcher;\n if (!path) {\n // null means \"all paths\"\n pathMatcher = function matchAll(domainIndex) {\n for (const curPath in domainIndex) {\n const pathIndex = domainIndex[curPath];\n for (const key in pathIndex) {\n results.push(pathIndex[key]);\n }\n }\n };\n } else {\n pathMatcher = function matchRFC(domainIndex) {\n //NOTE: we should use path-match algorithm from S5.1.4 here\n //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)\n Object.keys(domainIndex).forEach(cookiePath => {\n if (pathMatch(path, cookiePath)) {\n const pathIndex = domainIndex[cookiePath];\n for (const key in pathIndex) {\n results.push(pathIndex[key]);\n }\n }\n });\n };\n }\n\n const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];\n const idx = this.idx;\n domains.forEach(curDomain => {\n const domainIndex = idx[curDomain];\n if (!domainIndex) {\n return;\n }\n pathMatcher(domainIndex);\n });\n\n cb(null, results);\n }\n\n putCookie(cookie, cb) {\n if (!this.idx[cookie.domain]) {\n this.idx[cookie.domain] = {};\n }\n if (!this.idx[cookie.domain][cookie.path]) {\n this.idx[cookie.domain][cookie.path] = {};\n }\n this.idx[cookie.domain][cookie.path][cookie.key] = cookie;\n cb(null);\n }\n updateCookie(oldCookie, newCookie, cb) {\n // updateCookie() may avoid updating cookies that are identical. 
For example,\n // lastAccessed may not be important to some stores and an equality\n // comparison could exclude that field.\n this.putCookie(newCookie, cb);\n }\n removeCookie(domain, path, key, cb) {\n if (\n this.idx[domain] &&\n this.idx[domain][path] &&\n this.idx[domain][path][key]\n ) {\n delete this.idx[domain][path][key];\n }\n cb(null);\n }\n removeCookies(domain, path, cb) {\n if (this.idx[domain]) {\n if (path) {\n delete this.idx[domain][path];\n } else {\n delete this.idx[domain];\n }\n }\n return cb(null);\n }\n removeAllCookies(cb) {\n this.idx = {};\n return cb(null);\n }\n getAllCookies(cb) {\n const cookies = [];\n const idx = this.idx;\n\n const domains = Object.keys(idx);\n domains.forEach(domain => {\n const paths = Object.keys(idx[domain]);\n paths.forEach(path => {\n const keys = Object.keys(idx[domain][path]);\n keys.forEach(key => {\n if (key !== null) {\n cookies.push(idx[domain][path][key]);\n }\n });\n });\n });\n\n // Sort by creationIndex so deserializing retains the creation order.\n // When implementing your own store, this SHOULD retain the order too\n cookies.sort((a, b) => {\n return (a.creationIndex || 0) - (b.creationIndex || 0);\n });\n\n cb(null, cookies);\n }\n}\n\n[\n \"findCookie\",\n \"findCookies\",\n \"putCookie\",\n \"updateCookie\",\n \"removeCookie\",\n \"removeCookies\",\n \"removeAllCookies\",\n \"getAllCookies\"\n].forEach(name => {\n MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]);\n});\n\nexports.MemoryCookieStore = MemoryCookieStore;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\n/*\n * \"A request-path path-matches a given cookie-path if at least one of the\n * following conditions holds:\"\n */\nfunction pathMatch(reqPath, cookiePath) {\n // \"o The cookie-path and the request-path are identical.\"\n if (cookiePath === reqPath) {\n return true;\n }\n\n const idx = reqPath.indexOf(cookiePath);\n if (idx === 0) {\n // \"o The cookie-path is a prefix of the request-path, and the last\n // character of the cookie-path is %x2F (\"/\").\"\n if (cookiePath.substr(-1) === \"/\") {\n return true;\n }\n\n // \" o The cookie-path is a prefix of the request-path, and the first\n // character of the request-path that is not included in the cookie- path\n // is a %x2F (\"/\") character.\"\n if (reqPath.substr(cookiePath.length, 1) === \"/\") {\n return true;\n }\n }\n\n return false;\n}\n\nexports.pathMatch = pathMatch;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst pubsuffix = require(\"./pubsuffix-psl\");\n\n// Gives the permutation of all possible domainMatch()es of a given domain. The\n// array is in shortest-to-longest order. 
Handy for indexing.\nconst SPECIAL_USE_DOMAINS = [\"local\"]; // RFC 6761\nfunction permuteDomain(domain, allowSpecialUseDomain) {\n let pubSuf = null;\n if (allowSpecialUseDomain) {\n const domainParts = domain.split(\".\");\n if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) {\n pubSuf = `${domainParts[domainParts.length - 2]}.${\n domainParts[domainParts.length - 1]\n }`;\n } else {\n pubSuf = pubsuffix.getPublicSuffix(domain);\n }\n } else {\n pubSuf = pubsuffix.getPublicSuffix(domain);\n }\n\n if (!pubSuf) {\n return null;\n }\n if (pubSuf == domain) {\n return [domain];\n }\n\n const prefix = domain.slice(0, -(pubSuf.length + 1)); // \".example.com\"\n const parts = prefix.split(\".\").reverse();\n let cur = pubSuf;\n const permutations = [cur];\n while (parts.length) {\n cur = `${parts.shift()}.${cur}`;\n permutations.push(cur);\n }\n return permutations;\n}\n\nexports.permuteDomain = permuteDomain;\n","/*!\n * Copyright (c) 2018, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst psl = require(\"psl\");\n\nfunction getPublicSuffix(domain) {\n return psl.get(domain);\n}\n\nexports.getPublicSuffix = getPublicSuffix;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. 
Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\n/*jshint unused:false */\n\nclass Store {\n constructor() {\n this.synchronous = false;\n }\n\n findCookie(domain, path, key, cb) {\n throw new Error(\"findCookie is not implemented\");\n }\n\n findCookies(domain, path, allowSpecialUseDomain, cb) {\n throw new Error(\"findCookies is not implemented\");\n }\n\n putCookie(cookie, cb) {\n throw new Error(\"putCookie is not implemented\");\n }\n\n updateCookie(oldCookie, newCookie, cb) {\n // recommended default implementation:\n // return this.putCookie(newCookie, cb);\n throw new Error(\"updateCookie is not implemented\");\n }\n\n removeCookie(domain, path, key, cb) {\n throw new Error(\"removeCookie is not implemented\");\n }\n\n removeCookies(domain, path, cb) {\n throw new Error(\"removeCookies is not implemented\");\n }\n\n removeAllCookies(cb) {\n throw new Error(\"removeAllCookies is not implemented\");\n }\n\n getAllCookies(cb) {\n throw new Error(\n \"getAllCookies is not implemented (therefore jar cannot be serialized)\"\n );\n }\n}\n\nexports.Store = Store;\n","// generated by genversion\nmodule.exports = '4.0.0'\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","/*!\n * Copyright (c) Microsoft and contributors. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n * \n * Azure Core LRO SDK for JavaScript - 1.0.5\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nvar PollerStoppedError = /** @class */ (function (_super) {\n tslib.__extends(PollerStoppedError, _super);\n function PollerStoppedError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(_this, PollerStoppedError.prototype);\n return _this;\n }\n return PollerStoppedError;\n}(Error));\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nvar PollerCancelledError = /** @class */ (function (_super) {\n tslib.__extends(PollerCancelledError, _super);\n function PollerCancelledError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(_this, PollerCancelledError.prototype);\n return _this;\n }\n return PollerCancelledError;\n}(Error));\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = 
new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nvar Poller = /** @class */ (function () {\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n function Poller(operation) {\n var _this = this;\n this.stopped = true;\n this.pollProgressCallbacks = [];\n this.operation = operation;\n this.promise = new Promise(function (resolve, reject) {\n _this.resolve = resolve;\n _this.reject = reject;\n });\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(function () {\n /* intentionally blank */\n });\n }\n /**\n * @internal\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n Poller.prototype.startPolling = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (this.stopped) {\n this.stopped = false;\n }\n _a.label = 1;\n case 1:\n if (!(!this.isStopped() && !this.isDone())) return [3 /*break*/, 4];\n return [4 /*yield*/, this.poll()];\n case 2:\n _a.sent();\n return [4 /*yield*/, this.delay()];\n case 3:\n _a.sent();\n return [3 /*break*/, 1];\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * @internal\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.pollOnce = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _b.trys.push([0, 3, , 4]);\n if (!!this.isDone()) return [3 /*break*/, 2];\n _a = this;\n return [4 /*yield*/, this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this)\n })];\n case 1:\n _a.operation = _b.sent();\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.operation.state.result);\n }\n _b.label = 2;\n case 2: return [3 /*break*/, 4];\n case 3:\n e_1 = _b.sent();\n this.operation.state.error = e_1;\n if (this.reject) {\n this.reject(e_1);\n }\n throw e_1;\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * @internal\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n Poller.prototype.fireProgress = function (state) {\n for (var _i = 0, _a = this.pollProgressCallbacks; _i < _a.length; _i++) {\n var callback = _a[_i];\n callback(state);\n }\n };\n /**\n * @internal\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n Poller.prototype.cancelOnce = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = this;\n return [4 /*yield*/, this.operation.cancel(options)];\n case 1:\n _a.operation = _b.sent();\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.poll = function (options) {\n var _this = this;\n if (options === void 0) { options = {}; }\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n var clearPollOncePromise = function () {\n _this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n };\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n Poller.prototype.pollUntilDone = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return [2 /*return*/, this.promise];\n });\n });\n };\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n Poller.prototype.onProgress = function (callback) {\n var _this = this;\n this.pollProgressCallbacks.push(callback);\n return function () {\n _this.pollProgressCallbacks = _this.pollProgressCallbacks.filter(function (c) { return c !== callback; });\n };\n };\n /**\n * Returns true if the poller has finished polling.\n */\n Poller.prototype.isDone = function () {\n var state = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n };\n /**\n * Stops the poller from continuing to poll.\n */\n Poller.prototype.stopPolling = function () {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n 
this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n };\n /**\n * Returns true if the poller is stopped.\n */\n Poller.prototype.isStopped = function () {\n return this.stopped;\n };\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.cancelOperation = function (options) {\n if (options === void 0) { options = {}; }\n if (!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n }\n else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n };\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n Poller.prototype.getOperationState = function () {\n return this.operation.state;\n };\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n Poller.prototype.getResult = function () {\n var state = this.operation.state;\n return state.result;\n };\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n Poller.prototype.toString = function () {\n return this.operation.toString();\n };\n return Poller;\n}());\n\nexports.Poller = Poller;\nexports.PollerCancelledError = PollerCancelledError;\nexports.PollerStoppedError = PollerStoppedError;\n//# sourceMappingURL=index.js.map\n","/*! 
*****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","\"use strict\";\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nrequire(\"@azure/core-asynciterator-polyfill\");\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar api = require('@opentelemetry/api');\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A no-op implementation of Span that can safely be used without side-effects.\n */\nvar NoOpSpan = /** @class */ (function () {\n function NoOpSpan() {\n }\n /**\n * Returns the SpanContext associated with this Span.\n */\n NoOpSpan.prototype.context = function () {\n return {\n spanId: \"\",\n traceId: \"\",\n traceFlags: api.TraceFlags.NONE\n };\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n NoOpSpan.prototype.end = function (_endTime) {\n /* Noop */\n };\n /**\n * Sets an attribute on the Span\n * @param _key - The attribute key\n * @param _value - The attribute value\n */\n NoOpSpan.prototype.setAttribute = function (_key, _value) {\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param _attributes - The attributes to add\n */\n NoOpSpan.prototype.setAttributes = function (_attributes) {\n return this;\n };\n /**\n * Adds an event to the Span\n * @param _name - The name of the event\n * @param _attributes - The associated attributes to add for this event\n */\n NoOpSpan.prototype.addEvent = function (_name, _attributes) {\n return this;\n };\n /**\n * Sets a status on the span. 
Overrides the default of CanonicalCode.OK.\n * @param _status - The status to set.\n */\n NoOpSpan.prototype.setStatus = function (_status) {\n return this;\n };\n /**\n * Updates the name of the Span\n * @param _name - the new Span name\n */\n NoOpSpan.prototype.updateName = function (_name) {\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n NoOpSpan.prototype.isRecording = function () {\n return false;\n };\n return NoOpSpan;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A no-op implementation of Tracer that can be used when tracing\n * is disabled.\n */\nvar NoOpTracer = /** @class */ (function () {\n function NoOpTracer() {\n }\n /**\n * Starts a new Span.\n * @param _name - The name of the span.\n * @param _options - The SpanOptions used during Span creation.\n */\n NoOpTracer.prototype.startSpan = function (_name, _options) {\n return new NoOpSpan();\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n NoOpTracer.prototype.getCurrentSpan = function () {\n return new NoOpSpan();\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span - The span that provides the context.\n * @param fn - The function to be executed.\n */\n NoOpTracer.prototype.withSpan = function (_span, fn) {\n return fn();\n };\n /**\n * Bind a Span as the target's scope\n * @param target - An object to bind the scope.\n * @param _span - A specific Span to use. Otherwise, use the current one.\n */\n NoOpTracer.prototype.bind = function (target, _span) {\n return target;\n };\n return NoOpTracer;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nfunction getGlobalObject() {\n return global;\n}\n\n// Copyright (c) Microsoft Corporation.\n// V1 = OpenTelemetry 0.1\n// V2 = OpenTelemetry 0.2\n// V3 = OpenTelemetry 0.6.1\nvar GLOBAL_TRACER_VERSION = 3;\n// preview5 shipped with @azure/core-tracing.tracerCache\n// and didn't have smart detection for collisions\nvar GLOBAL_TRACER_SYMBOL = Symbol.for(\"@azure/core-tracing.tracerCache2\");\nvar cache;\nfunction loadTracerCache() {\n var globalObj = getGlobalObject();\n var existingCache = globalObj[GLOBAL_TRACER_SYMBOL];\n var setGlobalCache = true;\n if (existingCache) {\n if (existingCache.version === GLOBAL_TRACER_VERSION) {\n cache = existingCache;\n }\n else {\n setGlobalCache = false;\n if (existingCache.tracer) {\n throw new Error(\"Two incompatible versions of @azure/core-tracing have been loaded.\\n This library is \" + GLOBAL_TRACER_VERSION + \", existing is \" + existingCache.version + \".\");\n }\n }\n }\n if (!cache) {\n cache = {\n tracer: undefined,\n version: GLOBAL_TRACER_VERSION\n };\n }\n if (setGlobalCache) {\n globalObj[GLOBAL_TRACER_SYMBOL] = cache;\n }\n}\nfunction getCache() {\n if (!cache) {\n loadTracerCache();\n }\n return cache;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar defaultTracer;\nfunction getDefaultTracer() {\n if (!defaultTracer) {\n defaultTracer = new NoOpTracer();\n }\n return defaultTracer;\n}\n/**\n * Sets the global tracer, enabling tracing for the Azure SDK.\n * @param tracer - An OpenTelemetry Tracer instance.\n */\nfunction setTracer(tracer) {\n var cache = getCache();\n cache.tracer = tracer;\n}\n/**\n * Retrieves the active tracer, or returns a\n * no-op implementation if one is not set.\n */\nfunction getTracer() {\n var cache = getCache();\n if (!cache.tracer) {\n return getDefaultTracer();\n }\n return cache.tracer;\n}\n\n// Copyright (c) Microsoft 
Corporation.\n// Licensed under the MIT license.\n/**\n * @internal\n */\nvar OpenCensusTraceStateWrapper = /** @class */ (function () {\n function OpenCensusTraceStateWrapper(state) {\n this._state = state;\n }\n OpenCensusTraceStateWrapper.prototype.get = function (_key) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.unset = function (_key) {\n throw new Error(\"Method not implemented\");\n };\n OpenCensusTraceStateWrapper.prototype.serialize = function () {\n return this._state || \"\";\n };\n return OpenCensusTraceStateWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\nfunction isWrappedSpan(span) {\n return !!span && span.getWrappedSpan !== undefined;\n}\nfunction isTracer(tracerOrSpan) {\n return tracerOrSpan.getWrappedTracer !== undefined;\n}\n/**\n * An implementation of OpenTelemetry Span that wraps an OpenCensus Span.\n */\nvar OpenCensusSpanWrapper = /** @class */ (function () {\n function OpenCensusSpanWrapper(tracerOrSpan, name, options) {\n if (name === void 0) { name = \"\"; }\n if (options === void 0) { options = {}; }\n if (isTracer(tracerOrSpan)) {\n var parent = isWrappedSpan(options.parent) ? options.parent.getWrappedSpan() : undefined;\n this._span = tracerOrSpan.getWrappedTracer().startChildSpan({\n name: name,\n childOf: parent\n });\n this._span.start();\n if (options.links) {\n for (var _i = 0, _a = options.links; _i < _a.length; _i++) {\n var link = _a[_i];\n // Since there is no way to set the link relationship, leave it as Unspecified.\n this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes);\n }\n }\n }\n else {\n this._span = tracerOrSpan;\n }\n }\n /**\n * The underlying OpenCensus Span\n */\n OpenCensusSpanWrapper.prototype.getWrappedSpan = function () {\n return this._span;\n };\n /**\n * Marks the end of Span execution.\n * @param endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n OpenCensusSpanWrapper.prototype.end = function (_endTime) {\n this._span.end();\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n OpenCensusSpanWrapper.prototype.context = function () {\n var openCensusSpanContext = this._span.spanContext;\n return {\n spanId: openCensusSpanContext.spanId,\n traceId: openCensusSpanContext.traceId,\n traceFlags: openCensusSpanContext.options,\n traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState)\n };\n };\n /**\n * Sets an attribute on the Span\n * @param key - The attribute key\n * @param value - The attribute value\n */\n OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) {\n this._span.addAttribute(key, value);\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes - The attributes to add\n */\n OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) {\n this._span.attributes = attributes;\n return this;\n };\n /**\n * Adds an event to the Span\n * @param name - The name of the event\n * @param attributes - The associated attributes to add for this event\n */\n OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Sets a status on the span. 
Overrides the default of CanonicalCode.OK.\n * @param status - The status to set.\n */\n OpenCensusSpanWrapper.prototype.setStatus = function (status) {\n this._span.setStatus(status.code, status.message);\n return this;\n };\n /**\n * Updates the name of the Span\n * @param name - The new Span name\n */\n OpenCensusSpanWrapper.prototype.updateName = function (name) {\n this._span.name = name;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n OpenCensusSpanWrapper.prototype.isRecording = function () {\n // NoRecordSpans have an empty traceId\n return !!this._span.traceId;\n };\n return OpenCensusSpanWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer.\n */\nvar OpenCensusTracerWrapper = /** @class */ (function () {\n /**\n * Create a new wrapper around a given OpenCensus Tracer.\n * @param tracer - The OpenCensus Tracer to wrap.\n */\n function OpenCensusTracerWrapper(tracer) {\n this._tracer = tracer;\n }\n /**\n * The wrapped OpenCensus Tracer\n */\n OpenCensusTracerWrapper.prototype.getWrappedTracer = function () {\n return this._tracer;\n };\n /**\n * Starts a new Span.\n * @param name - The name of the span.\n * @param options - The SpanOptions used during Span creation.\n */\n OpenCensusTracerWrapper.prototype.startSpan = function (name, options) {\n return new OpenCensusSpanWrapper(this, name, options);\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n OpenCensusTracerWrapper.prototype.getCurrentSpan = function () {\n return undefined;\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span - The span that provides the context.\n * @param _fn - The function to be executed.\n */\n OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Bind a Span as the target's scope\n * @param target - An object to bind the scope.\n * @param _span - A specific Span to use. 
Otherwise, use the current one.\n */\n OpenCensusTracerWrapper.prototype.bind = function (_target, _span) {\n throw new Error(\"Method not implemented.\");\n };\n return OpenCensusTracerWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock span useful for testing.\n */\nvar TestSpan = /** @class */ (function (_super) {\n tslib.__extends(TestSpan, _super);\n /**\n * Starts a new Span.\n * @param parentTracer- The tracer that created this Span\n * @param name - The name of the span.\n * @param context - The SpanContext this span belongs to\n * @param kind - The SpanKind of this Span\n * @param parentSpanId - The identifier of the parent Span\n * @param startTime - The startTime of the event (defaults to now)\n */\n function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) {\n if (startTime === void 0) { startTime = Date.now(); }\n var _this = _super.call(this) || this;\n _this._tracer = parentTracer;\n _this.name = name;\n _this.kind = kind;\n _this.startTime = startTime;\n _this.parentSpanId = parentSpanId;\n _this.status = {\n code: api.CanonicalCode.OK\n };\n _this.endCalled = false;\n _this._context = context;\n _this.attributes = {};\n return _this;\n }\n /**\n * Returns the Tracer that created this Span\n */\n TestSpan.prototype.tracer = function () {\n return this._tracer;\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n TestSpan.prototype.context = function () {\n return this._context;\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime - The time to use as the Span's end time. Defaults to\n * the current time.\n */\n TestSpan.prototype.end = function (_endTime) {\n this.endCalled = true;\n };\n /**\n * Sets a status on the span. Overrides the default of CanonicalCode.OK.\n * @param status - The status to set.\n */\n TestSpan.prototype.setStatus = function (status) {\n this.status = status;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n TestSpan.prototype.isRecording = function () {\n return true;\n };\n /**\n * Sets an attribute on the Span\n * @param key - The attribute key\n * @param value - The attribute value\n */\n TestSpan.prototype.setAttribute = function (key, value) {\n this.attributes[key] = value;\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes - The attributes to add\n */\n TestSpan.prototype.setAttributes = function (attributes) {\n for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) {\n var key = _a[_i];\n this.attributes[key] = attributes[key];\n }\n return this;\n };\n return TestSpan;\n}(NoOpSpan));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock tracer useful for testing\n */\nvar TestTracer = /** @class */ (function (_super) {\n tslib.__extends(TestTracer, _super);\n function TestTracer() {\n var _this = _super !== null && _super.apply(this, arguments) || this;\n _this.traceIdCounter = 0;\n _this.spanIdCounter = 0;\n _this.rootSpans = [];\n _this.knownSpans = [];\n return _this;\n }\n TestTracer.prototype.getNextTraceId = function () {\n this.traceIdCounter++;\n return String(this.traceIdCounter);\n };\n TestTracer.prototype.getNextSpanId = function () {\n this.spanIdCounter++;\n return String(this.spanIdCounter);\n };\n /**\n * Returns all Spans that were created without a parent\n */\n TestTracer.prototype.getRootSpans = function () {\n return this.rootSpans;\n };\n /**\n * Returns all Spans this Tracer knows about\n */\n TestTracer.prototype.getKnownSpans = function () {\n return 
this.knownSpans;\n };\n /**\n * Returns all Spans where end() has not been called\n */\n TestTracer.prototype.getActiveSpans = function () {\n return this.knownSpans.filter(function (span) {\n return !span.endCalled;\n });\n };\n /**\n * Return all Spans for a particular trace, grouped by their\n * parent Span in a tree-like structure\n * @param traceId - The traceId to return the graph for\n */\n TestTracer.prototype.getSpanGraph = function (traceId) {\n var traceSpans = this.knownSpans.filter(function (span) {\n return span.context().traceId === traceId;\n });\n var roots = [];\n var nodeMap = new Map();\n for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) {\n var span = traceSpans_1[_i];\n var spanId = span.context().spanId;\n var node = {\n name: span.name,\n children: []\n };\n nodeMap.set(spanId, node);\n if (span.parentSpanId) {\n var parent = nodeMap.get(span.parentSpanId);\n if (!parent) {\n throw new Error(\"Span with name \" + node.name + \" has an unknown parentSpan with id \" + span.parentSpanId);\n }\n parent.children.push(node);\n }\n else {\n roots.push(node);\n }\n }\n return {\n roots: roots\n };\n };\n /**\n * Starts a new Span.\n * @param name - The name of the span.\n * @param options - The SpanOptions used during Span creation.\n */\n TestTracer.prototype.startSpan = function (name, options) {\n if (options === void 0) { options = {}; }\n var parentContext = this._getParentContext(options);\n var traceId;\n var isRootSpan = false;\n if (parentContext && parentContext.traceId) {\n traceId = parentContext.traceId;\n }\n else {\n traceId = this.getNextTraceId();\n isRootSpan = true;\n }\n var context = {\n traceId: traceId,\n spanId: this.getNextSpanId(),\n traceFlags: api.TraceFlags.NONE\n };\n var span = new TestSpan(this, name, context, options.kind || api.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options.startTime);\n this.knownSpans.push(span);\n if (isRootSpan) {\n this.rootSpans.push(span);\n }\n return span;\n };\n TestTracer.prototype._getParentContext = function (options) {\n var parent = options.parent;\n var result;\n if (parent) {\n if (\"traceId\" in parent) {\n result = parent;\n }\n else {\n result = parent.context();\n }\n }\n return result;\n };\n return TestTracer;\n}(NoOpTracer));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nfunction createSpanFunction(args) {\n return function (operationName, operationOptions) {\n var tracer = getTracer();\n var tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {};\n var spanOptions = tslib.__assign({ kind: api.SpanKind.INTERNAL }, tracingOptions.spanOptions);\n var spanName = args.packagePrefix ? 
args.packagePrefix + \".\" + operationName : operationName;\n var span = tracer.startSpan(spanName, spanOptions);\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n var newSpanOptions = tracingOptions.spanOptions || {};\n if (span.isRecording() && args.namespace) {\n newSpanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { parent: span.context(), attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { \"az.namespace\": args.namespace }) });\n }\n var newTracingOptions = tslib.__assign(tslib.__assign({}, tracingOptions), { spanOptions: newSpanOptions });\n var newOperationOptions = tslib.__assign(tslib.__assign({}, operationOptions), { tracingOptions: newTracingOptions });\n return {\n span: span,\n updatedOptions: newOperationOptions\n };\n };\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar VERSION = \"00\";\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nfunction extractSpanContextFromTraceParentHeader(traceParentHeader) {\n var parts = traceParentHeader.split(\"-\");\n if (parts.length !== 4) {\n return;\n }\n var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3];\n if (version !== VERSION) {\n return;\n }\n var traceFlags = parseInt(traceOptions, 16);\n var spanContext = {\n spanId: spanId,\n traceId: traceId,\n traceFlags: traceFlags\n };\n return spanContext;\n}\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nfunction getTraceParentHeader(spanContext) {\n var missingFields = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n if (missingFields.length) {\n return;\n }\n var flags = spanContext.traceFlags || 0 /* NONE */;\n var hexFlags = flags.toString(16);\n var traceFlags = hexFlags.length === 1 ? \"0\" + hexFlags : hexFlags;\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return VERSION + \"-\" + spanContext.traceId + \"-\" + spanContext.spanId + \"-\" + traceFlags;\n}\n\nexports.NoOpSpan = NoOpSpan;\nexports.NoOpTracer = NoOpTracer;\nexports.OpenCensusSpanWrapper = OpenCensusSpanWrapper;\nexports.OpenCensusTracerWrapper = OpenCensusTracerWrapper;\nexports.TestSpan = TestSpan;\nexports.TestTracer = TestTracer;\nexports.createSpanFunction = createSpanFunction;\nexports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader;\nexports.getTraceParentHeader = getTraceParentHeader;\nexports.getTracer = getTracer;\nexports.setTracer = setTracer;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar tslib = require('tslib');\nvar util = _interopDefault(require('util'));\nvar os = require('os');\n\n// Copyright (c) Microsoft Corporation.\nfunction log(message) {\n var args = [];\n for (var _i = 1; _i < arguments.length; _i++) {\n args[_i - 1] = arguments[_i];\n }\n process.stderr.write(\"\" + util.format.apply(util, tslib.__spread([message], args)) + os.EOL);\n}\n\n// Copyright (c) Microsoft Corporation.\nvar debugEnvVariable = (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\nvar enabledString;\nvar enabledNamespaces = [];\nvar skippedNamespaces = [];\nvar debuggers = [];\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\nvar debugObj = Object.assign(function (namespace) {\n return createDebugger(namespace);\n}, {\n enable: enable,\n enabled: enabled,\n disable: disable,\n log: log\n});\nfunction enable(namespaces) {\n var e_1, _a, e_2, _b;\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n var wildcard = /\\*/g;\n var namespaceList = namespaces.split(\",\").map(function (ns) { return ns.trim().replace(wildcard, \".*?\"); });\n try {\n for (var namespaceList_1 = tslib.__values(namespaceList), namespaceList_1_1 = namespaceList_1.next(); !namespaceList_1_1.done; namespaceList_1_1 = namespaceList_1.next()) {\n var ns = namespaceList_1_1.value;\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(\"^\" + ns.substr(1) + \"$\"));\n }\n else {\n enabledNamespaces.push(new RegExp(\"^\" + ns + \"$\"));\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (namespaceList_1_1 && !namespaceList_1_1.done && (_a = namespaceList_1.return)) _a.call(namespaceList_1);\n }\n finally { if (e_1) throw e_1.error; }\n }\n try {\n for (var debuggers_1 = tslib.__values(debuggers), debuggers_1_1 = debuggers_1.next(); !debuggers_1_1.done; debuggers_1_1 = debuggers_1.next()) {\n var instance = debuggers_1_1.value;\n instance.enabled = enabled(instance.namespace);\n }\n }\n catch (e_2_1) { e_2 = { error: e_2_1 }; }\n finally {\n try {\n if (debuggers_1_1 && !debuggers_1_1.done && (_b = debuggers_1.return)) _b.call(debuggers_1);\n }\n 
finally { if (e_2) throw e_2.error; }\n }\n}\nfunction enabled(namespace) {\n var e_3, _a, e_4, _b;\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n try {\n for (var skippedNamespaces_1 = tslib.__values(skippedNamespaces), skippedNamespaces_1_1 = skippedNamespaces_1.next(); !skippedNamespaces_1_1.done; skippedNamespaces_1_1 = skippedNamespaces_1.next()) {\n var skipped = skippedNamespaces_1_1.value;\n if (skipped.test(namespace)) {\n return false;\n }\n }\n }\n catch (e_3_1) { e_3 = { error: e_3_1 }; }\n finally {\n try {\n if (skippedNamespaces_1_1 && !skippedNamespaces_1_1.done && (_a = skippedNamespaces_1.return)) _a.call(skippedNamespaces_1);\n }\n finally { if (e_3) throw e_3.error; }\n }\n try {\n for (var enabledNamespaces_1 = tslib.__values(enabledNamespaces), enabledNamespaces_1_1 = enabledNamespaces_1.next(); !enabledNamespaces_1_1.done; enabledNamespaces_1_1 = enabledNamespaces_1.next()) {\n var enabledNamespace = enabledNamespaces_1_1.value;\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n }\n catch (e_4_1) { e_4 = { error: e_4_1 }; }\n finally {\n try {\n if (enabledNamespaces_1_1 && !enabledNamespaces_1_1.done && (_b = enabledNamespaces_1.return)) _b.call(enabledNamespaces_1);\n }\n finally { if (e_4) throw e_4.error; }\n }\n return false;\n}\nfunction disable() {\n var result = enabledString || \"\";\n enable(\"\");\n return result;\n}\nfunction createDebugger(namespace) {\n var newDebugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy: destroy,\n log: debugObj.log,\n namespace: namespace,\n extend: extend\n });\n function debug() {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = namespace + \" \" + args[0];\n }\n newDebugger.log.apply(newDebugger, tslib.__spread(args));\n }\n debuggers.push(newDebugger);\n return newDebugger;\n}\nfunction destroy() {\n var index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\nfunction extend(namespace) {\n var newDebugger = createDebugger(this.namespace + \":\" + namespace);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar registeredLoggers = new Set();\nvar logLevelFromEnv = (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\nvar azureLogLevel;\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nvar AzureLogger = debugObj(\"azure\");\nAzureLogger.log = function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n debugObj.log.apply(debugObj, tslib.__spread(args));\n};\nvar AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n }\n else {\n console.error(\"AZURE_LOG_LEVEL set to unknown log level '\" + logLevelFromEnv + \"'; logging is not enabled. 
Acceptable values: \" + AZURE_LOG_LEVELS.join(\", \") + \".\");\n }\n}\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nfunction setLogLevel(level) {\n var e_1, _a;\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\"Unknown log level '\" + level + \"'. Acceptable values: \" + AZURE_LOG_LEVELS.join(\",\"));\n }\n azureLogLevel = level;\n var enabledNamespaces = [];\n try {\n for (var registeredLoggers_1 = tslib.__values(registeredLoggers), registeredLoggers_1_1 = registeredLoggers_1.next(); !registeredLoggers_1_1.done; registeredLoggers_1_1 = registeredLoggers_1.next()) {\n var logger = registeredLoggers_1_1.value;\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (registeredLoggers_1_1 && !registeredLoggers_1_1.done && (_a = registeredLoggers_1.return)) _a.call(registeredLoggers_1);\n }\n finally { if (e_1) throw e_1.error; }\n }\n debugObj.enable(enabledNamespaces.join(\",\"));\n}\n/**\n * Retrieves the currently specified log level.\n */\nfunction getLogLevel() {\n return azureLogLevel;\n}\nvar levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nfunction createClientLogger(namespace) {\n var clientRootLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\nfunction patchLogMethod(parent, child) {\n child.log = function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n parent.log.apply(parent, tslib.__spread(args));\n };\n}\nfunction createLogger(parent, level) {\n var logger = Object.assign(parent.extend(level), {\n level: level\n });\n patchLogMethod(parent, logger);\n if (shouldEnable(logger)) {\n var enabledNamespaces = debugObj.disable();\n debugObj.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n registeredLoggers.add(logger);\n return logger;\n}\nfunction shouldEnable(logger) {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n }\n else {\n return false;\n }\n}\nfunction isAzureLogLevel(logLevel) {\n return AZURE_LOG_LEVELS.includes(logLevel);\n}\n\nexports.AzureLogger = AzureLogger;\nexports.createClientLogger = createClientLogger;\nexports.getLogLevel = getLogLevel;\nexports.setLogLevel = setLogLevel;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar coreHttp = require('@azure/core-http');\nvar tslib = require('tslib');\nvar api = require('@opentelemetry/api');\nvar logger$1 = require('@azure/logger');\nvar abortController = require('@azure/abort-controller');\nvar os = require('os');\nvar crypto = require('crypto');\nvar coreTracing = require('@azure/core-tracing');\nvar stream = require('stream');\nrequire('@azure/core-paging');\nvar coreLro = require('@azure/core-lro');\nvar events = require('events');\nvar fs = require('fs');\nvar util = require('util');\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\nvar KeyInfo = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry\",\n required: true,\n serializedName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar UserDelegationKey = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n xmlName: \"SignedOid\",\n required: true,\n serializedName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n xmlName: \"SignedTid\",\n required: true,\n serializedName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n xmlName: \"SignedStart\",\n required: true,\n serializedName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n xmlName: \"SignedExpiry\",\n required: true,\n serializedName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n xmlName: \"SignedService\",\n required: true,\n serializedName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n xmlName: \"SignedVersion\",\n required: true,\n serializedName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n xmlName: \"Value\",\n required: true,\n serializedName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar StorageError = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n xmlName: \"Message\",\n serializedName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n xmlName: \"Code\",\n serializedName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar DataLakeStorageErrorError = {\n serializedName: \"DataLakeStorageError_error\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageErrorError\",\n modelProperties: {\n code: {\n xmlName: \"Code\",\n serializedName: \"Code\",\n type: {\n name: \"String\"\n }\n },\n message: {\n xmlName: \"Message\",\n serializedName: \"Message\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar DataLakeStorageError = {\n serializedName: \"DataLakeStorageError\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageError\",\n modelProperties: {\n dataLakeStorageErrorDetails: {\n xmlName: \"error\",\n serializedName: \"error\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageErrorError\"\n }\n }\n }\n }\n};\nvar AccessPolicy = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n xmlName: \"Start\",\n serializedName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry\",\n serializedName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n xmlName: \"Permission\",\n serializedName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobPropertiesInternal = {\n xmlName: \"Properties\",\n serializedName: \"BlobPropertiesInternal\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n xmlName: \"Creation-Time\",\n 
serializedName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n xmlName: \"Last-Modified\",\n required: true,\n serializedName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n xmlName: \"Etag\",\n required: true,\n serializedName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n xmlName: \"Content-Length\",\n serializedName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n xmlName: \"Content-Type\",\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n xmlName: \"Content-Encoding\",\n serializedName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n xmlName: \"Content-Language\",\n serializedName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n xmlName: \"Content-MD5\",\n serializedName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n xmlName: \"Content-Disposition\",\n serializedName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n xmlName: \"Cache-Control\",\n serializedName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n xmlName: \"x-ms-blob-sequence-number\",\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n xmlName: \"BlobType\",\n serializedName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n leaseStatus: {\n xmlName: \"LeaseStatus\",\n serializedName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n leaseState: {\n xmlName: \"LeaseState\",\n serializedName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n xmlName: \"LeaseDuration\",\n serializedName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n copyId: {\n xmlName: \"CopyId\",\n serializedName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n xmlName: \"CopyStatus\",\n serializedName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n copySource: {\n xmlName: \"CopySource\",\n serializedName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n xmlName: \"CopyProgress\",\n serializedName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n xmlName: \"CopyCompletionTime\",\n serializedName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n xmlName: \"CopyStatusDescription\",\n serializedName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n xmlName: \"ServerEncrypted\",\n serializedName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n xmlName: \"IncrementalCopy\",\n serializedName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n xmlName: \"DestinationSnapshot\",\n serializedName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n xmlName: \"DeletedTime\",\n serializedName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n xmlName: \"RemainingRetentionDays\",\n 
serializedName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n xmlName: \"AccessTier\",\n serializedName: \"AccessTier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n xmlName: \"AccessTierInferred\",\n serializedName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n xmlName: \"ArchiveStatus\",\n serializedName: \"ArchiveStatus\",\n type: {\n name: \"String\"\n }\n },\n customerProvidedKeySha256: {\n xmlName: \"CustomerProvidedKeySha256\",\n serializedName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n xmlName: \"EncryptionScope\",\n serializedName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n xmlName: \"AccessTierChangeTime\",\n serializedName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n xmlName: \"TagCount\",\n serializedName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry-Time\",\n serializedName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n xmlName: \"Sealed\",\n serializedName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n xmlName: \"RehydratePriority\",\n serializedName: \"RehydratePriority\",\n type: {\n name: \"String\"\n }\n },\n lastAccessedOn: {\n xmlName: \"LastAccessTime\",\n serializedName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar BlobTag = {\n xmlName: \"Tag\",\n serializedName: \"BlobTag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n xmlName: \"Key\",\n required: true,\n serializedName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n xmlName: \"Value\",\n required: true,\n serializedName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobTags = {\n xmlName: \"Tags\",\n serializedName: \"BlobTags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n xmlIsWrapped: true,\n xmlName: \"TagSet\",\n xmlElementName: \"Tag\",\n required: true,\n serializedName: \"BlobTagSet\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\nvar BlobItemInternal = {\n xmlName: \"Blob\",\n serializedName: \"BlobItemInternal\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n xmlName: \"Deleted\",\n required: true,\n serializedName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n xmlName: \"Snapshot\",\n required: true,\n serializedName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n xmlName: \"VersionId\",\n serializedName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n xmlName: \"IsCurrentVersion\",\n serializedName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n xmlName: \"Properties\",\n required: true,\n serializedName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n metadata: {\n xmlName: \"Metadata\",\n serializedName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n blobTags: {\n xmlName: \"Tags\",\n serializedName: \"BlobTags\",\n type: {\n name: 
\"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n xmlName: \"OrMetadata\",\n serializedName: \"ObjectReplicationMetadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar BlobFlatListSegment = {\n xmlName: \"Blobs\",\n serializedName: \"BlobFlatListSegment\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"BlobItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\nvar ListBlobsFlatSegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListBlobsFlatSegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlIsAttribute: true,\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n xmlName: \"Blobs\",\n required: true,\n serializedName: \"Segment\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobPrefix = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobHierarchyListSegment = {\n xmlName: \"Blobs\",\n serializedName: \"BlobHierarchyListSegment\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n serializedName: \"BlobPrefixes\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"BlobItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\nvar ListBlobsHierarchySegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlIsAttribute: true,\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n 
xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n xmlName: \"Delimiter\",\n serializedName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n xmlName: \"Blobs\",\n required: true,\n serializedName: \"Segment\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar Block = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n xmlName: \"Size\",\n required: true,\n serializedName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar BlockList = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n xmlIsWrapped: true,\n xmlName: \"CommittedBlocks\",\n xmlElementName: \"Block\",\n serializedName: \"CommittedBlocks\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n xmlIsWrapped: true,\n xmlName: \"UncommittedBlocks\",\n xmlElementName: \"Block\",\n serializedName: \"UncommittedBlocks\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\nvar BlockLookupList = {\n xmlName: \"BlockList\",\n serializedName: \"BlockLookupList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n serializedName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n serializedName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n serializedName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar ContainerProperties = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n xmlName: \"Last-Modified\",\n required: true,\n serializedName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n xmlName: \"Etag\",\n required: true,\n serializedName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n xmlName: \"LeaseStatus\",\n serializedName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n leaseState: {\n xmlName: \"LeaseState\",\n serializedName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n xmlName: \"LeaseDuration\",\n serializedName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n publicAccess: {\n xmlName: \"PublicAccess\",\n serializedName: \"PublicAccess\",\n type: {\n name: \"String\"\n }\n },\n 
hasImmutabilityPolicy: {\n xmlName: \"HasImmutabilityPolicy\",\n serializedName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n xmlName: \"HasLegalHold\",\n serializedName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n xmlName: \"DefaultEncryptionScope\",\n serializedName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n xmlName: \"DenyEncryptionScopeOverride\",\n serializedName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n xmlName: \"DeletedTime\",\n serializedName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n xmlName: \"RemainingRetentionDays\",\n serializedName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar ContainerItem = {\n xmlName: \"Container\",\n serializedName: \"ContainerItem\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n xmlName: \"Deleted\",\n serializedName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n xmlName: \"Version\",\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n xmlName: \"Properties\",\n required: true,\n serializedName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n xmlName: \"Metadata\",\n serializedName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar DelimitedTextConfiguration = {\n serializedName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n xmlName: \"ColumnSeparator\",\n required: true,\n serializedName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n xmlName: \"FieldQuote\",\n required: true,\n serializedName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n xmlName: \"RecordSeparator\",\n required: true,\n serializedName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n xmlName: \"EscapeChar\",\n required: true,\n serializedName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n xmlName: \"HasHeaders\",\n required: true,\n serializedName: \"HeadersPresent\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\nvar JsonTextConfiguration = {\n serializedName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n xmlName: \"RecordSeparator\",\n required: true,\n serializedName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ArrowField = {\n xmlName: \"Field\",\n serializedName: \"ArrowField\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n xmlName: \"Type\",\n required: true,\n serializedName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n xmlName: \"Name\",\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n xmlName: \"Precision\",\n serializedName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n xmlName: \"Scale\",\n serializedName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar 
ArrowConfiguration = {\n serializedName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n xmlIsWrapped: true,\n xmlName: \"Schema\",\n xmlElementName: \"Field\",\n required: true,\n serializedName: \"Schema\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\nvar ListContainersSegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListContainersSegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n xmlIsWrapped: true,\n xmlName: \"Containers\",\n xmlElementName: \"Container\",\n required: true,\n serializedName: \"ContainerItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar CorsRule = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n xmlName: \"AllowedOrigins\",\n required: true,\n serializedName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n xmlName: \"AllowedMethods\",\n required: true,\n serializedName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n xmlName: \"AllowedHeaders\",\n required: true,\n serializedName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n xmlName: \"ExposedHeaders\",\n required: true,\n serializedName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n xmlName: \"MaxAgeInSeconds\",\n required: true,\n serializedName: \"MaxAgeInSeconds\",\n constraints: {\n InclusiveMinimum: 0\n },\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar FilterBlobItem = {\n xmlName: \"Blob\",\n serializedName: \"FilterBlobItem\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n xmlName: \"Tags\",\n serializedName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\nvar FilterBlobSegment = {\n xmlName: \"EnumerationResults\",\n serializedName: \"FilterBlobSegment\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n where: {\n xmlName: \"Where\",\n required: true,\n serializedName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n xmlIsWrapped: 
true,\n xmlName: \"Blobs\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"Blobs\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar GeoReplication = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n xmlName: \"Status\",\n required: true,\n serializedName: \"Status\",\n type: {\n name: \"String\"\n }\n },\n lastSyncOn: {\n xmlName: \"LastSyncTime\",\n required: true,\n serializedName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar RetentionPolicy = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n xmlName: \"Days\",\n serializedName: \"Days\",\n constraints: {\n InclusiveMinimum: 1\n },\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar Logging = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n xmlName: \"Version\",\n required: true,\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n xmlName: \"Delete\",\n required: true,\n serializedName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n xmlName: \"Read\",\n required: true,\n serializedName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n xmlName: \"Write\",\n required: true,\n serializedName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n xmlName: \"RetentionPolicy\",\n required: true,\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\nvar Metrics = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n xmlName: \"Version\",\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n xmlName: \"IncludeAPIs\",\n serializedName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n xmlName: \"RetentionPolicy\",\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\nvar PageRange = {\n serializedName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n xmlName: \"End\",\n required: true,\n serializedName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar ClearRange = {\n serializedName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n xmlName: \"End\",\n required: true,\n serializedName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar PageList = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n 
modelProperties: {\n pageRange: {\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n serializedName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n serializedName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n }\n }\n }\n};\nvar QueryFormat = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n xmlName: \"Type\",\n serializedName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"delimited\",\n \"json\",\n \"arrow\"\n ]\n }\n },\n delimitedTextConfiguration: {\n xmlName: \"DelimitedTextConfiguration\",\n serializedName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n xmlName: \"JsonTextConfiguration\",\n serializedName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n xmlName: \"ArrowConfiguration\",\n serializedName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n }\n }\n }\n};\nvar QuerySerialization = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n xmlName: \"Format\",\n required: true,\n serializedName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\nvar QueryRequest = {\n serializedName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n xmlName: \"QueryType\",\n required: true,\n isConstant: true,\n serializedName: \"QueryType\",\n defaultValue: 'SQL',\n type: {\n name: \"String\"\n }\n },\n expression: {\n xmlName: \"Expression\",\n required: true,\n serializedName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n xmlName: \"InputSerialization\",\n serializedName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n xmlName: \"OutputSerialization\",\n serializedName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\nvar SignedIdentifier = {\n serializedName: \"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n xmlName: \"Id\",\n required: true,\n serializedName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n xmlName: \"AccessPolicy\",\n required: true,\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\nvar StaticWebsite = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n xmlName: \"IndexDocument\",\n serializedName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n xmlName: \"ErrorDocument404Path\",\n serializedName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n 
xmlName: \"DefaultIndexDocumentPath\",\n serializedName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobServiceProperties = {\n xmlName: \"StorageServiceProperties\",\n serializedName: \"BlobServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n xmlName: \"Logging\",\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n xmlName: \"HourMetrics\",\n serializedName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n xmlName: \"MinuteMetrics\",\n serializedName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n xmlIsWrapped: true,\n xmlName: \"Cors\",\n xmlElementName: \"CorsRule\",\n serializedName: \"Cors\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n xmlName: \"DefaultServiceVersion\",\n serializedName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n xmlName: \"DeleteRetentionPolicy\",\n serializedName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n xmlName: \"StaticWebsite\",\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\nvar BlobServiceStatistics = {\n xmlName: \"StorageServiceStats\",\n serializedName: \"BlobServiceStatistics\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n xmlName: \"GeoReplication\",\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\nvar ServiceSetPropertiesHeaders = {\n serializedName: \"service-setproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetPropertiesHeaders = {\n serializedName: \"service-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetStatisticsHeaders = {\n serializedName: \"service-getstatistics-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n 
serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceListContainersSegmentHeaders = {\n serializedName: \"service-listcontainerssegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetUserDelegationKeyHeaders = {\n serializedName: \"service-getuserdelegationkey-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetAccountInfoHeaders = {\n serializedName: \"service-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceSubmitBatchHeaders = {\n serializedName: \"service-submitbatch-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceFilterBlobsHeaders = {\n serializedName: \"service-filterblobs-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerCreateHeaders = {\n serializedName: \"container-create-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetPropertiesHeaders = {\n serializedName: \"container-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerDeleteHeaders = {\n serializedName: \"container-delete-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n 
serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerSetMetadataHeaders = {\n serializedName: \"container-setmetadata-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetAccessPolicyHeaders = {\n serializedName: \"container-getaccesspolicy-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerSetAccessPolicyHeaders = {\n serializedName: \"container-setaccesspolicy-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerRestoreHeaders = {\n serializedName: \"container-restore-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n 
}\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerRenameHeaders = {\n serializedName: \"container-rename-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerSubmitBatchHeaders = {\n serializedName: \"container-submitbatch-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerAcquireLeaseHeaders = {\n serializedName: \"container-acquirelease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerReleaseLeaseHeaders = {\n serializedName: \"container-releaselease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerRenewLeaseHeaders = {\n serializedName: \"container-renewlease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: 
{\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerBreakLeaseHeaders = {\n serializedName: \"container-breaklease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerChangeLeaseHeaders = {\n serializedName: \"container-changelease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerListBlobFlatSegmentHeaders = {\n serializedName: \"container-listblobflatsegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerListBlobHierarchySegmentHeaders = {\n serializedName: \"container-listblobhierarchysegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetAccountInfoHeaders = {\n serializedName: \"container-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobDownloadHeaders = {\n serializedName: \"blob-download-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: 
\"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetPropertiesHeaders = {\n serializedName: \"blob-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n 
headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n type: {\n 
name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"String\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobDeleteHeaders = {\n serializedName: \"blob-delete-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetAccessControlHeaders = {\n serializedName: \"blob-setaccesscontrol-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetAccessControlHeaders\",\n modelProperties: {\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetAccessControlHeaders = {\n serializedName: \"blob-getaccesscontrol-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccessControlHeaders\",\n modelProperties: {\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsOwner: {\n serializedName: \"x-ms-owner\",\n type: {\n name: \"String\"\n }\n },\n xMsGroup: {\n serializedName: \"x-ms-group\",\n type: {\n name: \"String\"\n }\n },\n xMsPermissions: {\n serializedName: \"x-ms-permissions\",\n type: {\n name: \"String\"\n }\n },\n xMsAcl: {\n serializedName: \"x-ms-acl\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar 
BlobRenameHeaders = {\n serializedName: \"blob-rename-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobRenameHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar PageBlobCreateHeaders = {\n serializedName: \"pageblob-create-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobCreateHeaders = {\n serializedName: \"appendblob-create-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobUploadHeaders = {\n serializedName: 
\"blockblob-upload-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobPutBlobFromUrlHeaders = {\n serializedName: \"blockblob-putblobfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobUndeleteHeaders = {\n serializedName: \"blob-undelete-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetExpiryHeaders = {\n serializedName: \"blob-setexpiry-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: 
{\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetHTTPHeadersHeaders = {\n serializedName: \"blob-sethttpheaders-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHTTPHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetMetadataHeaders = {\n serializedName: \"blob-setmetadata-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobAcquireLeaseHeaders = {\n serializedName: \"blob-acquirelease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: 
\"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobReleaseLeaseHeaders = {\n serializedName: \"blob-releaselease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobRenewLeaseHeaders = {\n serializedName: \"blob-renewlease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobChangeLeaseHeaders = {\n serializedName: \"blob-changelease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobBreakLeaseHeaders = {\n serializedName: \"blob-breaklease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: 
{\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobCreateSnapshotHeaders = {\n serializedName: \"blob-createsnapshot-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobStartCopyFromURLHeaders = {\n serializedName: \"blob-startcopyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobCopyFromURLHeaders = {\n serializedName: \"blob-copyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"success\"\n ]\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: 
{\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobAbortCopyFromURLHeaders = {\n serializedName: \"blob-abortcopyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetTierHeaders = {\n serializedName: \"blob-settier-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetAccountInfoHeaders = {\n serializedName: \"blob-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobStageBlockHeaders = {\n serializedName: \"blockblob-stageblock-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: 
\"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobStageBlockFromURLHeaders = {\n serializedName: \"blockblob-stageblockfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobCommitBlockListHeaders = {\n serializedName: \"blockblob-commitblocklist-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobGetBlockListHeaders = {\n serializedName: \"blockblob-getblocklist-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n 
serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUploadPagesHeaders = {\n serializedName: \"pageblob-uploadpages-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobClearPagesHeaders = {\n serializedName: \"pageblob-clearpages-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUploadPagesFromURLHeaders = {\n serializedName: \"pageblob-uploadpagesfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n 
blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobGetPageRangesHeaders = {\n serializedName: \"pageblob-getpageranges-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobGetPageRangesDiffHeaders = {\n serializedName: \"pageblob-getpagerangesdiff-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobResizeHeaders = {\n serializedName: \"pageblob-resize-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n 
},\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUpdateSequenceNumberHeaders = {\n serializedName: \"pageblob-updatesequencenumber-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobCopyIncrementalHeaders = {\n serializedName: \"pageblob-copyincremental-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobAppendBlockHeaders = {\n serializedName: \"appendblob-appendblock-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: 
{\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobAppendBlockFromUrlHeaders = {\n serializedName: \"appendblob-appendblockfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobSealHeaders = {\n serializedName: \"appendblob-seal-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobQueryHeaders = {\n serializedName: \"blob-query-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n 
},\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetTagsHeaders = {\n serializedName: \"blob-gettags-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n 
type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetTagsHeaders = {\n serializedName: \"blob-settags-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers = /*#__PURE__*/Object.freeze({\n __proto__: null,\n BlobServiceProperties: BlobServiceProperties,\n BlobServiceStatistics: BlobServiceStatistics,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n ContainerItem: ContainerItem,\n ContainerProperties: ContainerProperties,\n CorsRule: CorsRule,\n FilterBlobItem: FilterBlobItem,\n FilterBlobSegment: FilterBlobSegment,\n GeoReplication: GeoReplication,\n KeyInfo: KeyInfo,\n ListContainersSegmentResponse: ListContainersSegmentResponse,\n Logging: Logging,\n Metrics: Metrics,\n RetentionPolicy: RetentionPolicy,\n ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders,\n ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders,\n ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders,\n ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders,\n ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders,\n ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders,\n ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders,\n StaticWebsite: StaticWebsite,\n StorageError: StorageError,\n UserDelegationKey: UserDelegationKey\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\nvar access = {\n parameterPath: [\n \"options\",\n \"access\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n }\n};\nvar action0 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'acquire',\n type: {\n name: \"String\"\n }\n }\n};\nvar action1 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'release',\n type: {\n name: \"String\"\n }\n }\n};\nvar action2 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'renew',\n type: {\n name: \"String\"\n }\n }\n};\nvar action3 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'break',\n type: {\n name: \"String\"\n }\n }\n};\nvar action4 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'change',\n type: {\n name: \"String\"\n }\n }\n};\nvar action5 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"action\",\n defaultValue: 'setAccessControl',\n type: {\n name: \"String\"\n }\n }\n};\nvar action6 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"action\",\n defaultValue: 'getAccessControl',\n type: {\n name: \"String\"\n }\n }\n};\nvar appendPosition = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobCacheControl = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobCacheControl\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentDisposition = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentDisposition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentEncoding = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentEncoding\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentLanguage = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentLanguage\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentLength = {\n parameterPath: \"blobContentLength\",\n mapper: {\n required: true,\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobContentMD5 = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar blobContentType = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentType\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobDeleteType = {\n 
parameterPath: [\n \"options\",\n \"blobDeleteType\"\n ],\n mapper: {\n serializedName: \"deletetype\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Permanent\"\n ]\n }\n }\n};\nvar blobSequenceNumber = {\n parameterPath: [\n \"options\",\n \"blobSequenceNumber\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n defaultValue: 0,\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobTagsString = {\n parameterPath: [\n \"options\",\n \"blobTagsString\"\n ],\n mapper: {\n serializedName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType0 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'PageBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType1 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'AppendBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType2 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'BlockBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blockId = {\n parameterPath: \"blockId\",\n mapper: {\n required: true,\n serializedName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar breakPeriod = {\n parameterPath: [\n \"options\",\n \"breakPeriod\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar cacheControl = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"cacheControl\"\n ],\n mapper: {\n serializedName: \"x-ms-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\nvar comp0 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'properties',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp1 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'stats',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp10 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'lease',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp11 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'expiry',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp12 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'snapshot',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp13 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'copy',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp14 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'tier',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp15 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'query',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp16 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'tags',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp17 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'page',\n 
type: {\n name: \"String\"\n }\n }\n};\nvar comp18 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'pagelist',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp19 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'incrementalcopy',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp2 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'list',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp20 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'appendblock',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp21 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'seal',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp22 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'block',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp23 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'blocklist',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp3 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'userdelegationkey',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp4 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'batch',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp5 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'blobs',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp6 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'metadata',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp7 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'acl',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp8 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'undelete',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp9 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'rename',\n type: {\n name: \"String\"\n }\n }\n};\nvar contentDisposition = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentDisposition\"\n ],\n mapper: {\n serializedName: \"x-ms-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentEncoding = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentEncoding\"\n ],\n mapper: {\n serializedName: \"x-ms-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentLanguage = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentLanguage\"\n ],\n mapper: {\n serializedName: \"x-ms-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentLength = {\n parameterPath: \"contentLength\",\n mapper: {\n required: true,\n serializedName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar contentType = {\n parameterPath: [\n 
\"options\",\n \"directoryHttpHeaders\",\n \"contentType\"\n ],\n mapper: {\n serializedName: \"x-ms-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copyActionAbortConstant = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n defaultValue: 'abort',\n type: {\n name: \"String\"\n }\n }\n};\nvar copyId = {\n parameterPath: \"copyId\",\n mapper: {\n required: true,\n serializedName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copySource = {\n parameterPath: \"copySource\",\n mapper: {\n required: true,\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copySourceBlobProperties = {\n parameterPath: [\n \"options\",\n \"copySourceBlobProperties\"\n ],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar defaultEncryptionScope = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deletedContainerName = {\n parameterPath: [\n \"options\",\n \"deletedContainerName\"\n ],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deletedContainerVersion = {\n parameterPath: [\n \"options\",\n \"deletedContainerVersion\"\n ],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deleteSnapshots = {\n parameterPath: [\n \"options\",\n \"deleteSnapshots\"\n ],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"include\",\n \"only\"\n ]\n }\n }\n};\nvar delimiter = {\n parameterPath: \"delimiter\",\n mapper: {\n required: true,\n serializedName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\nvar directoryProperties = {\n parameterPath: [\n \"options\",\n \"directoryProperties\"\n ],\n mapper: {\n serializedName: \"x-ms-properties\",\n type: {\n name: \"String\"\n }\n }\n};\nvar duration = {\n parameterPath: [\n \"options\",\n \"duration\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar encryptionAlgorithm = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionAlgorithm\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"AES256\"\n ]\n }\n }\n};\nvar encryptionKey = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionKey\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\nvar encryptionKeySha256 = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionKeySha256\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\nvar encryptionScope = {\n parameterPath: [\n \"options\",\n \"encryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\nvar expiresOn = {\n parameterPath: [\n \"options\",\n \"expiresOn\"\n ],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\nvar expiryOptions = {\n parameterPath: \"expiryOptions\",\n mapper: {\n required: true,\n serializedName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\nvar group = {\n parameterPath: [\n \"options\",\n \"group\"\n ],\n mapper: 
{\n serializedName: \"x-ms-group\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifMatch = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifMatch\"\n ],\n mapper: {\n serializedName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifModifiedSince = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifModifiedSince\"\n ],\n mapper: {\n serializedName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar ifNoneMatch = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifNoneMatch\"\n ],\n mapper: {\n serializedName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifSequenceNumberEqualTo = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifSequenceNumberLessThan = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifSequenceNumberLessThanOrEqualTo = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifTags = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifTags\"\n ],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifUnmodifiedSince = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar include0 = {\n parameterPath: [\n \"options\",\n \"include\"\n ],\n mapper: {\n serializedName: \"include\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"metadata\",\n \"deleted\"\n ]\n }\n }\n }\n },\n collectionFormat: coreHttp.QueryCollectionFormat.Csv\n};\nvar include1 = {\n parameterPath: [\n \"options\",\n \"include\"\n ],\n mapper: {\n serializedName: \"include\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\"\n ]\n }\n }\n }\n },\n collectionFormat: coreHttp.QueryCollectionFormat.Csv\n};\nvar leaseId0 = {\n parameterPath: [\n \"options\",\n \"leaseAccessConditions\",\n \"leaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar leaseId1 = {\n parameterPath: \"leaseId\",\n mapper: {\n required: true,\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar listType = {\n parameterPath: \"listType\",\n mapper: {\n required: true,\n serializedName: \"blocklisttype\",\n defaultValue: 'committed',\n type: {\n name: \"Enum\",\n allowedValues: [\n \"committed\",\n \"uncommitted\",\n \"all\"\n ]\n }\n }\n};\nvar marker0 = {\n parameterPath: [\n \"options\",\n \"marker\"\n ],\n mapper: {\n serializedName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\nvar maxPageSize = {\n parameterPath: [\n \"options\",\n \"maxPageSize\"\n ],\n mapper: {\n serializedName: \"maxresults\",\n constraints: {\n InclusiveMinimum: 1\n },\n type: {\n name: \"Number\"\n }\n }\n};\nvar maxSize = {\n 
parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"maxSize\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar metadata = {\n parameterPath: [\n \"options\",\n \"metadata\"\n ],\n mapper: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\nvar multipartContentType = {\n parameterPath: \"multipartContentType\",\n mapper: {\n required: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar owner = {\n parameterPath: [\n \"options\",\n \"owner\"\n ],\n mapper: {\n serializedName: \"x-ms-owner\",\n type: {\n name: \"String\"\n }\n }\n};\nvar pageWrite0 = {\n parameterPath: \"pageWrite\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n defaultValue: 'update',\n type: {\n name: \"String\"\n }\n }\n};\nvar pageWrite1 = {\n parameterPath: \"pageWrite\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n defaultValue: 'clear',\n type: {\n name: \"String\"\n }\n }\n};\nvar pathRenameMode = {\n parameterPath: [\n \"options\",\n \"pathRenameMode\"\n ],\n mapper: {\n serializedName: \"mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"legacy\",\n \"posix\"\n ]\n }\n }\n};\nvar posixAcl = {\n parameterPath: [\n \"options\",\n \"posixAcl\"\n ],\n mapper: {\n serializedName: \"x-ms-acl\",\n type: {\n name: \"String\"\n }\n }\n};\nvar posixPermissions = {\n parameterPath: [\n \"options\",\n \"posixPermissions\"\n ],\n mapper: {\n serializedName: \"x-ms-permissions\",\n type: {\n name: \"String\"\n }\n }\n};\nvar posixUmask = {\n parameterPath: [\n \"options\",\n \"posixUmask\"\n ],\n mapper: {\n serializedName: \"x-ms-umask\",\n type: {\n name: \"String\"\n }\n }\n};\nvar prefix = {\n parameterPath: [\n \"options\",\n \"prefix\"\n ],\n mapper: {\n serializedName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\nvar preventEncryptionScopeOverride = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar prevsnapshot = {\n parameterPath: [\n \"options\",\n \"prevsnapshot\"\n ],\n mapper: {\n serializedName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\nvar prevSnapshotUrl = {\n parameterPath: [\n \"options\",\n \"prevSnapshotUrl\"\n ],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\nvar proposedLeaseId0 = {\n parameterPath: [\n \"options\",\n \"proposedLeaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar proposedLeaseId1 = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n required: true,\n serializedName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar range0 = {\n parameterPath: [\n \"options\",\n \"range\"\n ],\n mapper: {\n serializedName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar range1 = {\n parameterPath: \"range\",\n mapper: {\n required: true,\n serializedName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar rangeGetContentCRC64 = {\n parameterPath: [\n \"options\",\n \"rangeGetContentCRC64\"\n ],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar 
rangeGetContentMD5 = {\n parameterPath: [\n \"options\",\n \"rangeGetContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar rehydratePriority = {\n parameterPath: [\n \"options\",\n \"rehydratePriority\"\n ],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"String\"\n }\n }\n};\nvar renameSource = {\n parameterPath: \"renameSource\",\n mapper: {\n required: true,\n serializedName: \"x-ms-rename-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar requestId = {\n parameterPath: [\n \"options\",\n \"requestId\"\n ],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar restype0 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'service',\n type: {\n name: \"String\"\n }\n }\n};\nvar restype1 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'account',\n type: {\n name: \"String\"\n }\n }\n};\nvar restype2 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'container',\n type: {\n name: \"String\"\n }\n }\n};\nvar sealBlob = {\n parameterPath: [\n \"options\",\n \"sealBlob\"\n ],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar sequenceNumberAction = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n required: true,\n serializedName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"max\",\n \"update\",\n \"increment\"\n ]\n }\n }\n};\nvar snapshot = {\n parameterPath: [\n \"options\",\n \"snapshot\"\n ],\n mapper: {\n serializedName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceContainerName = {\n parameterPath: \"sourceContainerName\",\n mapper: {\n required: true,\n serializedName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceContentCrc64 = {\n parameterPath: [\n \"options\",\n \"sourceContentCrc64\"\n ],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar sourceContentMD5 = {\n parameterPath: [\n \"options\",\n \"sourceContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar sourceIfMatch = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfModifiedSince = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar sourceIfNoneMatch = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfTags = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfTags\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfUnmodifiedSince = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n 
serializedName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar sourceLeaseId = {\n parameterPath: [\n \"options\",\n \"sourceLeaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceRange0 = {\n parameterPath: \"sourceRange\",\n mapper: {\n required: true,\n serializedName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceRange1 = {\n parameterPath: [\n \"options\",\n \"sourceRange\"\n ],\n mapper: {\n serializedName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceUrl = {\n parameterPath: \"sourceUrl\",\n mapper: {\n required: true,\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar tier0 = {\n parameterPath: [\n \"options\",\n \"tier\"\n ],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n }\n};\nvar tier1 = {\n parameterPath: \"tier\",\n mapper: {\n required: true,\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n }\n};\nvar timeoutInSeconds = {\n parameterPath: [\n \"options\",\n \"timeoutInSeconds\"\n ],\n mapper: {\n serializedName: \"timeout\",\n constraints: {\n InclusiveMinimum: 0\n },\n type: {\n name: \"Number\"\n }\n }\n};\nvar transactionalContentCrc64 = {\n parameterPath: [\n \"options\",\n \"transactionalContentCrc64\"\n ],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar transactionalContentMD5 = {\n parameterPath: [\n \"options\",\n \"transactionalContentMD5\"\n ],\n mapper: {\n serializedName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar upn = {\n parameterPath: [\n \"options\",\n \"upn\"\n ],\n mapper: {\n serializedName: \"upn\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar url = {\n parameterPath: \"url\",\n mapper: {\n required: true,\n serializedName: \"url\",\n defaultValue: '',\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\nvar version = {\n parameterPath: \"version\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-version\",\n defaultValue: '2020-06-12',\n type: {\n name: \"String\"\n }\n }\n};\nvar versionId = {\n parameterPath: [\n \"options\",\n \"versionId\"\n ],\n mapper: {\n serializedName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar where = {\n parameterPath: [\n \"options\",\n \"where\"\n ],\n mapper: {\n serializedName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\nvar xMsRequiresSync = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n defaultValue: 'true',\n type: {\n name: \"String\"\n }\n }\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Service. 
*/\nvar Service = /** @class */ (function () {\n /**\n * Create a Service.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Service(client) {\n this.client = client;\n }\n Service.prototype.setProperties = function (blobServiceProperties, options, callback) {\n return this.client.sendOperationRequest({\n blobServiceProperties: blobServiceProperties,\n options: options\n }, setPropertiesOperationSpec, callback);\n };\n Service.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec, callback);\n };\n Service.prototype.getStatistics = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getStatisticsOperationSpec, callback);\n };\n Service.prototype.listContainersSegment = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, listContainersSegmentOperationSpec, callback);\n };\n Service.prototype.getUserDelegationKey = function (keyInfo, options, callback) {\n return this.client.sendOperationRequest({\n keyInfo: keyInfo,\n options: options\n }, getUserDelegationKeyOperationSpec, callback);\n };\n Service.prototype.getAccountInfo = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec, callback);\n };\n Service.prototype.submitBatch = function (body, contentLength, multipartContentType, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n multipartContentType: multipartContentType,\n options: options\n }, submitBatchOperationSpec, callback);\n };\n Service.prototype.filterBlobs = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, filterBlobsOperationSpec, callback);\n };\n return Service;\n}());\n// Operation Specifications\nvar serializer = new coreHttp.Serializer(Mappers, true);\nvar setPropertiesOperationSpec = {\n httpMethod: \"PUT\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp0\n ],\n headerParameters: [\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"blobServiceProperties\",\n mapper: tslib.__assign(tslib.__assign({}, BlobServiceProperties), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 202: {\n headersMapper: ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceSetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getPropertiesOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp0\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: BlobServiceProperties,\n headersMapper: ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getStatisticsOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp1\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: BlobServiceStatistics,\n headersMapper: ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetStatisticsHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar 
listContainersSegmentOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n marker0,\n maxPageSize,\n include0,\n timeoutInSeconds,\n comp2\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListContainersSegmentResponse,\n headersMapper: ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceListContainersSegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getUserDelegationKeyOperationSpec = {\n httpMethod: \"POST\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp3\n ],\n headerParameters: [\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"keyInfo\",\n mapper: tslib.__assign(tslib.__assign({}, KeyInfo), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 200: {\n bodyMapper: UserDelegationKey,\n headersMapper: ServiceGetUserDelegationKeyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetUserDelegationKeyHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getAccountInfoOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar submitBatchOperationSpec = {\n httpMethod: \"POST\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp4\n ],\n headerParameters: [\n contentLength,\n multipartContentType,\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 202: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceSubmitBatchHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar filterBlobsOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n where,\n marker0,\n maxPageSize,\n comp5\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: FilterBlobSegment,\n headersMapper: ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceFilterBlobsHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$1 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n AccessPolicy: AccessPolicy,\n BlobFlatListSegment: BlobFlatListSegment,\n BlobHierarchyListSegment: BlobHierarchyListSegment,\n BlobItemInternal: BlobItemInternal,\n BlobPrefix: BlobPrefix,\n BlobPropertiesInternal: BlobPropertiesInternal,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders,\n ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders,\n ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders,\n ContainerCreateHeaders: ContainerCreateHeaders,\n ContainerDeleteHeaders: ContainerDeleteHeaders,\n ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders,\n ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders,\n ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders,\n ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders,\n ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders,\n ContainerRenameHeaders: ContainerRenameHeaders,\n ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders,\n ContainerRestoreHeaders: ContainerRestoreHeaders,\n ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders,\n ContainerSetMetadataHeaders: ContainerSetMetadataHeaders,\n ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders,\n ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse,\n SignedIdentifier: SignedIdentifier,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Container. 
*/\nvar Container = /** @class */ (function () {\n /**\n * Create a Container.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Container(client) {\n this.client = client;\n }\n Container.prototype.create = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, createOperationSpec, callback);\n };\n Container.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec$1, callback);\n };\n Container.prototype.deleteMethod = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, deleteMethodOperationSpec, callback);\n };\n Container.prototype.setMetadata = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setMetadataOperationSpec, callback);\n };\n Container.prototype.getAccessPolicy = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccessPolicyOperationSpec, callback);\n };\n Container.prototype.setAccessPolicy = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setAccessPolicyOperationSpec, callback);\n };\n Container.prototype.restore = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, restoreOperationSpec, callback);\n };\n Container.prototype.rename = function (sourceContainerName, options, callback) {\n return this.client.sendOperationRequest({\n sourceContainerName: sourceContainerName,\n options: options\n }, renameOperationSpec, callback);\n };\n Container.prototype.submitBatch = function (body, contentLength, multipartContentType, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n multipartContentType: multipartContentType,\n options: options\n }, submitBatchOperationSpec$1, callback);\n };\n Container.prototype.acquireLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, acquireLeaseOperationSpec, callback);\n };\n Container.prototype.releaseLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, releaseLeaseOperationSpec, callback);\n };\n Container.prototype.renewLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, renewLeaseOperationSpec, callback);\n };\n Container.prototype.breakLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, breakLeaseOperationSpec, callback);\n };\n Container.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n proposedLeaseId: proposedLeaseId,\n options: options\n }, changeLeaseOperationSpec, callback);\n };\n Container.prototype.listBlobFlatSegment = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, listBlobFlatSegmentOperationSpec, callback);\n };\n Container.prototype.listBlobHierarchySegment = function (delimiter, options, callback) {\n return this.client.sendOperationRequest({\n delimiter: delimiter,\n options: options\n }, listBlobHierarchySegmentOperationSpec, callback);\n };\n Container.prototype.getAccountInfo = function (options, callback) {\n return 
this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec$1, callback);\n };\n return Container;\n}());\n// Operation Specifications\nvar serializer$1 = new coreHttp.Serializer(Mappers$1, true);\nvar createOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n metadata,\n access,\n version,\n requestId,\n defaultEncryptionScope,\n preventEncryptionScopeOverride\n ],\n responses: {\n 201: {\n headersMapper: ContainerCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getPropertiesOperationSpec$1 = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0\n ],\n responses: {\n 200: {\n headersMapper: ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar deleteMethodOperationSpec = {\n httpMethod: \"DELETE\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 202: {\n headersMapper: ContainerDeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerDeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar setMetadataOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp6\n ],\n headerParameters: [\n metadata,\n version,\n requestId,\n leaseId0,\n ifModifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerSetMetadataHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getAccessPolicyOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp7\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0\n ],\n responses: {\n 200: {\n bodyMapper: {\n xmlElementName: \"SignedIdentifier\",\n serializedName: \"parsedResponse\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n },\n headersMapper: ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetAccessPolicyHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar setAccessPolicyOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp7\n ],\n headerParameters: [\n access,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"containerAcl\"\n ],\n mapper: {\n xmlName: \"SignedIdentifiers\",\n xmlElementName: \"SignedIdentifier\",\n serializedName: \"containerAcl\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 
200: {\n headersMapper: ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerSetAccessPolicyHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar restoreOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp8\n ],\n headerParameters: [\n version,\n requestId,\n deletedContainerName,\n deletedContainerVersion\n ],\n responses: {\n 201: {\n headersMapper: ContainerRestoreHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerRestoreHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar renameOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp9\n ],\n headerParameters: [\n version,\n requestId,\n sourceContainerName,\n sourceLeaseId\n ],\n responses: {\n 200: {\n headersMapper: ContainerRenameHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerRenameHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar submitBatchOperationSpec$1 = {\n httpMethod: \"POST\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp4\n ],\n headerParameters: [\n contentLength,\n multipartContentType,\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 202: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerSubmitBatchHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar acquireLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10,\n restype2\n ],\n headerParameters: [\n duration,\n proposedLeaseId0,\n version,\n requestId,\n action0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 201: {\n headersMapper: ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerAcquireLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar releaseLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10,\n restype2\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action1,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerReleaseLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar renewLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10,\n restype2\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action2,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerRenewLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar breakLeaseOperationSpec = {\n httpMethod: 
\"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10,\n restype2\n ],\n headerParameters: [\n breakPeriod,\n version,\n requestId,\n action3,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 202: {\n headersMapper: ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerBreakLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar changeLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10,\n restype2\n ],\n headerParameters: [\n leaseId1,\n proposedLeaseId1,\n version,\n requestId,\n action4,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerChangeLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar listBlobFlatSegmentOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n marker0,\n maxPageSize,\n include1,\n timeoutInSeconds,\n restype2,\n comp2\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListBlobsFlatSegmentResponse,\n headersMapper: ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerListBlobFlatSegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar listBlobHierarchySegmentOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n delimiter,\n marker0,\n maxPageSize,\n include1,\n timeoutInSeconds,\n restype2,\n comp2\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListBlobsHierarchySegmentResponse,\n headersMapper: ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerListBlobHierarchySegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getAccountInfoOperationSpec$1 = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$2 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n ArrowConfiguration: ArrowConfiguration,\n ArrowField: ArrowField,\n BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders,\n BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders,\n BlobBreakLeaseHeaders: BlobBreakLeaseHeaders,\n BlobChangeLeaseHeaders: BlobChangeLeaseHeaders,\n BlobCopyFromURLHeaders: BlobCopyFromURLHeaders,\n BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders,\n BlobDeleteHeaders: BlobDeleteHeaders,\n BlobDownloadHeaders: BlobDownloadHeaders,\n BlobGetAccessControlHeaders: BlobGetAccessControlHeaders,\n BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders,\n BlobGetPropertiesHeaders: BlobGetPropertiesHeaders,\n BlobGetTagsHeaders: BlobGetTagsHeaders,\n BlobQueryHeaders: BlobQueryHeaders,\n BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders,\n BlobRenameHeaders: BlobRenameHeaders,\n BlobRenewLeaseHeaders: BlobRenewLeaseHeaders,\n BlobSetAccessControlHeaders: BlobSetAccessControlHeaders,\n BlobSetExpiryHeaders: BlobSetExpiryHeaders,\n BlobSetHTTPHeadersHeaders: BlobSetHTTPHeadersHeaders,\n BlobSetMetadataHeaders: BlobSetMetadataHeaders,\n BlobSetTagsHeaders: BlobSetTagsHeaders,\n BlobSetTierHeaders: BlobSetTierHeaders,\n BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n BlobUndeleteHeaders: BlobUndeleteHeaders,\n DataLakeStorageError: DataLakeStorageError,\n DataLakeStorageErrorError: DataLakeStorageErrorError,\n DelimitedTextConfiguration: DelimitedTextConfiguration,\n JsonTextConfiguration: JsonTextConfiguration,\n QueryFormat: QueryFormat,\n QueryRequest: QueryRequest,\n QuerySerialization: QuerySerialization,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Blob. 
*/\nvar Blob$1 = /** @class */ (function () {\n /**\n * Create a Blob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Blob(client) {\n this.client = client;\n }\n Blob.prototype.download = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, downloadOperationSpec, callback);\n };\n Blob.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec$2, callback);\n };\n Blob.prototype.deleteMethod = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, deleteMethodOperationSpec$1, callback);\n };\n Blob.prototype.setAccessControl = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setAccessControlOperationSpec, callback);\n };\n Blob.prototype.getAccessControl = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccessControlOperationSpec, callback);\n };\n Blob.prototype.rename = function (renameSource, options, callback) {\n return this.client.sendOperationRequest({\n renameSource: renameSource,\n options: options\n }, renameOperationSpec$1, callback);\n };\n Blob.prototype.undelete = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, undeleteOperationSpec, callback);\n };\n Blob.prototype.setExpiry = function (expiryOptions, options, callback) {\n return this.client.sendOperationRequest({\n expiryOptions: expiryOptions,\n options: options\n }, setExpiryOperationSpec, callback);\n };\n Blob.prototype.setHTTPHeaders = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setHTTPHeadersOperationSpec, callback);\n };\n Blob.prototype.setMetadata = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setMetadataOperationSpec$1, callback);\n };\n Blob.prototype.acquireLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, acquireLeaseOperationSpec$1, callback);\n };\n Blob.prototype.releaseLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, releaseLeaseOperationSpec$1, callback);\n };\n Blob.prototype.renewLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, renewLeaseOperationSpec$1, callback);\n };\n Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n proposedLeaseId: proposedLeaseId,\n options: options\n }, changeLeaseOperationSpec$1, callback);\n };\n Blob.prototype.breakLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, breakLeaseOperationSpec$1, callback);\n };\n Blob.prototype.createSnapshot = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, createSnapshotOperationSpec, callback);\n };\n Blob.prototype.startCopyFromURL = function (copySource, options, callback) {\n return this.client.sendOperationRequest({\n copySource: copySource,\n options: options\n }, startCopyFromURLOperationSpec, callback);\n };\n Blob.prototype.copyFromURL = function (copySource, options, callback) {\n return 
this.client.sendOperationRequest({\n copySource: copySource,\n options: options\n }, copyFromURLOperationSpec, callback);\n };\n Blob.prototype.abortCopyFromURL = function (copyId, options, callback) {\n return this.client.sendOperationRequest({\n copyId: copyId,\n options: options\n }, abortCopyFromURLOperationSpec, callback);\n };\n Blob.prototype.setTier = function (tier, options, callback) {\n return this.client.sendOperationRequest({\n tier: tier,\n options: options\n }, setTierOperationSpec, callback);\n };\n Blob.prototype.getAccountInfo = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec$2, callback);\n };\n Blob.prototype.query = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, queryOperationSpec, callback);\n };\n Blob.prototype.getTags = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getTagsOperationSpec, callback);\n };\n Blob.prototype.setTags = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setTagsOperationSpec, callback);\n };\n return Blob;\n}());\n// Operation Specifications\nvar serializer$2 = new coreHttp.Serializer(Mappers$2, true);\nvar downloadOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds\n ],\n headerParameters: [\n range0,\n rangeGetContentMD5,\n rangeGetContentCRC64,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobDownloadHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobDownloadHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getPropertiesOperationSpec$2 = {\n httpMethod: \"HEAD\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar deleteMethodOperationSpec$1 = {\n httpMethod: \"DELETE\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds,\n blobDeleteType\n ],\n headerParameters: [\n deleteSnapshots,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: BlobDeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobDeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setAccessControlOperationSpec = {\n httpMethod: \"PATCH\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n action5\n ],\n 
headerParameters: [\n owner,\n group,\n posixPermissions,\n posixAcl,\n requestId,\n version,\n leaseId0,\n ifMatch,\n ifNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: BlobSetAccessControlHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobSetAccessControlHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getAccessControlOperationSpec = {\n httpMethod: \"HEAD\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n upn,\n action6\n ],\n headerParameters: [\n requestId,\n version,\n leaseId0,\n ifMatch,\n ifNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: BlobGetAccessControlHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobGetAccessControlHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar renameOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n pathRenameMode\n ],\n headerParameters: [\n renameSource,\n directoryProperties,\n posixPermissions,\n posixUmask,\n sourceLeaseId,\n version,\n requestId,\n cacheControl,\n contentType,\n contentEncoding,\n contentLanguage,\n contentDisposition,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: BlobRenameHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobRenameHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar undeleteOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp8\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n headersMapper: BlobUndeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobUndeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setExpiryOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp11\n ],\n headerParameters: [\n version,\n requestId,\n expiryOptions,\n expiresOn\n ],\n responses: {\n 200: {\n headersMapper: BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetExpiryHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setHTTPHeadersOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n version,\n requestId,\n blobCacheControl,\n blobContentType,\n blobContentMD5,\n blobContentEncoding,\n blobContentLanguage,\n blobContentDisposition,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetHTTPHeadersHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetHTTPHeadersHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setMetadataOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp6\n ],\n headerParameters: [\n metadata,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n 
encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetMetadataHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar acquireLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n duration,\n proposedLeaseId0,\n version,\n requestId,\n action0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobAcquireLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar releaseLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action1,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobReleaseLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar renewLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action2,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobRenewLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar changeLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n leaseId1,\n proposedLeaseId1,\n version,\n requestId,\n action4,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobChangeLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar breakLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n breakPeriod,\n version,\n requestId,\n action3,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobBreakLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar createSnapshotOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp12\n ],\n headerParameters: [\n metadata,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 201: {\n headersMapper: BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: 
StorageError,\n headersMapper: BlobCreateSnapshotHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar startCopyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n metadata,\n tier0,\n rehydratePriority,\n copySource,\n version,\n requestId,\n blobTagsString,\n sealBlob,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n sourceIfTags,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 202: {\n headersMapper: BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobStartCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar copyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n metadata,\n tier0,\n copySource,\n version,\n requestId,\n sourceContentMD5,\n blobTagsString,\n xMsRequiresSync,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 202: {\n headersMapper: BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar abortCopyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n copyId,\n timeoutInSeconds,\n comp13\n ],\n headerParameters: [\n version,\n requestId,\n copyActionAbortConstant,\n leaseId0\n ],\n responses: {\n 204: {\n headersMapper: BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobAbortCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setTierOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds,\n comp14\n ],\n headerParameters: [\n tier1,\n rehydratePriority,\n version,\n requestId,\n leaseId0,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetTierHeaders\n },\n 202: {\n headersMapper: BlobSetTierHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetTierHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getAccountInfoOperationSpec$2 = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar queryOperationSpec = {\n httpMethod: \"POST\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n comp15\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"queryRequest\"\n ],\n mapper: QueryRequest\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 200: 
{\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobQueryHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobQueryHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getTagsOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n snapshot,\n versionId,\n comp16\n ],\n headerParameters: [\n version,\n requestId,\n ifTags,\n leaseId0\n ],\n responses: {\n 200: {\n bodyMapper: BlobTags,\n headersMapper: BlobGetTagsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetTagsHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setTagsOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n versionId,\n comp16\n ],\n headerParameters: [\n version,\n transactionalContentMD5,\n transactionalContentCrc64,\n requestId,\n ifTags,\n leaseId0\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"tags\"\n ],\n mapper: BlobTags\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 204: {\n headersMapper: BlobSetTagsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetTagsHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$3 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n ClearRange: ClearRange,\n PageBlobClearPagesHeaders: PageBlobClearPagesHeaders,\n PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders: PageBlobCreateHeaders,\n PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders,\n PageBlobResizeHeaders: PageBlobResizeHeaders,\n PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders,\n PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders,\n PageList: PageList,\n PageRange: PageRange,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a PageBlob. 
*/\nvar PageBlob = /** @class */ (function () {\n /**\n * Create a PageBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function PageBlob(client) {\n this.client = client;\n }\n PageBlob.prototype.create = function (contentLength, blobContentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n blobContentLength: blobContentLength,\n options: options\n }, createOperationSpec$1, callback);\n };\n PageBlob.prototype.uploadPages = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, uploadPagesOperationSpec, callback);\n };\n PageBlob.prototype.clearPages = function (contentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n options: options\n }, clearPagesOperationSpec, callback);\n };\n PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options, callback) {\n return this.client.sendOperationRequest({\n sourceUrl: sourceUrl,\n sourceRange: sourceRange,\n contentLength: contentLength,\n range: range,\n options: options\n }, uploadPagesFromURLOperationSpec, callback);\n };\n PageBlob.prototype.getPageRanges = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPageRangesOperationSpec, callback);\n };\n PageBlob.prototype.getPageRangesDiff = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPageRangesDiffOperationSpec, callback);\n };\n PageBlob.prototype.resize = function (blobContentLength, options, callback) {\n return this.client.sendOperationRequest({\n blobContentLength: blobContentLength,\n options: options\n }, resizeOperationSpec, callback);\n };\n PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options, callback) {\n return this.client.sendOperationRequest({\n sequenceNumberAction: sequenceNumberAction,\n options: options\n }, updateSequenceNumberOperationSpec, callback);\n };\n PageBlob.prototype.copyIncremental = function (copySource, options, callback) {\n return this.client.sendOperationRequest({\n copySource: copySource,\n options: options\n }, copyIncrementalOperationSpec, callback);\n };\n return PageBlob;\n}());\n// Operation Specifications\nvar serializer$3 = new coreHttp.Serializer(Mappers$3, true);\nvar createOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n contentLength,\n tier0,\n metadata,\n encryptionScope,\n blobContentLength,\n blobSequenceNumber,\n version,\n requestId,\n blobTagsString,\n blobType0,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: PageBlobCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar uploadPagesOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp17\n ],\n headerParameters: [\n contentLength,\n 
transactionalContentMD5,\n transactionalContentCrc64,\n range0,\n encryptionScope,\n version,\n requestId,\n pageWrite0,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUploadPagesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar clearPagesOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp17\n ],\n headerParameters: [\n contentLength,\n range0,\n encryptionScope,\n version,\n requestId,\n pageWrite1,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobClearPagesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar uploadPagesFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp17\n ],\n headerParameters: [\n sourceUrl,\n sourceRange0,\n sourceContentMD5,\n sourceContentCrc64,\n contentLength,\n range1,\n encryptionScope,\n version,\n requestId,\n pageWrite0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUploadPagesFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar getPageRangesOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n comp18\n ],\n headerParameters: [\n range0,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: PageList,\n headersMapper: PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobGetPageRangesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar getPageRangesDiffOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n prevsnapshot,\n comp18\n ],\n headerParameters: [\n prevSnapshotUrl,\n range0,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: PageList,\n headersMapper: PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: StorageError,\n 
headersMapper: PageBlobGetPageRangesDiffHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar resizeOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n encryptionScope,\n blobContentLength,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: PageBlobResizeHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobResizeHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar updateSequenceNumberOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n sequenceNumberAction,\n blobSequenceNumber,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUpdateSequenceNumberHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar copyIncrementalOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp19\n ],\n headerParameters: [\n copySource,\n version,\n requestId,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobCopyIncrementalHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$4 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders: AppendBlobCreateHeaders,\n AppendBlobSealHeaders: AppendBlobSealHeaders,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a AppendBlob. 
*/\nvar AppendBlob = /** @class */ (function () {\n /**\n * Create a AppendBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function AppendBlob(client) {\n this.client = client;\n }\n AppendBlob.prototype.create = function (contentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n options: options\n }, createOperationSpec$2, callback);\n };\n AppendBlob.prototype.appendBlock = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, appendBlockOperationSpec, callback);\n };\n AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n sourceUrl: sourceUrl,\n contentLength: contentLength,\n options: options\n }, appendBlockFromUrlOperationSpec, callback);\n };\n AppendBlob.prototype.seal = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, sealOperationSpec, callback);\n };\n return AppendBlob;\n}());\n// Operation Specifications\nvar serializer$4 = new coreHttp.Serializer(Mappers$4, true);\nvar createOperationSpec$2 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n contentLength,\n metadata,\n encryptionScope,\n version,\n requestId,\n blobTagsString,\n blobType1,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar appendBlockOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp20\n ],\n headerParameters: [\n contentLength,\n transactionalContentMD5,\n transactionalContentCrc64,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n maxSize,\n appendPosition,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobAppendBlockHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar appendBlockFromUrlOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp20\n ],\n headerParameters: [\n sourceUrl,\n sourceRange1,\n sourceContentMD5,\n sourceContentCrc64,\n contentLength,\n transactionalContentMD5,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n maxSize,\n appendPosition,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n 
sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobAppendBlockFromUrlHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar sealOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp21\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n appendPosition\n ],\n responses: {\n 200: {\n headersMapper: AppendBlobSealHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobSealHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$5 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n Block: Block,\n BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders,\n BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders,\n BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders,\n BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders,\n BlockBlobUploadHeaders: BlockBlobUploadHeaders,\n BlockList: BlockList,\n BlockLookupList: BlockLookupList,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a BlockBlob. 
*/\nvar BlockBlob = /** @class */ (function () {\n /**\n * Create a BlockBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function BlockBlob(client) {\n this.client = client;\n }\n BlockBlob.prototype.upload = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, uploadOperationSpec, callback);\n };\n BlockBlob.prototype.putBlobFromUrl = function (contentLength, copySource, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n copySource: copySource,\n options: options\n }, putBlobFromUrlOperationSpec, callback);\n };\n BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options, callback) {\n return this.client.sendOperationRequest({\n blockId: blockId,\n contentLength: contentLength,\n body: body,\n options: options\n }, stageBlockOperationSpec, callback);\n };\n BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options, callback) {\n return this.client.sendOperationRequest({\n blockId: blockId,\n contentLength: contentLength,\n sourceUrl: sourceUrl,\n options: options\n }, stageBlockFromURLOperationSpec, callback);\n };\n BlockBlob.prototype.commitBlockList = function (blocks, options, callback) {\n return this.client.sendOperationRequest({\n blocks: blocks,\n options: options\n }, commitBlockListOperationSpec, callback);\n };\n BlockBlob.prototype.getBlockList = function (listType, options, callback) {\n return this.client.sendOperationRequest({\n listType: listType,\n options: options\n }, getBlockListOperationSpec, callback);\n };\n return BlockBlob;\n}());\n// Operation Specifications\nvar serializer$5 = new coreHttp.Serializer(Mappers$5, true);\nvar uploadOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n transactionalContentMD5,\n contentLength,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n blobTagsString,\n blobType2,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobUploadHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar putBlobFromUrlOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n transactionalContentMD5,\n contentLength,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n sourceContentMD5,\n blobTagsString,\n copySource,\n copySourceBlobProperties,\n blobType2,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n 
sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n sourceIfTags\n ],\n responses: {\n 201: {\n headersMapper: BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobPutBlobFromUrlHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar stageBlockOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n blockId,\n timeoutInSeconds,\n comp22\n ],\n headerParameters: [\n contentLength,\n transactionalContentMD5,\n transactionalContentCrc64,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobStageBlockHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar stageBlockFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n blockId,\n timeoutInSeconds,\n comp22\n ],\n headerParameters: [\n contentLength,\n sourceUrl,\n sourceRange1,\n sourceContentMD5,\n sourceContentCrc64,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobStageBlockFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar commitBlockListOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp23\n ],\n headerParameters: [\n transactionalContentMD5,\n transactionalContentCrc64,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n blobTagsString,\n blobCacheControl,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"blocks\",\n mapper: tslib.__assign(tslib.__assign({}, BlockLookupList), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 201: {\n headersMapper: BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobCommitBlockListHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar getBlockListOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n listType,\n timeoutInSeconds,\n comp23\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: BlockList,\n headersMapper: BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobGetBlockListHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\n\n// Copyright (c) Microsoft Corporation.\n/**\n * The `@azure/logger` configuration for this package.\n */\nvar logger = 
logger$1.createClientLogger(\"storage-blob\");\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar SDK_VERSION = \"12.5.0\";\nvar SERVICE_VERSION = \"2020-06-12\";\nvar BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB\nvar BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB\nvar BLOCK_BLOB_MAX_BLOCKS = 50000;\nvar DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB\nvar DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB\nvar DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nvar StorageOAuthScopes = \"https://storage.azure.com/.default\";\nvar URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\"\n }\n};\nvar HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416\n};\nvar HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\"\n};\nvar ETagNone = \"\";\nvar ETagAny = \"*\";\nvar SIZE_1_MB = 1 * 1024 * 1024;\nvar BATCH_MAX_REQUEST = 256;\nvar BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nvar HTTP_LINE_ENDING = \"\\r\\n\";\nvar HTTP_VERSION_1_1 = \"HTTP/1.1\";\nvar EncryptionAlgorithmAES25 = \"AES256\";\nvar DevelopmentConnectionString = \"DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;\";\nvar StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n 
\"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\"\n];\nvar StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\"\n];\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. 
Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nfunction escapeURLPath(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var path = urlParsed.getPath();\n path = path || \"/\";\n path = escape(path);\n urlParsed.setPath(path);\n return urlParsed.toString();\n}\nfunction getProxyUriFromDevConnString(connectionString) {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n var proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n var matchCredentials = connectionString.split(\";\");\n for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) {\n var element = matchCredentials_1[_i];\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")[1];\n }\n }\n }\n return proxyUri;\n}\nfunction getValueInConnString(connectionString, argument) {\n var elements = connectionString.split(\";\");\n for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) {\n var element = elements_1[_i];\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")[1];\n }\n }\n return \"\";\n}\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nfunction extractConnectionStringParts(connectionString) {\n var proxyUri = \"\";\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n // Matching BlobEndpoint in the Account connection string\n var blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n if (connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1) {\n // Account connection string\n var defaultEndpointsProtocol = \"\";\n var accountName = \"\";\n var accountKey = Buffer.from(\"accountKey\", \"base64\");\n var endpointSuffix = \"\";\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n var protocol = defaultEndpointsProtocol.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\");\n }\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = defaultEndpointsProtocol + \"://\" + accountName + \".blob.\" + endpointSuffix;\n }\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n }\n else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName: accountName,\n accountKey: accountKey,\n proxyUri: proxyUri\n };\n }\n else {\n // SAS connection string\n var accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n var accountName = getAccountNameFromUrl(blobEndpoint);\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n }\n else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n return { kind: \"SASConnString\", url: blobEndpoint, accountName: accountName, accountSas: accountSas };\n }\n}\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text) {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nfunction appendToURLPath(url, name) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? \"\" + path + name : path + \"/\" + name) : name;\n urlParsed.setPath(path);\n return urlParsed.toString();\n}\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nfunction setURLParameter(url, name, value) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nfunction getURLParameter(url, name) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nfunction setURLHost(url, host) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nfunction getURLPath(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nfunction getURLScheme(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nfunction getURLPathAndQuery(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n var queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString != \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : \"?\" + queryString; // Ensure query string start with '?'\n }\n return \"\" + pathString + queryString;\n}\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nfunction getURLQueries(url) {\n var queryString = coreHttp.URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n var querySubStrings = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter(function (value) {\n var indexOfEqual = value.indexOf(\"=\");\n var lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1);\n });\n var queries = {};\n for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) {\n var querySubString = querySubStrings_1[_i];\n var splitResults = querySubString.split(\"=\");\n var key = splitResults[0];\n var value = splitResults[1];\n queries[key] = value;\n }\n return queries;\n}\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nfunction appendToURLQuery(url, queryParts) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n }\n else {\n query = queryParts;\n }\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nfunction truncatedISO8061Date(date, withMilliseconds) {\n if (withMilliseconds === void 0) { withMilliseconds = true; }\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n var dateString = date.toISOString();\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n/**\n * Base64 encode.\n *\n * @param content -\n */\nfunction base64encode(content) {\n return !coreHttp.isNode ? 
btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nfunction generateBlockID(blockIDPrefix, blockIndex) {\n // To generate a 64 bytes base64 string, source string should be 48\n var maxSourceStringLength = 48;\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n var maxBlockIndexLength = 6;\n var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n var res = blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nfunction delay(timeInMs, aborter, abortError) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var timeout;\n var abortHandler = function () {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n var resolveHandler = function () {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n timeout = setTimeout(resolveHandler, timeInMs);\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n })];\n });\n });\n}\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nfunction padStart(currentString, targetLength, padString) {\n if (padString === void 0) { padString = \" \"; }\n // TS doesn't know this code needs to run downlevel sometimes.\n // @ts-expect-error\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n }\n else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nfunction iEqual(str1, str2) {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nfunction getAccountNameFromUrl(url) {\n var parsedUrl = coreHttp.URLBuilder.parse(url);\n var accountName;\n try {\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost().split(\".\")[0];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath().split(\"/\")[1];\n }\n else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n }\n catch (error) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\nfunction isIpEndpointStyle(parsedUrl) {\n if (parsedUrl.getHost() == undefined) {\n return false;\n }\n var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? \"\" : \":\" + parsedUrl.getPort());\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port), use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(host);\n}\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nfunction toBlobTagsString(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var tagPairs = [];\n for (var key in tags) {\n if (tags.hasOwnProperty(key)) {\n var value = tags[key];\n tagPairs.push(encodeURIComponent(key) + \"=\" + encodeURIComponent(value));\n }\n }\n return tagPairs.join(\"&\");\n}\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nfunction toBlobTags(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var res = {\n blobTagSet: []\n };\n for (var key in tags) {\n if (tags.hasOwnProperty(key)) {\n var value = tags[key];\n res.blobTagSet.push({\n key: key,\n value: value\n });\n }\n }\n return res;\n}\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nfunction toTags(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var res = {};\n for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) {\n var blobTag = _a[_i];\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nfunction toQuerySerialization(textConfiguration) {\n if (textConfiguration === undefined) {\n return undefined;\n }\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false\n }\n }\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator\n }\n }\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema\n }\n }\n };\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\nfunction parseObjectReplicationRecord(objectReplicationRecord) {\n if (!objectReplicationRecord) {\n return undefined;\n }\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n var orProperties = [];\n var _loop_1 = 
function (key) {\n var ids = key.split(\"_\");\n var policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n var rule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key]\n };\n var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; });\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n }\n else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule]\n });\n }\n };\n for (var key in objectReplicationRecord) {\n _loop_1(key);\n }\n return orProperties;\n}\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nfunction attachCredential(thing, credential) {\n thing.credential = credential;\n return thing;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nvar StorageBrowserPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageBrowserPolicy, _super);\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n function StorageBrowserPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n */\n StorageBrowserPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n {\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n }\n });\n });\n };\n return StorageBrowserPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nvar StorageBrowserPolicyFactory = /** @class */ (function () {\n function StorageBrowserPolicyFactory() {\n }\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new StorageBrowserPolicy(nextPolicy, options);\n };\n return StorageBrowserPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation.\n(function (StorageRetryPolicyType) {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n StorageRetryPolicyType[StorageRetryPolicyType[\"EXPONENTIAL\"] = 0] = \"EXPONENTIAL\";\n /**\n * Linear retry. 
Retry time delay grows linearly.\n */\n StorageRetryPolicyType[StorageRetryPolicyType[\"FIXED\"] = 1] = \"FIXED\";\n})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {}));\n// Default values of StorageRetryOptions\nvar DEFAULT_RETRY_OPTIONS = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined // Use server side default timeout strategy\n};\nvar RETRY_ABORT_ERROR = new abortController.AbortError(\"The operation was aborted.\");\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nvar StorageRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageRetryPolicy, _super);\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n function StorageRetryPolicy(nextPolicy, options, retryOptions) {\n if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; }\n var _this = _super.call(this, nextPolicy, options) || this;\n // Initialize retry options\n _this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs)\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost\n };\n return _this;\n }\n /**\n * Sends request.\n *\n * @param request -\n */\n StorageRetryPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this.attemptSendRequest(request, false, 1)];\n });\n });\n };\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var newRequest, isPrimaryRetry, response, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n newRequest = request.clone();\n isPrimaryRetry = secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost);\n }\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString());\n }\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n logger.info(\"RetryPolicy: =====> Try=\" + attempt + \" \" + (isPrimaryRetry ? \"Primary\" : \"Secondary\"));\n return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)];\n case 2:\n response = _a.sent();\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return [2 /*return*/, response];\n }\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n return [3 /*break*/, 4];\n case 3:\n err_1 = _a.sent();\n logger.error(\"RetryPolicy: Caught error, message: \" + err_1.message + \", code: \" + err_1.code);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) {\n throw err_1;\n }\n return [3 /*break*/, 4];\n case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)];\n case 5:\n _a.sent();\n return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)];\n case 6: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) {\n if (attempt >= this.retryOptions.maxTries) {\n logger.info(\"RetryPolicy: Attempt(s) \" + attempt + \" >= maxTries \" + this.retryOptions\n .maxTries + \", no further try.\");\n return false;\n }\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n var retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\" // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) {\n var retriableError = retriableErrors_1[_i];\n if (err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)) {\n logger.info(\"RetryPolicy: Network error \" + retriableError + \" found, will retry.\");\n return true;\n }\n }\n }\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. 
This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n var statusCode = response ? response.status : err ? err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(\"RetryPolicy: Secondary access with 404, will retry.\");\n return true;\n }\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(\"RetryPolicy: Will retry for status code \" + statusCode + \".\");\n return true;\n }\n }\n if ((err === null || err === void 0 ? void 0 : err.code) === \"PARSE_ERROR\" && (err === null || err === void 0 ? void 0 : err.message.startsWith(\"Error \\\"Error: Unclosed root tag\"))) {\n logger.info(\"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\");\n return true;\n }\n return false;\n };\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var delayTimeInMs;\n return tslib.__generator(this, function (_a) {\n delayTimeInMs = 0;\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case exports.StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs);\n break;\n case exports.StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs;\n break;\n }\n }\n else {\n delayTimeInMs = Math.random() * 1000;\n }\n logger.info(\"RetryPolicy: Delay for \" + delayTimeInMs + \"ms\");\n return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)];\n });\n });\n };\n return StorageRetryPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nvar StorageRetryPolicyFactory = /** @class */ (function () {\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n function StorageRetryPolicyFactory(retryOptions) {\n this.retryOptions = retryOptions;\n }\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n };\n return StorageRetryPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nvar CredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(CredentialPolicy, _super);\n function CredentialPolicy() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n */\n CredentialPolicy.prototype.sendRequest = function (request) {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n };\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n CredentialPolicy.prototype.signRequest = function (request) {\n // Child classes must override this method with request signing. 
This method\n // will be executed in sendRequest().\n return request;\n };\n return CredentialPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nvar AnonymousCredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(AnonymousCredentialPolicy, _super);\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n function AnonymousCredentialPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n return AnonymousCredentialPolicy;\n}(CredentialPolicy));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nvar Credential = /** @class */ (function () {\n function Credential() {\n }\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n Credential.prototype.create = function (\n // tslint:disable-next-line:variable-name\n _nextPolicy, \n // tslint:disable-next-line:variable-name\n _options) {\n throw new Error(\"Method should be implemented in children classes.\");\n };\n return Credential;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nvar AnonymousCredential = /** @class */ (function (_super) {\n tslib.__extends(AnonymousCredential, _super);\n function AnonymousCredential() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n AnonymousCredential.prototype.create = function (nextPolicy, options) {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n };\n return AnonymousCredential;\n}(Credential));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nvar TelemetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(TelemetryPolicy, _super);\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n function TelemetryPolicy(nextPolicy, options, telemetry) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.telemetry = telemetry;\n return _this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n */\n TelemetryPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n {\n if (!request.headers) {\n request.headers = new coreHttp.HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return TelemetryPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nvar 
TelemetryPolicyFactory = /** @class */ (function () {\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n function TelemetryPolicyFactory(telemetry) {\n var userAgentInfo = [];\n {\n if (telemetry) {\n var telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n // e.g. azsdk-js-storageblob/10.0.0\n var libInfo = \"azsdk-js-storageblob/\" + SDK_VERSION;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n var runtimeInfo = \"(NODE-VERSION \" + process.version + \"; \" + os.type() + \" \" + os.release() + \")\";\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n this.telemetryString = userAgentInfo.join(\" \");\n }\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n };\n return TelemetryPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar _defaultHttpClient = new coreHttp.DefaultHttpClient();\nfunction getCachedDefaultHttpClient() {\n return _defaultHttpClient;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nvar Pipeline = /** @class */ (function () {\n /**\n * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n function Pipeline(factories, options) {\n if (options === void 0) { options = {}; }\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() });\n }\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n Pipeline.prototype.toServiceClientOptions = function () {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories\n };\n };\n return Pipeline;\n}());\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nfunction newPipeline(credential, pipelineOptions) {\n if (pipelineOptions === void 0) { pipelineOptions = {}; }\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n var factories = [\n coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n coreHttp.generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions),\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n coreHttp.deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n coreHttp.logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters\n })\n ];\n {\n // policies only available in Node.js runtime, not in browsers\n factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(coreHttp.disableResponseDecompressionPolicy());\n }\n factories.push(coreHttp.isTokenCredential(credential)\n ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential)\n : credential);\n return new Pipeline(factories, pipelineOptions);\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nvar StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageSharedKeyCredentialPolicy, _super);\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.factory = factory;\n return _this;\n }\n /**\n * Signs request.\n *\n * @param request -\n */\n StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n if (request.body && typeof request.body === \"string\" && request.body.length > 0) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n var stringToSign = [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE)\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n var signature = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(HeaderConstants.AUTHORIZATION, \"SharedKey \" + 
this.factory.accountName + \":\" + signature);\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n };\n /**\n * Retrieve header value according to shared key sign rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) {\n var value = request.headers.get(headerName);\n if (!value) {\n return \"\";\n }\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n return value;\n };\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) {\n var headersArray = request.headers.headersArray().filter(function (value) {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n headersArray.sort(function (a, b) {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n // Remove duplicate headers\n headersArray = headersArray.filter(function (value, index, array) {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n var canonicalizedHeadersStringToSign = \"\";\n headersArray.forEach(function (header) {\n canonicalizedHeadersStringToSign += header.name\n .toLowerCase()\n .trimRight() + \":\" + header.value.trimLeft() + \"\\n\";\n });\n return canonicalizedHeadersStringToSign;\n };\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) {\n var path = getURLPath(request.url) || \"/\";\n var canonicalizedResourceString = \"\";\n canonicalizedResourceString += \"/\" + this.factory.accountName + path;\n var queries = getURLQueries(request.url);\n var lowercaseQueries = {};\n if (queries) {\n var queryKeys = [];\n for (var key in queries) {\n if (queries.hasOwnProperty(key)) {\n var lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n queryKeys.sort();\n for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) {\n var key = queryKeys_1[_i];\n canonicalizedResourceString += \"\\n\" + key + 
\":\" + decodeURIComponent(lowercaseQueries[key]);\n }\n }\n return canonicalizedResourceString;\n };\n return StorageSharedKeyCredentialPolicy;\n}(CredentialPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nvar StorageSharedKeyCredential = /** @class */ (function (_super) {\n tslib.__extends(StorageSharedKeyCredential, _super);\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n function StorageSharedKeyCredential(accountName, accountKey) {\n var _this = _super.call(this) || this;\n _this.accountName = accountName;\n _this.accountKey = Buffer.from(accountKey, \"base64\");\n return _this;\n }\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n };\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {\n return crypto.createHmac(\"sha256\", this.accountKey)\n .update(stringToSign, \"utf8\")\n .digest(\"base64\");\n };\n return StorageSharedKeyCredential;\n}(Credential));\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\nvar packageName = \"azure-storage-blob\";\nvar packageVersion = \"12.5.0\";\nvar StorageClientContext = /** @class */ (function (_super) {\n tslib.__extends(StorageClientContext, _super);\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the targe of the desired\n * operation.\n * @param [options] The parameter options\n */\n function StorageClientContext(url, options) {\n var _this = this;\n if (url == undefined) {\n throw new Error(\"'url' cannot be null.\");\n }\n if (!options) {\n options = {};\n }\n if (!options.userAgent) {\n var defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = packageName + \"/\" + packageVersion + \" \" + defaultUserAgent;\n }\n _this = _super.call(this, undefined, options) || this;\n _this.version = '2020-06-12';\n _this.baseUri = \"{url}\";\n _this.requestContentType = \"application/json; charset=utf-8\";\n _this.url = url;\n return _this;\n }\n return StorageClientContext;\n}(coreHttp.ServiceClient));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nvar StorageClient = /** @class */ (function () {\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n function StorageClient(url, pipeline) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions());\n this.isHttps = 
iEqual(getURLScheme(this.url) || \"\", \"https\");\n this.credential = new AnonymousCredential();\n for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) {\n var factory = _a[_i];\n if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential) {\n this.credential = factory;\n }\n else if (coreHttp.isTokenCredential(factory.credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = factory.credential;\n }\n }\n // Override protocol layer's default content-type\n var storageClientContext = this.storageClientContext;\n storageClientContext.requestContentType = undefined;\n }\n return StorageClient;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nvar createSpan = coreTracing.createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\"\n});\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nfunction convertTracingToRequestOptionsBase(options) {\n var _a;\n return {\n spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions\n };\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nvar BlobSASPermissions = /** @class */ (function () {\n function BlobSASPermissions() {\n /**\n * Specifies Read access granted.\n */\n this.read = false;\n /**\n * Specifies Add access granted.\n */\n this.add = false;\n /**\n * Specifies Create access granted.\n */\n this.create = false;\n /**\n * Specifies Write access granted.\n */\n this.write = false;\n /**\n * Specifies Delete access granted.\n */\n this.delete = false;\n /**\n * Specifies Delete version access granted.\n */\n this.deleteVersion = false;\n /**\n * Specfies Tag access granted.\n */\n this.tag = false;\n /**\n * Specifies Move access granted.\n */\n this.move = false;\n /**\n * Specifies Execute access granted.\n */\n this.execute = false;\n }\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n BlobSASPermissions.parse = function (permissions) {\n var blobSASPermissions = new BlobSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var char = permissions_1[_i];\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n default:\n throw new RangeError(\"Invalid permission: \" + char);\n }\n }\n return blobSASPermissions;\n };\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n BlobSASPermissions.from = function (permissionLike) {\n var blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n return blobSASPermissions;\n };\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n BlobSASPermissions.prototype.toString = function () {\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n return permissions.join(\"\");\n };\n return BlobSASPermissions;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. 
It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nvar ContainerSASPermissions = /** @class */ (function () {\n function ContainerSASPermissions() {\n /**\n * Specifies Read access granted.\n */\n this.read = false;\n /**\n * Specifies Add access granted.\n */\n this.add = false;\n /**\n * Specifies Create access granted.\n */\n this.create = false;\n /**\n * Specifies Write access granted.\n */\n this.write = false;\n /**\n * Specifies Delete access granted.\n */\n this.delete = false;\n /**\n * Specifies Delete version access granted.\n */\n this.deleteVersion = false;\n /**\n * Specifies List access granted.\n */\n this.list = false;\n /**\n * Specfies Tag access granted.\n */\n this.tag = false;\n /**\n * Specifies Move access granted.\n */\n this.move = false;\n /**\n * Specifies Execute access granted.\n */\n this.execute = false;\n }\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n ContainerSASPermissions.parse = function (permissions) {\n var containerSASPermissions = new ContainerSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var char = permissions_1[_i];\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n default:\n throw new RangeError(\"Invalid permission \" + char);\n }\n }\n return containerSASPermissions;\n };\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n ContainerSASPermissions.from = function (permissionLike) {\n var containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n return containerSASPermissions;\n };\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n ContainerSASPermissions.prototype.toString = function () {\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n return permissions.join(\"\");\n };\n return ContainerSASPermissions;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nvar UserDelegationKeyCredential = /** @class */ (function () {\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n function UserDelegationKeyCredential(accountName, userDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n return crypto.createHmac(\"sha256\", this.key)\n .update(stringToSign, \"utf8\")\n .digest(\"base64\");\n };\n return UserDelegationKeyCredential;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nfunction ipRangeToString(ipRange) {\n return ipRange.end ? ipRange.start + \"-\" + ipRange.end : ipRange.start;\n}\n\n// Copyright (c) Microsoft Corporation.\n(function (SASProtocol) {\n /**\n * Protocol that allows HTTPS only\n */\n SASProtocol[\"Https\"] = \"https\";\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n SASProtocol[\"HttpsAndHttp\"] = \"https,http\";\n})(exports.SASProtocol || (exports.SASProtocol = {}));\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. 
Once generated, it can be encoded into a {@code String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nvar SASQueryParameters = /** @class */ (function () {\n function SASQueryParameters(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) {\n this.version = version;\n this.signature = signature;\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n }\n else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n Object.defineProperty(SASQueryParameters.prototype, \"ipRange\", {\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n get: function () {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start\n };\n }\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n SASQueryParameters.prototype.toString = function () {\n var params = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"skoid\",\n \"sktid\",\n \"skt\",\n \"ske\",\n \"sks\",\n \"skv\",\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\"\n ];\n var queries = [];\n for (var _i = 0, params_1 = params; _i < params_1.length; _i++) {\n var param = params_1[_i];\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined);\n break;\n case \"se\":\n this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined);\n break;\n case \"sip\":\n this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined);\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined);\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined);\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n };\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) {\n if (!value) {\n return;\n }\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(key + \"=\" + value);\n }\n };\n return SASQueryParameters;\n}());\n\n// Copyright (c) Microsoft Corporation.\nfunction generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {\n var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n var sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? 
sharedKeyCredentialOrUserDelegationKey\n : undefined;\n var userDelegationKeyCredential;\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey);\n }\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n }\n else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential);\n }\n else {\n return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential);\n }\n }\n }\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n }\n else {\n throw new RangeError(\"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\");\n }\n }\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n if (!blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\");\n }\n var resource = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\"\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n if (!blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? 
verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\"\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? 
verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n ].join(\"\\n\");\n var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n ].join(\"\\n\");\n var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId);\n}\nfunction getCanonicalName(accountName, containerName, blobName) {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n var elements = [\"/blob/\" + accountName + \"/\" + containerName];\n if (blobName) {\n elements.push(\"/\" + blobName);\n }\n return elements.join(\"\");\n}\nfunction SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) {\n var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n if (blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n if (version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n if (version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\");\n }\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nvar BlobLeaseClient = /** @class */ (function () {\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - 
Initial proposed lease id.\n */\n function BlobLeaseClient(client, leaseId) {\n var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions());\n this._url = client.url;\n if (client.name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n }\n else {\n this._isContainer = false;\n this._containerOrBlobOperation = new Blob$1(clientContext);\n }\n if (!leaseId) {\n leaseId = coreHttp.generateUuid();\n }\n this._leaseId = leaseId;\n }\n Object.defineProperty(BlobLeaseClient.prototype, \"leaseId\", {\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n get: function () {\n return this._leaseId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobLeaseClient.prototype, \"url\", {\n /**\n * Gets the url.\n *\n * @readonly\n */\n get: function () {\n return this._url;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n BlobLeaseClient.prototype.acquireLease = function (duration, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, updatedOptions, e_1;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-acquireLease\", options), span = _g.span, updatedOptions = _g.updatedOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.acquireLease(tslib.__assign({ abortSignal: options.abortSignal, duration: duration, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_1 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, updatedOptions, response, e_2;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-changeLease\", options), span = _g.span, updatedOptions = _g.updatedOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _h.sent();\n this._leaseId = proposedLeaseId;\n return [2 /*return*/, response];\n case 3:\n e_2 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n BlobLeaseClient.prototype.releaseLease = function (options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, updatedOptions, e_3;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-releaseLease\", options), span = _g.span, updatedOptions = _g.updatedOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_3 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n BlobLeaseClient.prototype.renewLease = function (options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, updatedOptions, e_4;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-renewLease\", options), span = _g.span, updatedOptions = _g.updatedOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_4 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, updatedOptions, operationOptions, e_5;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-breakLease\", options), span = _g.span, updatedOptions = _g.updatedOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n operationOptions = tslib.__assign({ abortSignal: options.abortSignal, breakPeriod: breakPeriod, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions));\n return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_5 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return BlobLeaseClient;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nvar RetriableReadableStream = /** @class */ (function (_super) {\n tslib.__extends(RetriableReadableStream, _super);\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n function RetriableReadableStream(source, getter, offset, count, options) {\n if (options === void 0) { options = {}; }\n var _this = _super.call(this, { highWaterMark: options.highWaterMark }) || this;\n _this.retries = 0;\n _this.sourceDataHandler = function (data) {\n if (_this.options.doInjectErrorOnce) {\n _this.options.doInjectErrorOnce = undefined;\n _this.source.pause();\n _this.source.removeAllListeners(\"data\");\n _this.source.emit(\"end\");\n return;\n }\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n _this.offset += data.length;\n if (_this.onProgress) {\n _this.onProgress({ loadedBytes: _this.offset - _this.start });\n }\n if (!_this.push(data)) {\n _this.source.pause();\n }\n };\n _this.sourceErrorOrEndHandler = function (err) {\n if (err && err.name === \"AbortError\") {\n _this.destroy(err);\n return;\n }\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n _this.removeSourceEventHandlers();\n if (_this.offset - 1 === _this.end) {\n _this.push(null);\n }\n else if (_this.offset <= _this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (_this.retries < _this.maxRetryRequests) {\n _this.retries += 1;\n _this.getter(_this.offset)\n .then(function (newSource) {\n _this.source = newSource;\n _this.setSourceEventHandlers();\n })\n .catch(function (error) {\n _this.destroy(error);\n });\n }\n else {\n _this.destroy(new Error(\n // tslint:disable-next-line:max-line-length\n \"Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: \" + (_this\n .offset - 1) + \", data needed offset: \" + _this.end + \", retries: \" + _this.retries + \", max retries: \" + _this.maxRetryRequests));\n }\n }\n else {\n _this.destroy(new Error(\"Data corruption failure: Received more data than original request, data needed offset is \" + _this.end + \", received offset: \" + (_this.offset - 1)));\n }\n };\n _this.getter = getter;\n _this.source = source;\n _this.start = offset;\n _this.offset = offset;\n _this.end = offset + count - 1;\n _this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0;\n _this.onProgress = options.onProgress;\n _this.options = options;\n _this.setSourceEventHandlers();\n return _this;\n }\n RetriableReadableStream.prototype._read = function () {\n this.source.resume();\n };\n RetriableReadableStream.prototype.setSourceEventHandlers = function () {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n };\n RetriableReadableStream.prototype.removeSourceEventHandlers = function () {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n };\n RetriableReadableStream.prototype._destroy = function (error, callback) {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n this.source.destroy();\n callback(error === null ? undefined : error);\n };\n return RetriableReadableStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nvar BlobDownloadResponse = /** @class */ (function () {\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n function BlobDownloadResponse(originalResponse, getter, offset, count, options) {\n if (options === void 0) { options = {}; }\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);\n }\n Object.defineProperty(BlobDownloadResponse.prototype, \"acceptRanges\", {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.acceptRanges;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"cacheControl\", {\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.cacheControl;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentDisposition\", {\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentDisposition;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentEncoding\", {\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentEncoding;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentLanguage\", {\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * 
@readonly\n */\n get: function () {\n return this.originalResponse.contentLanguage;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobSequenceNumber\", {\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobSequenceNumber;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobType\", {\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentLength\", {\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentLength;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentMD5\", {\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentRange\", {\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentRange;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentType\", {\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyCompletedOn\", {\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyCompletedOn;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyId\", {\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyProgress\", {\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyProgress;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copySource\", {\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copySource;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyStatus\", {\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyStatusDescription\", {\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyStatusDescription;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseDuration\", {\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseDuration;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseState\", {\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseState;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseStatus\", {\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"date\", {\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.date;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobCommittedBlockCount\", {\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobCommittedBlockCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"etag\", {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.etag;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"tagCount\", {\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.tagCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"errorCode\", {\n /**\n * The error code.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.errorCode;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"isServerEncrypted\", {\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.isServerEncrypted;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobContentMD5\", {\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobContentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"lastModified\", {\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.lastModified;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"lastAccessed\", {\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.lastAccessed;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"metadata\", {\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.metadata;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"requestId\", {\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.requestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"clientRequestId\", {\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.clientRequestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"version\", {\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.version;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"versionId\", {\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.versionId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"isCurrentVersion\", {\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.isCurrentVersion;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"encryptionKeySha256\", {\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.encryptionKeySha256;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentCrc64\", {\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n get: function () {\n return this.originalResponse.contentCrc64;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"objectReplicationDestinationPolicyId\", {\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"objectReplicationSourceProperties\", {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.objectReplicationSourceProperties;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"isSealed\", {\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.isSealed;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentAsBlob\", {\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobBody;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"readableStreamBody\", {\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n get: function () {\n return coreHttp.isNode ? 
this.blobDownloadStream : undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"_response\", {\n /**\n * The HTTP response.\n */\n get: function () {\n return this.originalResponse._response;\n },\n enumerable: false,\n configurable: true\n });\n return BlobDownloadResponse;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar AVRO_SYNC_MARKER_SIZE = 16;\nvar AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);\nvar AVRO_CODEC_KEY = \"avro.codec\";\nvar AVRO_SCHEMA_KEY = \"avro.schema\";\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nfunction arraysEqual(a, b) {\n if (a === b)\n return true;\n if (a == null || b == null)\n return false;\n if (a.length != b.length)\n return false;\n for (var i = 0; i < a.length; ++i) {\n if (a[i] !== b[i])\n return false;\n }\n return true;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar AvroParser = /** @class */ (function () {\n function AvroParser() {\n }\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n AvroParser.readFixedBytes = function (stream, length, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var bytes;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })];\n case 1:\n bytes = _a.sent();\n if (bytes.length != length) {\n throw new Error(\"Hit stream end.\");\n }\n return [2 /*return*/, bytes];\n }\n });\n });\n };\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n AvroParser.readByte = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buf;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)];\n case 1:\n buf = _a.sent();\n return [2 /*return*/, buf[0]];\n }\n });\n });\n };\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n AvroParser.readZigZagLong = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n zigZagEncoded = 0;\n significanceInBit = 0;\n _a.label = 1;\n case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 2:\n byte = _a.sent();\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n _a.label = 3;\n case 3:\n if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1];\n _a.label = 4;\n case 4:\n if (!haveMoreByte) return [3 /*break*/, 9];\n // Switch to float arithmetic\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n _a.label = 5;\n case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 6:\n byte = _a.sent();\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n _a.label = 7;\n case 7:\n if (byte & 0x80) 
return [3 /*break*/, 5];\n _a.label = 8;\n case 8:\n res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return [2 /*return*/, res];\n case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)];\n }\n });\n });\n };\n AvroParser.readLong = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];\n });\n });\n };\n AvroParser.readInt = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];\n });\n });\n };\n AvroParser.readNull = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, null];\n });\n });\n };\n AvroParser.readBoolean = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var b;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 1:\n b = _a.sent();\n if (b == 1) {\n return [2 /*return*/, true];\n }\n else if (b == 0) {\n return [2 /*return*/, false];\n }\n else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n });\n });\n };\n AvroParser.readFloat = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, view;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)];\n case 1:\n u8arr = _a.sent();\n view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true\n }\n });\n });\n };\n AvroParser.readDouble = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, view;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)];\n case 1:\n u8arr = _a.sent();\n view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true\n }\n });\n });\n };\n AvroParser.readBytes = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var size;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 1:\n size = _a.sent();\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })];\n case 2: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n AvroParser.readString = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, utf8decoder;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n 
case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)];\n case 1:\n u8arr = _a.sent();\n // polyfill TextDecoder to be backward compatible with older\n // nodejs that doesn't expose TextDecoder as a global variable\n if (typeof TextDecoder === \"undefined\" && typeof require !== \"undefined\") {\n global.TextDecoder = require(\"util\").TextDecoder;\n }\n utf8decoder = new TextDecoder();\n return [2 /*return*/, utf8decoder.decode(u8arr)];\n }\n });\n });\n };\n AvroParser.readMapPair = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var key, value;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readString(stream, options)];\n case 1:\n key = _a.sent();\n return [4 /*yield*/, readItemMethod(stream, options)];\n case 2:\n value = _a.sent();\n return [2 /*return*/, { key: key, value: value }];\n }\n });\n });\n };\n AvroParser.readMap = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var readPairMethod, pairs, dict, _i, pairs_1, pair;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n readPairMethod = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)];\n case 1:\n pairs = _a.sent();\n dict = {};\n for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) {\n pair = pairs_1[_i];\n dict[pair.key] = pair.value;\n }\n return [2 /*return*/, dict];\n }\n });\n });\n };\n AvroParser.readArray = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var items, count, item;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n items = [];\n return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 1:\n count = _a.sent();\n _a.label = 2;\n case 2:\n if (!(count != 0)) return [3 /*break*/, 8];\n if (!(count < 0)) return [3 /*break*/, 4];\n // Ignore block sizes\n return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 3:\n // Ignore block sizes\n _a.sent();\n count = -count;\n _a.label = 4;\n case 4:\n if (!count--) return [3 /*break*/, 6];\n return [4 /*yield*/, readItemMethod(stream, options)];\n case 5:\n item = _a.sent();\n items.push(item);\n return [3 /*break*/, 4];\n case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 7:\n count = _a.sent();\n return [3 /*break*/, 2];\n case 8: return [2 /*return*/, items];\n }\n });\n });\n };\n return AvroParser;\n}());\nvar AvroComplex;\n(function (AvroComplex) {\n AvroComplex[\"RECORD\"] = \"record\";\n AvroComplex[\"ENUM\"] = \"enum\";\n AvroComplex[\"ARRAY\"] = \"array\";\n AvroComplex[\"MAP\"] = \"map\";\n AvroComplex[\"UNION\"] = \"union\";\n AvroComplex[\"FIXED\"] = \"fixed\";\n})(AvroComplex || (AvroComplex = {}));\nvar AvroType = /** @class */ (function () {\n function AvroType() {\n }\n /**\n * Determines the AvroType from the Avro Schema.\n */\n AvroType.fromSchema = 
function (schema) {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n }\n else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n }\n else {\n return AvroType.fromObjectSchema(schema);\n }\n };\n AvroType.fromStringSchema = function (schema) {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema);\n default:\n throw new Error(\"Unexpected Avro type \" + schema);\n }\n };\n AvroType.fromArraySchema = function (schema) {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n };\n AvroType.fromObjectSchema = function (schema) {\n var type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n }\n catch (err) { }\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(\"aliases currently is not supported, schema: \" + schema);\n }\n if (!schema.name) {\n throw new Error(\"Required attribute 'name' doesn't exist on schema: \" + schema);\n }\n var fields = {};\n if (!schema.fields) {\n throw new Error(\"Required attribute 'fields' doesn't exist on schema: \" + schema);\n }\n for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) {\n var field = _a[_i];\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(\"aliases currently is not supported, schema: \" + schema);\n }\n if (!schema.symbols) {\n throw new Error(\"Required attribute 'symbols' doesn't exist on schema: \" + schema);\n }\n return new AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(\"Required attribute 'values' doesn't exist on schema: \" + schema);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(\"Unexpected Avro type \" + type + \" in \" + schema);\n }\n };\n return AvroType;\n}());\nvar AvroPrimitive;\n(function (AvroPrimitive) {\n AvroPrimitive[\"NULL\"] = \"null\";\n AvroPrimitive[\"BOOLEAN\"] = \"boolean\";\n AvroPrimitive[\"INT\"] = \"int\";\n AvroPrimitive[\"LONG\"] = \"long\";\n AvroPrimitive[\"FLOAT\"] = \"float\";\n AvroPrimitive[\"DOUBLE\"] = \"double\";\n AvroPrimitive[\"BYTES\"] = \"bytes\";\n AvroPrimitive[\"STRING\"] = \"string\";\n})(AvroPrimitive || (AvroPrimitive = {}));\nvar AvroPrimitiveType = /** @class */ (function (_super) {\n tslib.__extends(AvroPrimitiveType, _super);\n function AvroPrimitiveType(primitive) {\n var _this = _super.call(this) || this;\n _this._primitive = primitive;\n return _this;\n }\n AvroPrimitiveType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = this._primitive;\n switch (_a) {\n case AvroPrimitive.NULL: return [3 /*break*/, 1];\n case AvroPrimitive.BOOLEAN: return [3 /*break*/, 3];\n case AvroPrimitive.INT: return [3 /*break*/, 5];\n case AvroPrimitive.LONG: return [3 /*break*/, 7];\n case AvroPrimitive.FLOAT: return [3 /*break*/, 9];\n case AvroPrimitive.DOUBLE: return [3 /*break*/, 11];\n case 
AvroPrimitive.BYTES: return [3 /*break*/, 13];\n case AvroPrimitive.STRING: return [3 /*break*/, 15];\n }\n return [3 /*break*/, 17];\n case 1: return [4 /*yield*/, AvroParser.readNull()];\n case 2: return [2 /*return*/, _b.sent()];\n case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)];\n case 4: return [2 /*return*/, _b.sent()];\n case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 6: return [2 /*return*/, _b.sent()];\n case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 8: return [2 /*return*/, _b.sent()];\n case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)];\n case 10: return [2 /*return*/, _b.sent()];\n case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)];\n case 12: return [2 /*return*/, _b.sent()];\n case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)];\n case 14: return [2 /*return*/, _b.sent()];\n case 15: return [4 /*yield*/, AvroParser.readString(stream, options)];\n case 16: return [2 /*return*/, _b.sent()];\n case 17: throw new Error(\"Unknown Avro Primitive\");\n }\n });\n });\n };\n return AvroPrimitiveType;\n}(AvroType));\nvar AvroEnumType = /** @class */ (function (_super) {\n tslib.__extends(AvroEnumType, _super);\n function AvroEnumType(symbols) {\n var _this = _super.call(this) || this;\n _this._symbols = symbols;\n return _this;\n }\n AvroEnumType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var value;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 1:\n value = _a.sent();\n return [2 /*return*/, this._symbols[value]];\n }\n });\n });\n };\n return AvroEnumType;\n}(AvroType));\nvar AvroUnionType = /** @class */ (function (_super) {\n tslib.__extends(AvroUnionType, _super);\n function AvroUnionType(types) {\n var _this = _super.call(this) || this;\n _this._types = types;\n return _this;\n }\n AvroUnionType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var typeIndex;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 1:\n typeIndex = _a.sent();\n return [4 /*yield*/, this._types[typeIndex].read(stream, options)];\n case 2: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return AvroUnionType;\n}(AvroType));\nvar AvroMapType = /** @class */ (function (_super) {\n tslib.__extends(AvroMapType, _super);\n function AvroMapType(itemType) {\n var _this = _super.call(this) || this;\n _this._itemType = itemType;\n return _this;\n }\n AvroMapType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var readItemMethod;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, this._itemType.read(s, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n }); };\n return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return 
AvroMapType;\n}(AvroType));\nvar AvroRecordType = /** @class */ (function (_super) {\n tslib.__extends(AvroRecordType, _super);\n function AvroRecordType(fields, name) {\n var _this = _super.call(this) || this;\n _this._fields = fields;\n _this._name = name;\n return _this;\n }\n AvroRecordType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var record, _a, _b, _i, key, _c, _d;\n return tslib.__generator(this, function (_e) {\n switch (_e.label) {\n case 0:\n record = {};\n record[\"$schema\"] = this._name;\n _a = [];\n for (_b in this._fields)\n _a.push(_b);\n _i = 0;\n _e.label = 1;\n case 1:\n if (!(_i < _a.length)) return [3 /*break*/, 4];\n key = _a[_i];\n if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3];\n _c = record;\n _d = key;\n return [4 /*yield*/, this._fields[key].read(stream, options)];\n case 2:\n _c[_d] = _e.sent();\n _e.label = 3;\n case 3:\n _i++;\n return [3 /*break*/, 1];\n case 4: return [2 /*return*/, record];\n }\n });\n });\n };\n return AvroRecordType;\n}(AvroType));\n\n// Copyright (c) Microsoft Corporation.\nvar AvroReader = /** @class */ (function () {\n function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n Object.defineProperty(AvroReader.prototype, \"blockOffset\", {\n get: function () {\n return this._blockOffset;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(AvroReader.prototype, \"objectIndex\", {\n get: function () {\n return this._objectIndex;\n },\n enumerable: false,\n configurable: true\n });\n AvroReader.prototype.initialize = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var header, _a, codec, _b, schema, _c, i;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal\n })];\n case 1:\n header = _d.sent();\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n _a = this;\n return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal\n })];\n case 2:\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n _a._metadata = _d.sent();\n codec = this._metadata[AVRO_CODEC_KEY];\n if (!(codec == undefined || codec == \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n // The 16-byte, randomly-generated sync marker for this file.\n _b = this;\n return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n })];\n case 3:\n // The 16-byte, randomly-generated sync marker for this file.\n _b._syncMarker = _d.sent();\n schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n if (this._blockOffset == 0) {\n this._blockOffset = this._initialBlockOffset + 
this._dataStream.position;\n }\n _c = this;\n return [4 /*yield*/, AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n })];\n case 4:\n _c._itemsRemainingInBlock = _d.sent();\n // skip block length\n return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })];\n case 5:\n // skip block length\n _d.sent();\n this._initialized = true;\n if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9];\n i = 0;\n _d.label = 6;\n case 6:\n if (!(i < this._objectIndex)) return [3 /*break*/, 9];\n return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })];\n case 7:\n _d.sent();\n this._itemsRemainingInBlock--;\n _d.label = 8;\n case 8:\n i++;\n return [3 /*break*/, 6];\n case 9: return [2 /*return*/];\n }\n });\n });\n };\n AvroReader.prototype.hasNext = function () {\n return !this._initialized || this._itemsRemainingInBlock > 0;\n };\n AvroReader.prototype.parseObjects = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function parseObjects_1() {\n var result, marker, _a, err_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!!this._initialized) return [3 /*break*/, 2];\n return [4 /*yield*/, tslib.__await(this.initialize(options))];\n case 1:\n _b.sent();\n _b.label = 2;\n case 2:\n if (!this.hasNext()) return [3 /*break*/, 13];\n return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, {\n abortSignal: options.abortSignal\n }))];\n case 3:\n result = _b.sent();\n this._itemsRemainingInBlock--;\n this._objectIndex++;\n if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n }))];\n case 4:\n marker = _b.sent();\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n if (!arraysEqual(this._syncMarker, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n _b.label = 5;\n case 5:\n _b.trys.push([5, 7, , 8]);\n _a = this;\n return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n }))];\n case 6:\n _a._itemsRemainingInBlock = _b.sent();\n return [3 /*break*/, 8];\n case 7:\n err_1 = _b.sent();\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n return [3 /*break*/, 8];\n case 8:\n if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10];\n // Ignore block size\n return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))];\n case 9:\n // Ignore block size\n _b.sent();\n _b.label = 10;\n case 10: return [4 /*yield*/, tslib.__await(result)];\n case 11: return [4 /*yield*/, _b.sent()];\n case 12:\n _b.sent();\n return [3 /*break*/, 2];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n return AvroReader;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar AvroReadable = /** @class */ (function () {\n function AvroReadable() {\n }\n return AvroReadable;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar ABORT_ERROR = new abortController.AbortError(\"Reading from the avro stream was aborted.\");\nvar AvroReadableFromStream = /** @class */ (function (_super) {\n tslib.__extends(AvroReadableFromStream, _super);\n function AvroReadableFromStream(readable) {\n var _this = 
_super.call(this) || this;\n _this._readable = readable;\n _this._position = 0;\n return _this;\n }\n AvroReadableFromStream.prototype.toUint8Array = function (data) {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n };\n Object.defineProperty(AvroReadableFromStream.prototype, \"position\", {\n get: function () {\n return this._position;\n },\n enumerable: false,\n configurable: true\n });\n AvroReadableFromStream.prototype.read = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var chunk;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {\n throw ABORT_ERROR;\n }\n if (size < 0) {\n throw new Error(\"size parameter should be positive: \" + size);\n }\n if (size === 0) {\n return [2 /*return*/, new Uint8Array()];\n }\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer readable.\");\n }\n chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return [2 /*return*/, this.toUint8Array(chunk)];\n }\n else {\n // register callback to wait for enough data to read\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var cleanUp = function () {\n _this._readable.removeListener(\"readable\", readableCallback);\n _this._readable.removeListener(\"error\", rejectCallback);\n _this._readable.removeListener(\"end\", rejectCallback);\n _this._readable.removeListener(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal.removeEventListener(\"abort\", abortHandler);\n }\n };\n var readableCallback = function () {\n var chunk = _this._readable.read(size);\n if (chunk) {\n _this._position += chunk.length;\n cleanUp();\n // chunk.length maybe less than desired size if the stream ends.\n resolve(_this.toUint8Array(chunk));\n }\n };\n var rejectCallback = function () {\n cleanUp();\n reject();\n };\n var abortHandler = function () {\n cleanUp();\n reject(ABORT_ERROR);\n };\n _this._readable.on(\"readable\", readableCallback);\n _this._readable.once(\"error\", rejectCallback);\n _this._readable.once(\"end\", rejectCallback);\n _this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", abortHandler);\n }\n })];\n }\n });\n });\n };\n return AvroReadableFromStream;\n}(AvroReadable));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nvar BlobQuickQueryStream = /** @class */ (function (_super) {\n tslib.__extends(BlobQuickQueryStream, _super);\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n function BlobQuickQueryStream(source, options) {\n if (options === void 0) { options = {}; }\n var _this = _super.call(this) || this;\n _this.avroPaused = true;\n _this.source = source;\n _this.onProgress = options.onProgress;\n _this.onError = options.onError;\n _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source));\n _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal });\n return _this;\n }\n BlobQuickQueryStream.prototype._read = function () {\n var _this = this;\n if (this.avroPaused) 
{\n this.readInternal().catch(function (err) {\n _this.emit(\"error\", err);\n });\n }\n };\n BlobQuickQueryStream.prototype.readInternal = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var avroNext, obj, schema, data, bytesScanned, totalBytes, fatal, name_1, description, position;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n this.avroPaused = false;\n _a.label = 1;\n case 1: return [4 /*yield*/, this.avroIter.next()];\n case 2:\n avroNext = _a.sent();\n if (avroNext.done) {\n return [3 /*break*/, 4];\n }\n obj = avroNext.value;\n schema = obj.$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n data = obj.data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n bytesScanned = obj.bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n totalBytes = obj.totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n fatal = obj.fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n name_1 = obj.name;\n if (typeof name_1 !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n description = obj.description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n position = obj.position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position: position,\n name: name_1,\n isFatal: fatal,\n description: description\n });\n }\n break;\n default:\n throw Error(\"Unknown schema \" + schema + \" in avro progress record.\");\n }\n _a.label = 3;\n case 3:\n if (!avroNext.done && !this.avroPaused) return [3 /*break*/, 1];\n _a.label = 4;\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n return BlobQuickQueryStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nvar BlobQueryResponse = /** @class */ (function () {\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n function BlobQueryResponse(originalResponse, options) {\n if (options === void 0) { options = {}; }\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);\n }\n Object.defineProperty(BlobQueryResponse.prototype, \"acceptRanges\", {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.acceptRanges;\n 
},\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"cacheControl\", {\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.cacheControl;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentDisposition\", {\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentDisposition;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentEncoding\", {\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentEncoding;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentLanguage\", {\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentLanguage;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobSequenceNumber\", {\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobSequenceNumber;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobType\", {\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentLength\", {\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentLength;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentMD5\", {\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. 
If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentRange\", {\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentRange;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentType\", {\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.contentType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyCompletedOn\", {\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n get: function () {\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyId\", {\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyProgress\", {\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyProgress;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copySource\", {\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copySource;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyStatus\", {\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyStatusDescription\", {\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.copyStatusDescription;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseDuration\", {\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseDuration;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseState\", {\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseState;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseStatus\", {\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.leaseStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"date\", {\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.date;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobCommittedBlockCount\", {\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobCommittedBlockCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"etag\", {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.etag;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"errorCode\", {\n /**\n * The error code.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.errorCode;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"isServerEncrypted\", {\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.isServerEncrypted;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobContentMD5\", {\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.blobContentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"lastModified\", {\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.lastModified;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"metadata\", {\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.metadata;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"requestId\", {\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.requestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"clientRequestId\", {\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.clientRequestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"version\", {\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.version;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"encryptionKeySha256\", {\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n get: function () {\n return this.originalResponse.encryptionKeySha256;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentCrc64\", {\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n get: function () {\n return this.originalResponse.contentCrc64;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobBody\", {\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n get: function () {\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"readableStreamBody\", {\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n get: function () {\n return coreHttp.isNode ? 
this.blobDownloadStream : undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"_response\", {\n /**\n * The HTTP response.\n */\n get: function () {\n return this.originalResponse._response;\n },\n enumerable: false,\n configurable: true\n });\n return BlobQueryResponse;\n}());\n\n// Copyright (c) Microsoft Corporation.\n(function (BlockBlobTier) {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n BlockBlobTier[\"Hot\"] = \"Hot\";\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n BlockBlobTier[\"Cool\"] = \"Cool\";\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n BlockBlobTier[\"Archive\"] = \"Archive\";\n})(exports.BlockBlobTier || (exports.BlockBlobTier = {}));\n(function (PremiumPageBlobTier) {\n /**\n * P4 Tier.\n */\n PremiumPageBlobTier[\"P4\"] = \"P4\";\n /**\n * P6 Tier.\n */\n PremiumPageBlobTier[\"P6\"] = \"P6\";\n /**\n * P10 Tier.\n */\n PremiumPageBlobTier[\"P10\"] = \"P10\";\n /**\n * P15 Tier.\n */\n PremiumPageBlobTier[\"P15\"] = \"P15\";\n /**\n * P20 Tier.\n */\n PremiumPageBlobTier[\"P20\"] = \"P20\";\n /**\n * P30 Tier.\n */\n PremiumPageBlobTier[\"P30\"] = \"P30\";\n /**\n * P40 Tier.\n */\n PremiumPageBlobTier[\"P40\"] = \"P40\";\n /**\n * P50 Tier.\n */\n PremiumPageBlobTier[\"P50\"] = \"P50\";\n /**\n * P60 Tier.\n */\n PremiumPageBlobTier[\"P60\"] = \"P60\";\n /**\n * P70 Tier.\n */\n PremiumPageBlobTier[\"P70\"] = \"P70\";\n /**\n * P80 Tier.\n */\n PremiumPageBlobTier[\"P80\"] = \"P80\";\n})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {}));\nfunction toAccessTier(tier) {\n if (tier == undefined) {\n return undefined;\n }\n return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\nfunction ensureCpkIfSpecified(cpk, isHttps) {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nfunction rangeResponseFromModel(response) {\n var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({\n offset: x.start,\n count: x.end - x.start\n }); });\n var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({\n offset: x.start,\n count: x.end - x.start\n }); });\n return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange,\n clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: {\n pageRange: pageRange,\n clearRange: clearRange\n } }) });\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nvar BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) {\n tslib.__extends(BlobBeginCopyFromUrlPoller, _super);\n function BlobBeginCopyFromUrlPoller(options) {\n var _this = this;\n var blobClient = 
options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions;\n var state;\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient,\n copySource: copySource,\n startCopyFromURLOptions: startCopyFromURLOptions }));\n _this = _super.call(this, operation) || this;\n if (typeof onProgress === \"function\") {\n _this.onProgress(onProgress);\n }\n _this.intervalInMs = intervalInMs;\n return _this;\n }\n BlobBeginCopyFromUrlPoller.prototype.delay = function () {\n return coreHttp.delay(this.intervalInMs);\n };\n return BlobBeginCopyFromUrlPoller;\n}(coreLro.Poller));\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar cancel = function cancel(options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var state, copyId;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n state = this.state;\n copyId = state.copyId;\n if (state.isCompleted) {\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n if (!copyId) {\n state.isCancelled = true;\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal\n })];\n case 1:\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n _a.sent();\n state.isCancelled = true;\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n });\n });\n};\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar update = function update(options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n state = this.state;\n blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions;\n if (!!state.isStarted) return [3 /*break*/, 2];\n state.isStarted = true;\n return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)];\n case 1:\n result = _a.sent();\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n return [3 /*break*/, 6];\n case 2:\n if (!!state.isCompleted) return [3 /*break*/, 6];\n _a.label = 3;\n case 3:\n _a.trys.push([3, 5, , 6]);\n return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })];\n case 4:\n result = _a.sent();\n copyStatus = result.copyStatus, copyProgress = result.copyProgress;\n prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n 
if (copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\") {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n }\n else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n else if (copyStatus === \"failed\") {\n state.error = new Error(\"Blob copy failed with reason: \\\"\" + (result.copyStatusDescription || \"unknown\") + \"\\\"\");\n state.isCompleted = true;\n }\n return [3 /*break*/, 6];\n case 5:\n err_1 = _a.sent();\n state.error = err_1;\n state.isCompleted = true;\n return [3 /*break*/, 6];\n case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n });\n });\n};\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar toString = function toString() {\n return JSON.stringify({ state: this.state }, function (key, value) {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(state) {\n return {\n state: tslib.__assign({}, state),\n cancel: cancel,\n toString: toString,\n update: update\n };\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nfunction rangeToString(iRange) {\n if (iRange.offset < 0) {\n throw new RangeError(\"Range.offset cannot be smaller than 0.\");\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\"Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.\");\n }\n return iRange.count\n ? \"bytes=\" + iRange.offset + \"-\" + (iRange.offset + iRange.count - 1)\n : \"bytes=\" + iRange.offset + \"-\";\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * States for Batch.\n */\nvar BatchStates;\n(function (BatchStates) {\n BatchStates[BatchStates[\"Good\"] = 0] = \"Good\";\n BatchStates[BatchStates[\"Error\"] = 1] = \"Error\";\n})(BatchStates || (BatchStates = {}));\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nvar Batch = /** @class */ (function () {\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n function Batch(concurrency) {\n if (concurrency === void 0) { concurrency = 5; }\n /**\n * Number of active operations under execution.\n */\n this.actives = 0;\n /**\n * Number of completed operations under execution.\n */\n this.completed = 0;\n /**\n * Offset of next operation to be executed.\n */\n this.offset = 0;\n /**\n * Operation array to be executed.\n */\n this.operations = [];\n /**\n * States of Batch. 
When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n this.state = BatchStates.Good;\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new events.EventEmitter();\n }\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n Batch.prototype.addOperation = function (operation) {\n var _this = this;\n this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var error_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n _a.trys.push([0, 2, , 3]);\n this.actives++;\n return [4 /*yield*/, operation()];\n case 1:\n _a.sent();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n return [3 /*break*/, 3];\n case 2:\n error_1 = _a.sent();\n this.emitter.emit(\"error\", error_1);\n return [3 /*break*/, 3];\n case 3: return [2 /*return*/];\n }\n });\n }); });\n };\n /**\n * Start execute operations in the queue.\n *\n */\n Batch.prototype.do = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n if (this.operations.length === 0) {\n return [2 /*return*/, Promise.resolve()];\n }\n this.parallelExecute();\n return [2 /*return*/, new Promise(function (resolve, reject) {\n _this.emitter.on(\"finish\", resolve);\n _this.emitter.on(\"error\", function (error) {\n _this.state = BatchStates.Error;\n reject(error);\n });\n })];\n });\n });\n };\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n Batch.prototype.nextOperation = function () {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n };\n /**\n * Start execute operations. 
One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n Batch.prototype.parallelExecute = function () {\n if (this.state === BatchStates.Error) {\n return;\n }\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n while (this.actives < this.concurrency) {\n var operation = this.nextOperation();\n if (operation) {\n operation();\n }\n else {\n return;\n }\n }\n };\n return Batch;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nvar BuffersStream = /** @class */ (function (_super) {\n tslib.__extends(BuffersStream, _super);\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n function BuffersStream(buffers, byteLength, options) {\n var _this = _super.call(this, options) || this;\n _this.buffers = buffers;\n _this.byteLength = byteLength;\n _this.byteOffsetInCurrentBuffer = 0;\n _this.bufferIndex = 0;\n _this.pushedBytesLength = 0;\n // check byteLength is no larger than buffers[] total length\n var buffersLength = 0;\n for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) {\n var buf = _a[_i];\n buffersLength += buf.byteLength;\n }\n if (buffersLength < _this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n return _this;\n }\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. The size of data to be read\n */\n BuffersStream.prototype._read = function (size) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n if (!size) {\n size = this.readableHighWaterMark;\n }\n var outBuffers = [];\n var i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n var end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n }\n else {\n // chunkSize = remaining\n var end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n }\n else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n }\n else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n };\n return BuffersStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize 
\"buffer\".\nvar maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nvar PooledBuffer = /** @class */ (function () {\n function PooledBuffer(capacity, buffers, totalLength) {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n this.buffers = [];\n this.capacity = capacity;\n this._size = 0;\n // allocate\n var bufferNum = Math.ceil(capacity / maxBufferLength);\n for (var i = 0; i < bufferNum; i++) {\n var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n if (buffers) {\n this.fill(buffers, totalLength);\n }\n }\n Object.defineProperty(PooledBuffer.prototype, \"size\", {\n /**\n * The size of the data contained in the pooled buffers.\n */\n get: function () {\n return this._size;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n PooledBuffer.prototype.fill = function (buffers, totalLength) {\n this._size = Math.min(this.capacity, totalLength);\n var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n var source = buffers[i];\n var target = this.buffers[j];\n var copiedNum = source.copy(target, targetOffset, sourceOffset);\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n };\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n PooledBuffer.prototype.getReadableStream = function () {\n return new BuffersStream(this.buffers, this.size);\n };\n return PooledBuffer;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. 
Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nvar BufferScheduler = /** @class */ (function () {\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {\n /**\n * An internal event emitter.\n */\n this.emitter = new events.EventEmitter();\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n this.offset = 0;\n /**\n * An internal marker to track whether stream is end.\n */\n this.isStreamEnd = false;\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n this.isError = false;\n /**\n * How many handlers are executing.\n */\n this.executingOutgoingHandlers = 0;\n /**\n * How many buffers have been allocated.\n */\n this.numBuffers = 0;\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n this.unresolvedDataArray = [];\n /**\n * How much data consisted in unresolvedDataArray.\n */\n this.unresolvedLength = 0;\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n this.incoming = [];\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n this.outgoing = [];\n if (bufferSize <= 0) {\n throw new RangeError(\"bufferSize must be larger than 0, current is \" + bufferSize);\n }\n if (maxBuffers <= 0) {\n throw new RangeError(\"maxBuffers must be larger than 0, current is \" + maxBuffers);\n }\n if (concurrency <= 0) {\n throw new RangeError(\"concurrency must be larger than 0, current is \" + concurrency);\n }\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n BufferScheduler.prototype.do = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n _this.readable.on(\"data\", function (data) {\n data = typeof data === \"string\" ? Buffer.from(data, _this.encoding) : data;\n _this.appendUnresolvedData(data);\n if (!_this.resolveData()) {\n _this.readable.pause();\n }\n });\n _this.readable.on(\"error\", function (err) {\n _this.emitter.emit(\"error\", err);\n });\n _this.readable.on(\"end\", function () {\n _this.isStreamEnd = true;\n _this.emitter.emit(\"checkEnd\");\n });\n _this.emitter.on(\"error\", function (err) {\n _this.isError = true;\n _this.readable.pause();\n reject(err);\n });\n _this.emitter.on(\"checkEnd\", function () {\n if (_this.outgoing.length > 0) {\n _this.triggerOutgoingHandlers();\n return;\n }\n if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) {\n if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) {\n var buffer_1 = _this.shiftBufferFromUnresolvedDataArray();\n _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset)\n .then(resolve)\n .catch(reject);\n }\n else if (_this.unresolvedLength >= _this.bufferSize) {\n return;\n }\n else {\n resolve();\n }\n }\n });\n })];\n });\n });\n };\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n BufferScheduler.prototype.appendUnresolvedData = function (data) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n };\n /**\n * Try to shift a buffer with size in blockSize. The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n }\n else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n this.unresolvedLength -= buffer.size;\n return buffer;\n };\n /**\n * Resolve data in unresolvedDataArray. 
For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n BufferScheduler.prototype.resolveData = function () {\n while (this.unresolvedLength >= this.bufferSize) {\n var buffer = void 0;\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift();\n this.shiftBufferFromUnresolvedDataArray(buffer);\n }\n else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n }\n else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n };\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n BufferScheduler.prototype.triggerOutgoingHandlers = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer;\n return tslib.__generator(this, function (_a) {\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return [2 /*return*/];\n }\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n return [2 /*return*/];\n });\n });\n };\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var bufferLength, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n bufferLength = buffer.size;\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)];\n case 2:\n _a.sent();\n return [3 /*break*/, 4];\n case 3:\n err_1 = _a.sent();\n this.emitter.emit(\"error\", err_1);\n return [2 /*return*/];\n case 4:\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n BufferScheduler.prototype.reuseBuffer = function (buffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n };\n return BufferScheduler;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Reads a readable stream into buffer. 
Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nfunction streamToBuffer(stream, buffer, offset, end, encoding) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var pos, count;\n return tslib.__generator(this, function (_a) {\n pos = 0;\n count = end - offset;\n return [2 /*return*/, new Promise(function (resolve, reject) {\n stream.on(\"readable\", function () {\n if (pos >= count) {\n resolve();\n return;\n }\n var chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n // How much data needed in this chunk\n var chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n stream.on(\"end\", function () {\n if (pos < count) {\n reject(new Error(\"Stream drains before getting enough data needed. Data read: \" + pos + \", data need: \" + count));\n }\n resolve();\n });\n stream.on(\"error\", reject);\n })];\n });\n });\n}\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nfunction streamToBuffer2(stream, buffer, encoding) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var pos, bufferSize;\n return tslib.__generator(this, function (_a) {\n pos = 0;\n bufferSize = buffer.length;\n return [2 /*return*/, new Promise(function (resolve, reject) {\n stream.on(\"readable\", function () {\n var chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n if (pos + chunk.length > bufferSize) {\n reject(new Error(\"Stream exceeds buffer size. Buffer size: \" + bufferSize));\n return;\n }\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n stream.on(\"end\", function () {\n resolve(pos);\n });\n stream.on(\"error\", reject);\n })];\n });\n });\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. 
Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nfunction readStreamToLocalFile(rs, file) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var ws = fs.createWriteStream(file);\n rs.on(\"error\", function (err) {\n reject(err);\n });\n ws.on(\"error\", function (err) {\n reject(err);\n });\n ws.on(\"close\", resolve);\n rs.pipe(ws);\n })];\n });\n });\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nvar fsStat = util.promisify(fs.stat);\nvar fsCreateReadStream = fs.createReadStream;\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nvar BlobClient = /** @class */ (function (_super) {\n tslib.__extends(BlobClient, _super);\n function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _a;\n var _this = this;\n options = options || {};\n var pipeline;\n var url;\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName);\n _this.blobContext = new Blob$1(_this.storageClientContext);\n _this._snapshot = getURLParameter(_this.url, URLConstants.Parameters.SNAPSHOT);\n _this._versionId = getURLParameter(_this.url, URLConstants.Parameters.VERSIONID);\n return _this;\n }\n Object.defineProperty(BlobClient.prototype, \"name\", {\n /**\n * The name of the blob.\n */\n get: function () {\n return this._name;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobClient.prototype, \"containerName\", {\n /**\n * The name of the storage container the blob is associated with.\n */\n get: function () {\n return this._containerName;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n BlobClient.prototype.withSnapshot = function (snapshot) {\n return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n BlobClient.prototype.withVersion = function (versionId) {\n return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? 
undefined : versionId), this.pipeline);\n };\n /**\n * Creates a AppendBlobClient object.\n *\n */\n BlobClient.prototype.getAppendBlobClient = function () {\n return new AppendBlobClient(this.url, this.pipeline);\n };\n /**\n * Creates a BlockBlobClient object.\n *\n */\n BlobClient.prototype.getBlockBlobClient = function () {\n return new BlockBlobClient(this.url, this.pipeline);\n };\n /**\n * Creates a PageBlobClient object.\n *\n */\n BlobClient.prototype.getPageBlobClient = function () {\n return new PageBlobClient(this.url, this.pipeline);\n };\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n BlobClient.prototype.download = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, res_1, wrappedRes, e_1;\n var _this = this;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n _b = createSpan(\"BlobClient-download\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, range: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n res_1 = _c.sent();\n wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) });\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n if (res_1.contentLength === undefined) {\n throw new RangeError(\"File download response doesn't contain valid content length header\");\n }\n if (!res_1.etag) {\n throw new RangeError(\"File download response doesn't contain valid etag header\");\n }\n return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () {\n var updatedOptions;\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n updatedOptions = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions.ifMatch || res_1.etag,\n ifModifiedSince: options.conditions.ifModifiedSince,\n ifNoneMatch: options.conditions.ifNoneMatch,\n ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,\n ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions\n },\n range: rangeToString({\n count: offset + res_1.contentLength - start,\n offset: start\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey\n };\n return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))];\n case 1: \n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n return [2 /*return*/, (_b.sent()).readableStreamBody];\n }\n });\n }); }, offset, res_1.contentLength, {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress\n })];\n case 3:\n e_1 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n BlobClient.prototype.exists = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_2;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-exists\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions\n })];\n case 2:\n _b.sent();\n return [2 /*return*/, true];\n case 3:\n e_2 = _b.sent();\n if (e_2.statusCode === 404) {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when checking blob existence\"\n });\n return [2 /*return*/, false];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n BlobClient.prototype.getProperties = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, res, e_3;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-getProperties\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n res = _c.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })];\n case 3:\n e_3 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n BlobClient.prototype.delete = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_4;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-delete\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.deleteMethod(tslib.__assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_4 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n BlobClient.prototype.deleteIfExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, updatedOptions, res, e_5;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"BlobClient-deleteIfExists\", options), span = _c.span, updatedOptions = _c.updatedOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.delete(updatedOptions)];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_5 = _d.sent();\n if (((_a = e_5.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === \"BlobNotFound\") {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n BlobClient.prototype.undelete = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_6;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-undelete\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.undelete(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_6 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_6.message\n });\n throw e_6;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_7;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setHTTPHeaders\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.setHTTPHeaders(tslib.__assign({ abortSignal: options.abortSignal, blobHTTPHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_7 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_7.message\n });\n throw e_7;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n BlobClient.prototype.setMetadata = function (metadata, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_8;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setMetadata\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_8 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_8.message\n });\n throw e_8;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n BlobClient.prototype.setTags = function (tags, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_9;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setTags\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.setTags(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) }))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_9 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_9.message\n });\n throw e_9;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n BlobClient.prototype.getTags = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, response, wrappedResponse, e_10;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-getTags\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.getTags(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _c.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_10 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_10.message\n });\n throw e_10;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {\n return new BlobLeaseClient(this, proposeLeaseId);\n };\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n BlobClient.prototype.createSnapshot = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_11;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-createSnapshot\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.createSnapshot(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_11 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_11.message\n });\n throw e_11;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n BlobClient.prototype.beginCopyFromURL = function (copySource, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var client, poller;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n client = {\n abortCopyFromURL: function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.abortCopyFromURL.apply(_this, args);\n },\n getProperties: function () {\n var args = [];\n for (var _i = 
0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.getProperties.apply(_this, args);\n },\n startCopyFromURL: function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.startCopyFromURL.apply(_this, args);\n }\n };\n poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource: copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options\n });\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n return [4 /*yield*/, poller.poll()];\n case 1:\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n _a.sent();\n return [2 /*return*/, poller];\n }\n });\n });\n };\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n BlobClient.prototype.abortCopyFromURL = function (copyId, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_12;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-abortCopyFromURL\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_12 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_12.message\n });\n throw e_12;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n BlobClient.prototype.syncCopyFromURL = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_13;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-syncCopyFromURL\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.copyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n }, sourceContentMD5: options.sourceContentMD5, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_13 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_13.message\n });\n throw e_13;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n BlobClient.prototype.setAccessTier = function (tier, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_14;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setAccessTier\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_14 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_14.message\n });\n throw e_14;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) {\n if (param4 === void 0) { param4 = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer, offset, count, options, _a, span, updatedOptions, response, transferProgress_1, batch, _loop_1, off, e_15;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n offset = 0;\n count = 0;\n options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n }\n else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? param2 : 0;\n options = param3 || {};\n }\n _a = createSpan(\"BlobClient-downloadToBuffer\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n if (!!count) return [3 /*break*/, 3];\n return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))];\n case 2:\n response = _b.sent();\n count = response.contentLength - offset;\n if (count < 0) {\n throw new RangeError(\"offset \" + offset + \" shouldn't be larger than blob size \" + response.contentLength);\n }\n _b.label = 3;\n case 3:\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n }\n catch (error) {\n throw new Error(\"Unable to allocate the buffer of size: \" + count + \"(in bytes). 
Please try passing your own buffer to the \\\"downloadToBuffer\\\" method or try using other methods like \\\"download\\\" or \\\"downloadToFile\\\".\\t \" + error.message);\n }\n }\n if (buffer.length < count) {\n throw new RangeError(\"The buffer's size should be equal to or larger than the request count of bytes: \" + count);\n }\n transferProgress_1 = 0;\n batch = new Batch(options.concurrency);\n _loop_1 = function (off) {\n batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var chunkEnd, response, stream;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n chunkEnd = offset + count;\n if (off + options.blockSize < chunkEnd) {\n chunkEnd = off + options.blockSize;\n }\n return [4 /*yield*/, this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions))\n })];\n case 1:\n response = _a.sent();\n stream = response.readableStreamBody;\n return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)];\n case 2:\n _a.sent();\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress_1 += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress_1 });\n }\n return [2 /*return*/];\n }\n });\n }); });\n };\n for (off = offset; off < offset + count; off = off + options.blockSize) {\n _loop_1(off);\n }\n return [4 /*yield*/, batch.do()];\n case 4:\n _b.sent();\n return [2 /*return*/, buffer];\n case 5:\n e_15 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_15.message\n });\n throw e_15;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) {\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, e_16;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-downloadToFile\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))];\n case 2:\n response = _b.sent();\n if (!response.readableStreamBody) return [3 /*break*/, 4];\n return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)];\n case 3:\n _b.sent();\n _b.label = 4;\n case 4:\n // The stream is no longer accessible so setting it to undefined.\n response.blobDownloadStream = undefined;\n return [2 /*return*/, response];\n case 5:\n e_16 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_16.message\n });\n throw e_16;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () {\n var containerName;\n var blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n var parsedUrl = coreHttp.URLBuilder.parse(this.url);\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents[1];\n blobName = pathComponents[3];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents[2];\n blobName = pathComponents[4];\n }\n else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents[1];\n blobName = pathComponents[3];\n }\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n return { blobName: blobName, containerName: containerName };\n }\n catch (error) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n };\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n BlobClient.prototype.startCopyFromURL = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_17;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-startCopyFromURL\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions\n }, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_17 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_17.message\n });\n throw e_17;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n BlobClient.prototype.generateSasUrl = function (options) {\n var _this = this;\n return new Promise(function (resolve) {\n if (!(_this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\"Can only generate the SAS when the client is initialized with a shared key credential\");\n }\n var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName, blobName: _this._name, snapshotTime: _this._snapshot, versionId: _this._versionId }, options), _this.credential).toString();\n resolve(appendToURLQuery(_this.url, sas));\n });\n };\n return BlobClient;\n}(StorageClient));\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nvar AppendBlobClient = /** @class */ (function (_super) {\n tslib.__extends(AppendBlobClient, _super);\n function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | 
AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.appendBlobContext = new AppendBlob(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n AppendBlobClient.prototype.withSnapshot = function (snapshot) {\n return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n AppendBlobClient.prototype.create = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_18;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-create\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.appendBlobContext.create(0, tslib.__assign({ abortSignal: options.abortSignal, blobHTTPHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_18 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_18.message\n });\n throw e_18;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n AppendBlobClient.prototype.createIfNotExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, updatedOptions, conditions, res, e_19;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"AppendBlobClient-createIfNotExists\", options), span = _c.span, updatedOptions = _c.updatedOptions;\n conditions = { ifNoneMatch: ETagAny };\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, updatedOptions), { conditions: conditions }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_19 = _d.sent();\n if (((_a = e_19.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"BlobAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a blob only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e_19.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_19.message\n });\n throw e_19;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n AppendBlobClient.prototype.seal = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_20;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-seal\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.appendBlobContext.seal(tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_20 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_20.message\n });\n throw e_20;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_21;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-appendBlock\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.appendBlobContext.appendBlock(body, contentLength, tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), onUploadProgress: options.onProgress, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_21 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_21.message\n });\n throw e_21;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_22;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-appendBlockFromURL\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, tslib.__assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count: count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_22 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_22.message\n });\n throw e_22;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return AppendBlobClient;\n}(BlobClient));\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nvar BlockBlobClient = /** @class */ (function (_super) {\n tslib.__extends(BlockBlobClient, _super);\n function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.blockBlobContext = new BlockBlob(_this.storageClientContext);\n _this._blobContext = new Blob$1(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n BlockBlobClient.prototype.withSnapshot = function (snapshot) {\n return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n BlockBlobClient.prototype.query = function (query, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, response, e_23;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n _b = createSpan(\"BlockBlobClient-query\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._blobContext.query(tslib.__assign({ abortSignal: options.abortSignal, queryRequest: {\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration)\n }, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _c.sent();\n return [2 /*return*/, new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError\n })];\n case 3:\n e_23 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_23.message\n });\n throw e_23;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n BlockBlobClient.prototype.upload = function (body, contentLength, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_24;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"BlockBlobClient-upload\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.upload(body, contentLength, tslib.__assign({ abortSignal: options.abortSignal, blobHTTPHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), onUploadProgress: options.onProgress, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_24 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_24.message\n });\n throw e_24;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n BlockBlobClient.prototype.syncUploadFromURL = function (sourceURL, options) {\n var _a, _b, _c, _d, _e;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _f, span, updatedOptions, e_25;\n return tslib.__generator(this, function (_g) {\n switch (_g.label) {\n case 0:\n options.conditions = options.conditions || {};\n _f = createSpan(\"BlockBlobClient-syncUploadFromURL\", options), span = _f.span, updatedOptions = _f.updatedOptions;\n _g.label = 1;\n case 1:\n _g.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.putBlobFromUrl(0, sourceURL, tslib.__assign(tslib.__assign(tslib.__assign({}, options), { leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch,\n sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince,\n sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch,\n sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince,\n sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions\n }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _g.sent()];\n case 3:\n e_25 = _g.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_25.message\n });\n throw e_25;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_26;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-stageBlock\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, onUploadProgress: options.onProgress, transactionalContentMD5: options.transactionalContentMD5, 
transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_26 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_26.message\n });\n throw e_26;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) {\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_27;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-stageBlockFromURL\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_27 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_27.message\n });\n throw e_27;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. 
You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n BlockBlobClient.prototype.commitBlockList = function (blocks, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_28;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"BlockBlobClient-commitBlockList\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, tslib.__assign({ abortSignal: options.abortSignal, blobHTTPHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_28 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_28.message\n });\n throw e_28;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n BlockBlobClient.prototype.getBlockList = function (listType, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, res, e_29;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlockBlobClient-getBlockList\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n res = _c.sent();\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n return [2 /*return*/, res];\n case 3:\n e_29 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_29.message\n });\n throw e_29;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n // High level functions\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n BlockBlobClient.prototype.uploadData = function (data, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, buffer_1, browserBlob_1;\n return tslib.__generator(this, function (_b) {\n _a = createSpan(\"BlockBlobClient-uploadData\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n try {\n if (true) {\n if (data instanceof Buffer) {\n buffer_1 = data;\n }\n else if (data instanceof ArrayBuffer) {\n buffer_1 = Buffer.from(data);\n }\n else {\n data = data;\n buffer_1 = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return buffer_1.slice(offset, offset + size); }, buffer_1.byteLength, updatedOptions)];\n }\n else {\n browserBlob_1 = new Blob([data]);\n return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return browserBlob_1.slice(offset, offset + size); }, browserBlob_1.size, updatedOptions)];\n }\n }\n catch (e) {\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e.message\n });\n throw e;\n }\n finally {\n span.end();\n }\n return [2 /*return*/];\n });\n });\n };\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, browserBlob_2, e_30;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-uploadBrowserData\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n browserBlob_2 = new Blob([browserData]);\n return [4 /*yield*/, 
this.uploadSeekableInternal(function (offset, size) { return browserBlob_2.slice(offset, offset + size); }, browserBlob_2.size, updatedOptions)];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_30 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_30.message\n });\n throw e_30;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n *\n * Uploads data to block blob. Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n BlockBlobClient.prototype.uploadSeekableInternal = function (bodyFactory, size, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_31;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\"blockSize option must be >= 0 and <= \" + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);\n }\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {\n throw new RangeError(\"maxSingleShotSize option must be >= 0 and <= \" + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);\n }\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(size + \" is too larger to upload to a block blob.\");\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];\n return [4 /*yield*/, this.upload(bodyFactory(0, size), size, updatedOptions)];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\"The buffer's size is too big or the BlockSize is too small;\" +\n (\"the number of blocks must be <= \" + BLOCK_BLOB_MAX_BLOCKS));\n }\n blockList_1 = [];\n blockIDPrefix_1 = coreHttp.generateUuid();\n transferProgress_2 = 0;\n batch = new 
Batch(options.concurrency);\n _loop_2 = function (i) {\n batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var blockID, start, end, contentLength;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n blockID = generateBlockID(blockIDPrefix_1, i);\n start = options.blockSize * i;\n end = i === numBlocks_1 - 1 ? size : start + options.blockSize;\n contentLength = end - start;\n blockList_1.push(blockID);\n return [4 /*yield*/, this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions\n })];\n case 1:\n _a.sent();\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress_2 += contentLength;\n if (options.onProgress) {\n options.onProgress({\n loadedBytes: transferProgress_2\n });\n }\n return [2 /*return*/];\n }\n });\n }); });\n };\n for (i = 0; i < numBlocks_1; i++) {\n _loop_2(i);\n }\n return [4 /*yield*/, batch.do()];\n case 4:\n _b.sent();\n return [2 /*return*/, this.commitBlockList(blockList_1, updatedOptions)];\n case 5:\n e_31 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_31.message\n });\n throw e_31;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n BlockBlobClient.prototype.uploadFile = function (filePath, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, size, e_32;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-uploadFile\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n return [4 /*yield*/, fsStat(filePath)];\n case 2:\n size = (_b.sent()).size;\n return [4 /*yield*/, this.uploadSeekableInternal(function (offset, count) {\n return function () {\n return fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? 
offset + count - 1 : Infinity,\n start: offset\n });\n };\n }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))];\n case 3: return [2 /*return*/, _b.sent()];\n case 4:\n e_32 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_32.message\n });\n throw e_32;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) {\n if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; }\n if (maxConcurrency === void 0) { maxConcurrency = 5; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_33;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"BlockBlobClient-uploadStream\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n blockNum_1 = 0;\n blockIDPrefix_2 = coreHttp.generateUuid();\n transferProgress_3 = 0;\n blockList_2 = [];\n scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () {\n var blockID;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n blockID = generateBlockID(blockIDPrefix_2, blockNum_1);\n blockList_2.push(blockID);\n blockNum_1++;\n return [4 /*yield*/, this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions\n })];\n case 1:\n _a.sent();\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress_3 += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress_3 });\n }\n return [2 /*return*/];\n }\n });\n }); }, \n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3));\n return [4 /*yield*/, scheduler.do()];\n case 2:\n _b.sent();\n return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { 
tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))];\n case 3: return [2 /*return*/, _b.sent()];\n case 4:\n e_33 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_33.message\n });\n throw e_33;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n return BlockBlobClient;\n}(BlobClient));\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nvar PageBlobClient = /** @class */ (function (_super) {\n tslib.__extends(PageBlobClient, _super);\n function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.pageBlobContext = new PageBlob(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n PageBlobClient.prototype.withSnapshot = function (snapshot) {\n return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a page blob of the specified length. 
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n PageBlobClient.prototype.create = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_34;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-create\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.create(0, size, tslib.__assign({ abortSignal: options.abortSignal, blobHTTPHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_34 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_34.message\n });\n throw e_34;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n PageBlobClient.prototype.createIfNotExists = function (size, options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, updatedOptions, conditions, res, e_35;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"PageBlobClient-createIfNotExists\", options), span = _c.span, updatedOptions = _c.updatedOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n conditions = { ifNoneMatch: ETagAny };\n return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: updatedOptions.tracingOptions }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_35 = _d.sent();\n if (((_a = e_35.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"BlobAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a blob only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e_35.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_35.message\n });\n throw e_35;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n PageBlobClient.prototype.uploadPages = function (body, offset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_36;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-uploadPages\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.uploadPages(body, count, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), onUploadProgress: options.onProgress, range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_36 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_36.message\n });\n throw e_36;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_37;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _b = createSpan(\"PageBlobClient-uploadPagesFromURL\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), tslib.__assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_37 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_37.message\n });\n throw e_37;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n PageBlobClient.prototype.clearPages = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_38;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-clearPages\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.clearPages(0, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_38 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_38.message\n });\n throw e_38;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n PageBlobClient.prototype.getPageRanges = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_39;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-getPageRanges\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRanges(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions)))\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_39 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_39.message\n });\n throw e_39;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_40;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-getPageRangesDiff\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions)))\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_40 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_40.message\n });\n throw e_40;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_41;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-GetPageRangesDiffForManagedDisks\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions)))\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_41 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_41.message\n });\n throw e_41;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n PageBlobClient.prototype.resize = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_42;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-resize\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.resize(size, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_42 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_42.message\n });\n throw e_42;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_43;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-updateSequenceNumber\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, tslib.__assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_43 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_43.message\n });\n throw e_43;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. 
For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n PageBlobClient.prototype.startCopyIncremental = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, e_44;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"PageBlobClient-startCopyIncremental\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_44 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_44.message\n });\n throw e_44;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return PageBlobClient;\n}(BlobClient));\n\n// Copyright (c) Microsoft Corporation.\nfunction getBodyAsText(batchResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer, responseLength;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)];\n case 1:\n responseLength = _a.sent();\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n return [2 /*return*/, buffer.toString()];\n }\n });\n });\n}\nfunction utf8ByteLength(str) {\n return Buffer.byteLength(str);\n}\n\n// Copyright (c) Microsoft Corporation.\nvar HTTP_HEADER_DELIMITER = \": \";\nvar SPACE_DELIMITER = \" \";\nvar NOT_FOUND = -1;\n/**\n * Util class for parsing batch response.\n */\nvar BatchResponseParser = /** @class */ (function () {\n function BatchResponseParser(batchResponse, subRequests) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n }\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType.split(\"=\")[1];\n this.perResponsePrefix = \"--\" + this.responseBatchBoundary + HTTP_LINE_ENDING;\n this.batchResponseEnding = \"--\" + this.responseBatchBoundary + \"--\";\n }\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n BatchResponseParser.prototype.parseBatchResponse = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, 
responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\"Invalid state: batch request failed with status: '\" + this.batchResponse._response.status + \"'.\");\n }\n return [4 /*yield*/, getBodyAsText(this.batchResponse)];\n case 1:\n responseBodyAsText = _a.sent();\n subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1);\n subResponseCount = subResponses.length;\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount != this.subRequests.size && subResponseCount != 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n deserializedSubResponses = new Array(subResponseCount);\n subResponsesSucceededCount = 0;\n subResponsesFailedCount = 0;\n // Parse sub subResponses.\n for (index = 0; index < subResponseCount; index++) {\n subResponse = subResponses[index];\n deserializedSubResponse = {};\n deserializedSubResponse.headers = new coreHttp.HttpHeaders();\n responseLines = subResponse.split(\"\" + HTTP_LINE_ENDING);\n subRespHeaderStartFound = false;\n subRespHeaderEndFound = false;\n subRespFailed = false;\n contentId = NOT_FOUND;\n for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) {\n responseLine = responseLines_1[_i];\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n continue; // Skip convention headers not specifically for sub request i.e. 
Content-Type: application/http and Content-ID: *\n }\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n continue; // Skip empty line\n }\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\"Invalid state: find non-empty line '\" + responseLine + \"' without HTTP header delimiter '\" + HTTP_HEADER_DELIMITER + \"'.\");\n }\n tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n }\n else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (contentId != NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined) {\n deserializedSubResponse._request = this.subRequests.get(contentId);\n deserializedSubResponses[contentId] = deserializedSubResponse;\n }\n else {\n logger.error(\"subResponses[\" + index + \"] is dropped as the Content-ID is not found or invalid, Content-ID: \" + contentId);\n }\n if (subRespFailed) {\n subResponsesFailedCount++;\n }\n else {\n subResponsesSucceededCount++;\n }\n }\n return [2 /*return*/, {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount\n }];\n }\n });\n });\n };\n return BatchResponseParser;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar MutexLockStatus;\n(function (MutexLockStatus) {\n MutexLockStatus[MutexLockStatus[\"LOCKED\"] = 0] = \"LOCKED\";\n MutexLockStatus[MutexLockStatus[\"UNLOCKED\"] = 1] = \"UNLOCKED\";\n})(MutexLockStatus || (MutexLockStatus = {}));\n/**\n * An async mutex lock.\n */\nvar Mutex = /** @class */ (function () {\n function Mutex() {\n }\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n Mutex.lock = function (key) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve) {\n if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) {\n _this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n }\n else {\n _this.onUnlockEvent(key, function () {\n _this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n })];\n });\n });\n };\n /**\n * Unlock a key.\n *\n * @param key -\n */\n Mutex.unlock = function (key) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve) {\n if (_this.keys[key] === MutexLockStatus.LOCKED) {\n _this.emitUnlockEvent(key);\n }\n delete _this.keys[key];\n resolve();\n })];\n });\n });\n };\n Mutex.onUnlockEvent = function (key, handler) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n }\n else {\n this.listeners[key].push(handler);\n }\n };\n Mutex.emitUnlockEvent = function (key) {\n var _this = this;\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n var handler_1 = this.listeners[key].shift();\n setImmediate(function () {\n handler_1.call(_this);\n });\n }\n };\n Mutex.keys = {};\n Mutex.listeners = {};\n return Mutex;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nvar BlobBatch = /** @class */ (function () {\n function BlobBatch() {\n this.batch = \"batch\";\n this.batchRequest = new InnerBatchRequest();\n }\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n BlobBatch.prototype.getMultiPartContentType = function () {\n return this.batchRequest.getMultipartContentType();\n };\n /**\n * Get assembled HTTP request body for sub requests.\n */\n BlobBatch.prototype.getHttpRequestBody = function () {\n return this.batchRequest.getHttpRequestBody();\n };\n /**\n * Get sub requests that are added into the batch request.\n */\n BlobBatch.prototype.getSubRequests = function () {\n return this.batchRequest.getSubRequests();\n };\n BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, Mutex.lock(this.batch)];\n case 1:\n _a.sent();\n _a.label = 2;\n case 2:\n _a.trys.push([2, , 4, 6]);\n this.batchRequest.preAddSubRequest(subRequest);\n return [4 /*yield*/, assembleSubRequestFunc()];\n case 3:\n _a.sent();\n this.batchRequest.postAddSubRequest(subRequest);\n return [3 /*break*/, 6];\n case 4: return [4 /*yield*/, Mutex.unlock(this.batch)];\n case 5:\n _a.sent();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n BlobBatch.prototype.setBatchType = function (batchType) {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\"BlobBatch only supports one operation type per batch and it already 
is being used for \" + this.batchType + \" operations.\");\n }\n };\n BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var url, credential, _a, span, updatedOptions, e_1;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (typeof urlOrBlobClient === \"string\" &&\n ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrOptions))) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n }\n else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions;\n }\n else {\n throw new RangeError(\"Invalid arguments. Either url and credential, or BlobClient need be provided.\");\n }\n if (!options) {\n options = {};\n }\n _a = createSpan(\"BatchDeleteRequest-addSubRequest\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n this.setBatchType(\"delete\");\n return [4 /*yield*/, this.addSubRequestInternal({\n url: url,\n credential: credential\n }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions)];\n case 1:\n _a.sent();\n return [2 /*return*/];\n }\n });\n }); })];\n case 2:\n _b.sent();\n return [3 /*break*/, 5];\n case 3:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var url, credential, tier, _a, span, updatedOptions, e_2;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (typeof urlOrBlobClient === \"string\" &&\n ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrTier))) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier;\n tier = tierOrOptions;\n }\n else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier;\n options = tierOrOptions;\n }\n else {\n throw new RangeError(\"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\");\n }\n if (!options) {\n options = {};\n }\n _a = createSpan(\"BatchSetTierRequest-addSubRequest\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n this.setBatchType(\"setAccessTier\");\n return [4 /*yield*/, this.addSubRequestInternal({\n url: url,\n credential: credential\n }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions)];\n case 1:\n _a.sent();\n return [2 /*return*/];\n }\n });\n }); })];\n case 2:\n _b.sent();\n return [3 /*break*/, 5];\n case 3:\n e_2 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return BlobBatch;\n}());\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nvar InnerBatchRequest = /** @class */ (function () {\n function InnerBatchRequest() {\n this.operationCount = 0;\n this.body = \"\";\n var tempGuid = coreHttp.generateUuid();\n // batch_{batchid}\n this.boundary = \"batch_\" + tempGuid;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = \"--\" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + \": application/http\" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + \": binary\";\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = \"multipart/mixed; boundary=\" + this.boundary;\n // --batch_{batchid}--\n this.batchRequestEnding = \"--\" + this.boundary + \"--\";\n this.subRequests = new Map();\n }\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n InnerBatchRequest.prototype.createPipeline = function (credential) {\n var isAnonymousCreds = credential instanceof AnonymousCredential;\n var policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n var factories = new Array(policyFactoryLength);\n factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = coreHttp.isTokenCredential(credential)\n ? 
attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential)\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n return new Pipeline(factories, {});\n };\n InnerBatchRequest.prototype.appendSubRequestToBody = function (request) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix,\n HeaderConstants.CONTENT_ID + \": \" + this.operationCount,\n \"\",\n request.method.toString() + \" \" + getURLPathAndQuery(request.url) + \" \" + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {\n var header = _a[_i];\n this.body += header.name + \": \" + header.value + HTTP_LINE_ENDING;\n }\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n };\n InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(\"Cannot exceed \" + BATCH_MAX_REQUEST + \" sub requests in a single batch\");\n }\n // Fast fail if url for sub request is invalid\n var path = getURLPath(subRequest.url);\n if (!path || path == \"\") {\n throw new RangeError(\"Invalid url for sub request: '\" + subRequest.url + \"'\");\n }\n };\n InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n };\n // Return the http request body with assembling the ending line to the sub request body.\n InnerBatchRequest.prototype.getHttpRequestBody = function () {\n return \"\" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING;\n };\n InnerBatchRequest.prototype.getMultipartContentType = function () {\n return this.multipartContentType;\n };\n InnerBatchRequest.prototype.getSubRequests = function () {\n return this.subRequests;\n };\n return InnerBatchRequest;\n}());\nvar BatchRequestAssemblePolicy = /** @class */ (function (_super) {\n tslib.__extends(BatchRequestAssemblePolicy, _super);\n function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.dummyResponse = {\n request: new coreHttp.WebResource(),\n status: 200,\n headers: new coreHttp.HttpHeaders()\n };\n _this.batchRequest = batchRequest;\n return _this;\n }\n BatchRequestAssemblePolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)];\n case 1:\n _a.sent();\n return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire\n }\n });\n });\n };\n return BatchRequestAssemblePolicy;\n}(coreHttp.BaseRequestPolicy));\nvar BatchRequestAssemblePolicyFactory = /** @class */ (function () {\n function BatchRequestAssemblePolicyFactory(batchRequest) {\n this.batchRequest = batchRequest;\n }\n BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n };\n return BatchRequestAssemblePolicyFactory;\n}());\nvar BatchHeaderFilterPolicy = /** @class 
*/ (function (_super) {\n tslib.__extends(BatchHeaderFilterPolicy, _super);\n function BatchHeaderFilterPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n BatchHeaderFilterPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var xMsHeaderName, _i, _a, header;\n return tslib.__generator(this, function (_b) {\n xMsHeaderName = \"\";\n for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {\n header = _a[_i];\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return BatchHeaderFilterPolicy;\n}(coreHttp.BaseRequestPolicy));\nvar BatchHeaderFilterPolicyFactory = /** @class */ (function () {\n function BatchHeaderFilterPolicyFactory() {\n }\n BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n };\n return BatchHeaderFilterPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nvar BlobBatchClient = /** @class */ (function () {\n function BlobBatchClient(url, credentialOrPipeline, options) {\n var pipeline;\n if (credentialOrPipeline instanceof Pipeline) {\n pipeline = credentialOrPipeline;\n }\n else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n var path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n }\n else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n BlobBatchClient.prototype.createBatch = function () {\n return new BlobBatch();\n };\n BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var batch, _i, urlsOrBlobClients_1, urlOrBlobClient;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n batch = new BlobBatch();\n _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients;\n _a.label = 1;\n case 1:\n if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6];\n urlOrBlobClient = urlsOrBlobClients_1[_i];\n if (!(typeof urlOrBlobClient === \"string\")) return [3 /*break*/, 3];\n return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)];\n case 2:\n _a.sent();\n return [3 /*break*/, 5];\n case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions)];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n _i++;\n return [3 /*break*/, 1];\n case 6: return [2 /*return*/, this.submitBatch(batch)];\n }\n });\n });\n };\n BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) {\n return tslib.__awaiter(this, 
void 0, void 0, function () {\n var batch, _i, urlsOrBlobClients_2, urlOrBlobClient;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n batch = new BlobBatch();\n _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients;\n _a.label = 1;\n case 1:\n if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6];\n urlOrBlobClient = urlsOrBlobClients_2[_i];\n if (!(typeof urlOrBlobClient === \"string\")) return [3 /*break*/, 3];\n return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)];\n case 2:\n _a.sent();\n return [3 /*break*/, 5];\n case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n _i++;\n return [3 /*break*/, 1];\n case 6: return [2 /*return*/, this.submitBatch(batch)];\n }\n });\n });\n };\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n BlobBatchClient.prototype.submitBatch = function (batchRequest, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!batchRequest || batchRequest.getSubRequests().size == 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n _a = createSpan(\"BlobBatchClient-submitBatch\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n batchRequestBody = batchRequest.getHttpRequestBody();\n return [4 /*yield*/, this.serviceOrContainerContext.submitBatch(batchRequestBody, utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n rawBatchResponse = _b.sent();\n batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());\n return [4 /*yield*/, batchResponseParser.parseBatchResponse()];\n case 3:\n responseSummary = _b.sent();\n res = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: 
rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount\n };\n return [2 /*return*/, res];\n case 4:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n return BlobBatchClient;\n}());\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nvar ContainerClient = /** @class */ (function (_super) {\n tslib.__extends(ContainerClient, _super);\n function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) {\n var _this = this;\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this._containerName = _this.getContainerNameFromUrl();\n _this.containerContext = new Container(_this.storageClientContext);\n return _this;\n }\n Object.defineProperty(ContainerClient.prototype, \"containerName\", {\n /**\n * The name of the container.\n */\n get: function () {\n return this._containerName;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n ContainerClient.prototype.create = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-create\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: \n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return [2 /*return*/, _b.sent()];\n case 3:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options -\n */\n ContainerClient.prototype.createIfNotExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, updatedOptions, res, e_2;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"ContainerClient-createIfNotExists\", options), span = _c.span, updatedOptions = _c.updatedOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.create(updatedOptions)];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_2 = _d.sent();\n if (((_a = e_2.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a container only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_2.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_2.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. 
Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n ContainerClient.prototype.exists = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_3;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-exists\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions\n })];\n case 2:\n _b.sent();\n return [2 /*return*/, true];\n case 3:\n e_3 = _b.sent();\n if (e_3.statusCode === 404) {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when checking container existence\"\n });\n return [2 /*return*/, false];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n ContainerClient.prototype.getBlobClient = function (blobName) {\n return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n ContainerClient.prototype.getAppendBlobClient = function (blobName) {\n return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n ContainerClient.prototype.getBlockBlobClient = function (blobName) {\n return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n ContainerClient.prototype.getPageBlobClient = function (blobName) {\n return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. 
This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n ContainerClient.prototype.getProperties = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_4;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-getProperties\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_4 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified container for deletion. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n ContainerClient.prototype.delete = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_5;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-delete\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.deleteMethod(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_5 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified container for deletion if it exists. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n ContainerClient.prototype.deleteIfExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, updatedOptions, res, e_6;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"ContainerClient-deleteIfExists\", options), span = _c.span, updatedOptions = _c.updatedOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.delete(updatedOptions)];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_6 = _d.sent();\n if (((_a = e_6.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"ContainerNotFound\") {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when deleting a container only if it exists.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_6.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_6.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_6.message\n });\n throw e_6;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n ContainerClient.prototype.setMetadata = function (metadata, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_7;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\");\n }\n _a = createSpan(\"ContainerClient-setMetadata\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_7 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_7.message\n });\n throw e_7;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n ContainerClient.prototype.getAccessPolicy = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, res, _i, response_1, identifier, accessPolicy, e_8;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-getAccessPolicy\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.getAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _b.sent();\n res = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version\n };\n for (_i = 0, response_1 = response; _i < response_1.length; _i++) {\n identifier = response_1[_i];\n accessPolicy = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions\n };\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n res.signedIdentifiers.push({\n accessPolicy: accessPolicy,\n id: identifier.id\n });\n }\n return [2 /*return*/, res];\n case 3:\n e_8 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_8.message\n });\n throw e_8;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets the permissions for the specified container. 
The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, acl, _i, _b, identifier, e_9;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _a = createSpan(\"ContainerClient-setAccessPolicy\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n acl = [];\n for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) {\n identifier = _b[_i];\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\"\n },\n id: identifier.id\n });\n }\n return [4 /*yield*/, this.containerContext.setAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, access: access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_9 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_9.message\n });\n throw e_9;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {\n return new BlobLeaseClient(this, proposeLeaseId);\n };\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. 
To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, blockBlobClient, response, e_10;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-uploadBlockBlob\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n blockBlobClient = this.getBlockBlobClient(blobName);\n return [4 /*yield*/, blockBlobClient.upload(body, contentLength, updatedOptions)];\n case 2:\n response = _b.sent();\n return [2 /*return*/, {\n blockBlobClient: blockBlobClient,\n response: response\n }];\n case 3:\n e_10 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_10.message\n });\n throw e_10;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n ContainerClient.prototype.deleteBlob = function (blobName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, blobClient, e_11;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-deleteBlob\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return [4 /*yield*/, blobClient.delete(updatedOptions)];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_11 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_11.message\n });\n throw e_11;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n ContainerClient.prototype.listBlobFlatSegment = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, wrappedResponse, e_12;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-listBlobFlatSegment\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {\n var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });\n return blobItem;\n }) }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_12 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_12.message\n });\n throw e_12;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, wrappedResponse, e_13;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-listBlobHierarchySegment\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {\n var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });\n return blobItem;\n }) }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_13 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_13.message\n });\n throw e_13;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n ContainerClient.prototype.listSegments = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listSegments_1() {\n var listBlobsFlatSegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))];\n case 2:\n listBlobsFlatSegmentResponse = _a.sent();\n marker = listBlobsFlatSegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n ContainerClient.prototype.listItems = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItems_1() {\n var marker, _a, _b, listBlobsFlatSegmentResponse, e_14_1;\n var e_14, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.listSegments(marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n listBlobsFlatSegmentResponse = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_14_1 = _d.sent();\n e_14 = { error: e_14_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_14) throw e_14.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of 
containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n ContainerClient.prototype.listBlobsFlat = function (options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n var include = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));\n // AsyncIterableIterator to iterate over blobs\n var iter = this.listItems(updatedOptions);\n return _a = {\n /**\n * The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));\n },\n _a;\n };\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() {\n var listBlobsHierarchySegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))];\n case 2:\n listBlobsHierarchySegmentResponse = _a.sent();\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() {\n var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_15_1;\n var e_15, _f;\n return tslib.__generator(this, function (_g) {\n switch (_g.label) {\n case 0:\n _g.trys.push([0, 14, 15, 20]);\n _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options));\n _g.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13];\n listBlobsHierarchySegmentResponse = _b.value;\n segment = listBlobsHierarchySegmentResponse.segment;\n if (!segment.blobPrefixes) return [3 /*break*/, 7];\n _i = 0, _c = segment.blobPrefixes;\n _g.label = 3;\n case 3:\n if (!(_i < _c.length)) return [3 /*break*/, 7];\n prefix = _c[_i];\n return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: \"prefix\" }, prefix))];\n case 4: return [4 /*yield*/, _g.sent()];\n case 5:\n _g.sent();\n _g.label = 6;\n case 6:\n _i++;\n return [3 /*break*/, 3];\n case 7:\n _d = 0, _e = segment.blobItems;\n _g.label = 8;\n case 8:\n if (!(_d < _e.length)) return [3 /*break*/, 12];\n blob = _e[_d];\n return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: \"blob\" }, blob))];\n case 9: return [4 /*yield*/, _g.sent()];\n case 10:\n _g.sent();\n _g.label = 11;\n case 11:\n _d++;\n return [3 /*break*/, 8];\n case 12: return [3 /*break*/, 1];\n case 13: return [3 /*break*/, 20];\n case 14:\n e_15_1 = _g.sent();\n e_15 = { error: e_15_1 };\n return [3 /*break*/, 20];\n case 15:\n _g.trys.push([15, , 18, 19]);\n if (!(_b && !_b.done && (_f = _a.return))) return [3 /*break*/, 17];\n return [4 /*yield*/, tslib.__await(_f.call(_a))];\n case 16:\n _g.sent();\n _g.label = 17;\n case 17: return [3 /*break*/, 19];\n case 18:\n if (e_15) throw e_15.error;\n return [7 /*endfinally*/];\n case 19: return [7 /*endfinally*/];\n case 20: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified 
account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\"}).byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n var include = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? 
{ include: include } : {}));\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n var iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return _a = {\n /**\n * The next method, part of the iteration protocol\n */\n next: function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, iter.next()];\n });\n });\n }\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));\n },\n _a;\n };\n ContainerClient.prototype.getContainerNameFromUrl = function () {\n var containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n var parsedUrl = coreHttp.URLBuilder.parse(this.url);\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath().split(\"/\")[1];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath().split(\"/\")[2];\n }\n else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath().split(\"/\")[1];\n }\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n return containerName;\n }\n catch (error) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n };\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n ContainerClient.prototype.generateSasUrl = function (options) {\n var _this = this;\n return new Promise(function (resolve) {\n if (!(_this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\"Can only generate the SAS when the client is initialized with a shared key credential\");\n }\n var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName }, options), _this.credential).toString();\n resolve(appendToURLQuery(_this.url, sas));\n });\n };\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n ContainerClient.prototype.getBlobBatchClient = function () {\n return new BlobBatchClient(this.url, this.pipeline);\n };\n return ContainerClient;\n}(StorageClient));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nvar AccountSASPermissions = /** @class */ (function () {\n function AccountSASPermissions() {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n this.read = false;\n /**\n * Permission to write resources granted.\n */\n this.write = false;\n /**\n * Permission to create blobs and files granted.\n */\n this.delete = false;\n /**\n * Permission to delete versions granted.\n */\n this.deleteVersion = false;\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n this.list = false;\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n this.add = false;\n /**\n * Permission to create blobs and files granted.\n */\n this.create = false;\n /**\n * Permissions to update messages and table entities granted.\n */\n this.update = false;\n /**\n * Permission to get and delete messages granted.\n */\n this.process = false;\n /**\n * Specfies Tag access granted.\n */\n this.tag = false;\n /**\n * Permission to filter blobs.\n */\n this.filter = false;\n }\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n AccountSASPermissions.parse = function (permissions) {\n var accountSASPermissions = new AccountSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var c = permissions_1[_i];\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n 
accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n default:\n throw new RangeError(\"Invalid permission character: \" + c);\n }\n }\n return accountSASPermissions;\n };\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n AccountSASPermissions.from = function (permissionLike) {\n var accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n return accountSASPermissions;\n };\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n AccountSASPermissions.prototype.toString = function () {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n return permissions.join(\"\");\n };\n return AccountSASPermissions;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. 
Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nvar AccountSASResourceTypes = /** @class */ (function () {\n function AccountSASResourceTypes() {\n /**\n * Permission to access service level APIs granted.\n */\n this.service = false;\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n this.container = false;\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n this.object = false;\n }\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n AccountSASResourceTypes.parse = function (resourceTypes) {\n var accountSASResourceTypes = new AccountSASResourceTypes();\n for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) {\n var c = resourceTypes_1[_i];\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(\"Invalid resource type: \" + c);\n }\n }\n return accountSASResourceTypes;\n };\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n AccountSASResourceTypes.prototype.toString = function () {\n var resourceTypes = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n };\n return AccountSASResourceTypes;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nvar AccountSASServices = /** @class */ (function () {\n function AccountSASServices() {\n /**\n * Permission to access blob resources granted.\n */\n this.blob = false;\n /**\n * Permission to access file resources granted.\n */\n this.file = false;\n /**\n * Permission to access queue resources granted.\n */\n this.queue = false;\n /**\n * Permission to access table resources granted.\n */\n this.table = false;\n }\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n AccountSASServices.parse = function (services) {\n var accountSASServices = new AccountSASServices();\n for (var _i = 0, services_1 = services; _i < services_1.length; _i++) {\n var c = services_1[_i];\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(\"Invalid service character: \" + c);\n }\n }\n return accountSASServices;\n };\n /**\n * Converts the given services to a string.\n *\n */\n AccountSASServices.prototype.toString = function () {\n var services = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n };\n return AccountSASServices;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) {\n var version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString());\n var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString();\n var stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : \"\",\n version,\n \"\" // Account SAS requires an additional newline character\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange);\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nvar BlobServiceClient = /** @class */ (function (_super) {\n tslib.__extends(BlobServiceClient, _super);\n function BlobServiceClient(url, credentialOrPipeline, options) {\n var _this = this;\n var pipeline;\n if (credentialOrPipeline instanceof Pipeline) {\n pipeline = credentialOrPipeline;\n }\n else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipeline)) {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.serviceContext = new Service(_this.storageClientContext);\n return _this;\n }\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n BlobServiceClient.fromConnectionString = function (connectionString, options) {\n options = options || {};\n var extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n var pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n var pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n };\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n BlobServiceClient.prototype.getContainerClient = function (containerName) {\n return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);\n };\n /**\n * Create a Blob container.\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n BlobServiceClient.prototype.createContainer = function (containerName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, containerClient, containerCreateResponse, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-createContainer\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(containerName);\n return [4 /*yield*/, containerClient.create(updatedOptions)];\n case 2:\n containerCreateResponse = _b.sent();\n return [2 /*return*/, {\n containerClient: containerClient,\n containerCreateResponse: containerCreateResponse\n }];\n case 3:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n BlobServiceClient.prototype.deleteContainer = function (containerName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, containerClient, e_2;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-deleteContainer\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(containerName);\n return [4 /*yield*/, 
containerClient.delete(updatedOptions)];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_2 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param deletedContainerName - Name of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n BlobServiceClient.prototype.undeleteContainer = function (deletedContainerName, deletedContainerVersion, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, containerClient, containerContext, containerUndeleteResponse, e_3;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-undeleteContainer\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName);\n containerContext = new Container(containerClient[\"storageClientContext\"]);\n return [4 /*yield*/, containerContext.restore(tslib.__assign({ deletedContainerName: deletedContainerName,\n deletedContainerVersion: deletedContainerVersion }, updatedOptions))];\n case 2:\n containerUndeleteResponse = _b.sent();\n return [2 /*return*/, { containerClient: containerClient, containerUndeleteResponse: containerUndeleteResponse }];\n case 3:\n e_3 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n BlobServiceClient.prototype.renameContainer = function (sourceContainerName, destinationContainerName, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, updatedOptions, containerClient, containerContext, containerRenameResponse, e_4;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobServiceClient-renameContainer\", options), span = _b.span, updatedOptions = _b.updatedOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(destinationContainerName);\n containerContext = new Container(containerClient[\"storageClientContext\"]);\n return [4 /*yield*/, containerContext.rename(sourceContainerName, tslib.__assign(tslib.__assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId }))];\n case 2:\n containerRenameResponse = _c.sent();\n return [2 /*return*/, { containerClient: containerClient, containerRenameResponse: containerRenameResponse }];\n case 3:\n e_4 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n BlobServiceClient.prototype.getProperties = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_5;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getProperties\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_5 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets properties for a storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n BlobServiceClient.prototype.setProperties = function (properties, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_6;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-setProperties\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.setProperties(properties, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_6 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_6.message\n });\n throw e_6;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Retrieves statistics related to replication for the Blob service. 
It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n BlobServiceClient.prototype.getStatistics = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_7;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getStatistics\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getStatistics(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_7 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_7.message\n });\n throw e_7;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n BlobServiceClient.prototype.getAccountInfo = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_8;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getAccountInfo\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getAccountInfo(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_8 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_8.message\n });\n throw e_8;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n BlobServiceClient.prototype.listContainersSegment = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, e_9;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-listContainersSegment\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === \"string\" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_9 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_9.message\n });\n throw e_9;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, wrappedResponse, e_10;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-findBlobsByTagsSegment\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.filterBlobs(tslib.__assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, blobs: response.blobs.map(function (blob) {\n var _a;\n var tagValue = \"\";\n if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return tslib.__assign(tslib.__assign({}, blob), { tags: toTags(blob.tags), tagValue: tagValue });\n }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_10 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_10.message\n });\n throw e_10;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() {\n var response;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 6];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))];\n case 2:\n response = _a.sent();\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n return [4 /*yield*/, tslib.__await(response)];\n case 3: return [4 /*yield*/, _a.sent()];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n if (marker) return [3 /*break*/, 1];\n _a.label = 6;\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() {\n var marker, _a, _b, segment, e_11_1;\n var e_11, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n segment = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_11_1 = _d.sent();\n e_11 = { error: e_11_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_11) throw e_11.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 
1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n // AsyncIterableIterator to iterate over blobs\n var listSegmentOptions = tslib.__assign({}, options);\n var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return _a = {\n /**\n * The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));\n },\n _a;\n };\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n BlobServiceClient.prototype.listSegments = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listSegments_1() {\n var listContainersSegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))];\n case 2:\n listContainersSegmentResponse = _a.sent();\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n BlobServiceClient.prototype.listItems = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItems_1() {\n var marker, _a, _b, segment, e_12_1;\n var e_12, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.listSegments(marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n segment = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_12_1 = _d.sent();\n e_12 = { error: e_12_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_12) throw e_12.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of 
blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n BlobServiceClient.prototype.listContainers = function (options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var include = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n // AsyncIterableIterator to iterate over containers\n var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));\n var iter = this.listItems(listSegmentOptions);\n return _a = {\n /**\n * The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));\n },\n _a;\n };\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time\n */\n BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, updatedOptions, response, userDelegationKey, res, e_13;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getUserDelegationKey\", options), span = _a.span, updatedOptions = _a.updatedOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getUserDelegationKey({\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false)\n }, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))];\n case 2:\n response = _b.sent();\n userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value\n };\n res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);\n return [2 /*return*/, res];\n case 3:\n e_13 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_13.message\n });\n throw e_13;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n BlobServiceClient.prototype.getBlobBatchClient = function () {\n return new BlobBatchClient(this.url, this.pipeline);\n };\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n BlobServiceClient.prototype.generateAccountSasUrl = function (expiresOn, permissions, resourceTypes, options) {\n if (permissions === void 0) { permissions = AccountSASPermissions.parse(\"r\"); }\n if (resourceTypes === void 0) { resourceTypes = \"sco\"; }\n if (options === void 0) { options = {}; }\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\"Can only generate the account SAS when the client is initialized with a shared key credential\");\n }\n if (expiresOn === undefined) {\n var now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n var sas = generateAccountSASQueryParameters(tslib.__assign({ permissions: permissions,\n expiresOn: expiresOn,\n resourceTypes: resourceTypes, services: AccountSASServices.parse(\"b\").toString() }, options), this.credential).toString();\n return appendToURLQuery(this.url, sas);\n };\n return BlobServiceClient;\n}(StorageClient));\n\nObject.defineProperty(exports, 'BaseRequestPolicy', {\n enumerable: true,\n get: function () {\n return coreHttp.BaseRequestPolicy;\n }\n});\nObject.defineProperty(exports, 'HttpHeaders', {\n enumerable: true,\n get: function () {\n return coreHttp.HttpHeaders;\n }\n});\nObject.defineProperty(exports, 'RequestPolicyOptions', {\n enumerable: true,\n get: function () {\n return coreHttp.RequestPolicyOptions;\n }\n});\nObject.defineProperty(exports, 'RestError', {\n enumerable: true,\n get: function () {\n return coreHttp.RestError;\n }\n});\nObject.defineProperty(exports, 'WebResource', {\n enumerable: true,\n get: function () {\n return coreHttp.WebResource;\n }\n});\nObject.defineProperty(exports, 'deserializationPolicy', {\n enumerable: true,\n get: function () {\n return coreHttp.deserializationPolicy;\n }\n});\nexports.AccountSASPermissions = AccountSASPermissions;\nexports.AccountSASResourceTypes = AccountSASResourceTypes;\nexports.AccountSASServices = AccountSASServices;\nexports.AnonymousCredential = AnonymousCredential;\nexports.AnonymousCredentialPolicy = AnonymousCredentialPolicy;\nexports.AppendBlobClient = AppendBlobClient;\nexports.BlobBatch = BlobBatch;\nexports.BlobBatchClient = BlobBatchClient;\nexports.BlobClient = BlobClient;\nexports.BlobLeaseClient = BlobLeaseClient;\nexports.BlobSASPermissions = BlobSASPermissions;\nexports.BlobServiceClient = BlobServiceClient;\nexports.BlockBlobClient = BlockBlobClient;\nexports.ContainerClient = ContainerClient;\nexports.ContainerSASPermissions = ContainerSASPermissions;\nexports.Credential = Credential;\nexports.CredentialPolicy = CredentialPolicy;\nexports.PageBlobClient = PageBlobClient;\nexports.Pipeline = Pipeline;\nexports.SASQueryParameters = SASQueryParameters;\nexports.StorageBrowserPolicy = StorageBrowserPolicy;\nexports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory;\nexports.StorageOAuthScopes = StorageOAuthScopes;\nexports.StorageRetryPolicy = StorageRetryPolicy;\nexports.StorageRetryPolicyFactory = StorageRetryPolicyFactory;\nexports.StorageSharedKeyCredential = StorageSharedKeyCredential;\nexports.StorageSharedKeyCredentialPolicy = 
StorageSharedKeyCredentialPolicy;\nexports.generateAccountSASQueryParameters = generateAccountSASQueryParameters;\nexports.generateBlobSASQueryParameters = generateBlobSASQueryParameters;\nexports.logger = logger;\nexports.newPipeline = newPipeline;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ContextAPI = void 0;\nvar context_base_1 = require(\"@opentelemetry/context-base\");\nvar global_utils_1 = require(\"./global-utils\");\nvar NOOP_CONTEXT_MANAGER = new context_base_1.NoopContextManager();\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nvar ContextAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function ContextAPI() {\n }\n /** Get the singleton instance of the Context API */\n ContextAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n return this._instance;\n };\n /**\n * Set the current context manager. 
Returns the initialized context manager\n */\n ContextAPI.prototype.setGlobalContextManager = function (contextManager) {\n if (global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) {\n // global context manager has already been set\n return this._getContextManager();\n }\n global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, contextManager, NOOP_CONTEXT_MANAGER);\n return contextManager;\n };\n /**\n * Get the currently active context\n */\n ContextAPI.prototype.active = function () {\n return this._getContextManager().active();\n };\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n */\n ContextAPI.prototype.with = function (context, fn) {\n return this._getContextManager().with(context, fn);\n };\n /**\n * Bind a context to a target function or event emitter\n *\n * @param target function or event emitter to bind\n * @param context context to bind to the event emitter or function. Defaults to the currently active context\n */\n ContextAPI.prototype.bind = function (target, context) {\n if (context === void 0) { context = this.active(); }\n return this._getContextManager().bind(target, context);\n };\n ContextAPI.prototype._getContextManager = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NOOP_CONTEXT_MANAGER);\n };\n /** Disable and remove the global context manager */\n ContextAPI.prototype.disable = function () {\n this._getContextManager().disable();\n delete global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY];\n };\n return ContextAPI;\n}());\nexports.ContextAPI = ContextAPI;\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.API_BACKWARDS_COMPATIBILITY_VERSION = exports.makeGetter = exports._global = exports.GLOBAL_TRACE_API_KEY = exports.GLOBAL_PROPAGATION_API_KEY = exports.GLOBAL_METRICS_API_KEY = exports.GLOBAL_CONTEXT_MANAGER_API_KEY = void 0;\nvar platform_1 = require(\"../platform\");\nexports.GLOBAL_CONTEXT_MANAGER_API_KEY = Symbol.for('io.opentelemetry.js.api.context');\nexports.GLOBAL_METRICS_API_KEY = Symbol.for('io.opentelemetry.js.api.metrics');\nexports.GLOBAL_PROPAGATION_API_KEY = Symbol.for('io.opentelemetry.js.api.propagation');\nexports.GLOBAL_TRACE_API_KEY = Symbol.for('io.opentelemetry.js.api.trace');\nexports._global = platform_1._globalThis;\n/**\n * Make a function which accepts a version integer and returns the instance of an API if the version\n * is compatible, or a fallback version (usually NOOP) 
if it is not.\n *\n * @param requiredVersion Backwards compatibility version which is required to return the instance\n * @param instance Instance which should be returned if the required version is compatible\n * @param fallback Fallback instance, usually NOOP, which will be returned if the required version is not compatible\n */\nfunction makeGetter(requiredVersion, instance, fallback) {\n return function (version) {\n return version === requiredVersion ? instance : fallback;\n };\n}\nexports.makeGetter = makeGetter;\n/**\n * A number which should be incremented each time a backwards incompatible\n * change is made to the API. This number is used when an API package\n * attempts to access the global API to ensure it is getting a compatible\n * version. If the global API is not compatible with the API package\n * attempting to get it, a NOOP API implementation will be returned.\n */\nexports.API_BACKWARDS_COMPATIBILITY_VERSION = 0;\n//# sourceMappingURL=global-utils.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MetricsAPI = void 0;\nvar NoopMeterProvider_1 = require(\"../metrics/NoopMeterProvider\");\nvar global_utils_1 = require(\"./global-utils\");\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Metrics API\n */\nvar MetricsAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function MetricsAPI() {\n }\n /** Get the singleton instance of the Metrics API */\n MetricsAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new MetricsAPI();\n }\n return this._instance;\n };\n /**\n * Set the current global meter. Returns the initialized global meter provider.\n */\n MetricsAPI.prototype.setGlobalMeterProvider = function (provider) {\n if (global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) {\n // global meter provider has already been set\n return this.getMeterProvider();\n }\n global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopMeterProvider_1.NOOP_METER_PROVIDER);\n return provider;\n };\n /**\n * Returns the global meter provider.\n */\n MetricsAPI.prototype.getMeterProvider = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? 
_b : NoopMeterProvider_1.NOOP_METER_PROVIDER);\n };\n /**\n * Returns a meter from the global meter provider.\n */\n MetricsAPI.prototype.getMeter = function (name, version) {\n return this.getMeterProvider().getMeter(name, version);\n };\n /** Remove the global meter provider */\n MetricsAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY];\n };\n return MetricsAPI;\n}());\nexports.MetricsAPI = MetricsAPI;\n//# sourceMappingURL=metrics.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PropagationAPI = void 0;\nvar getter_1 = require(\"../context/propagation/getter\");\nvar NoopHttpTextPropagator_1 = require(\"../context/propagation/NoopHttpTextPropagator\");\nvar setter_1 = require(\"../context/propagation/setter\");\nvar context_1 = require(\"./context\");\nvar global_utils_1 = require(\"./global-utils\");\nvar contextApi = context_1.ContextAPI.getInstance();\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nvar PropagationAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function PropagationAPI() {\n }\n /** Get the singleton instance of the Propagator API */\n PropagationAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n return this._instance;\n };\n /**\n * Set the current propagator. Returns the initialized propagator\n */\n PropagationAPI.prototype.setGlobalPropagator = function (propagator) {\n if (global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY]) {\n // global propagator has already been set\n return this._getGlobalPropagator();\n }\n global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, propagator, NoopHttpTextPropagator_1.NOOP_HTTP_TEXT_PROPAGATOR);\n return propagator;\n };\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n * @param context Context carrying tracing data to inject. Defaults to the currently active context.\n */\n PropagationAPI.prototype.inject = function (carrier, setter, context) {\n if (setter === void 0) { setter = setter_1.defaultSetter; }\n if (context === void 0) { context = contextApi.active(); }\n return this._getGlobalPropagator().inject(context, carrier, setter);\n };\n /**\n * Extract context from a carrier\n *\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n * @param context Context which the newly created context will inherit from. 
Defaults to the currently active context.\n */\n PropagationAPI.prototype.extract = function (carrier, getter, context) {\n if (getter === void 0) { getter = getter_1.defaultGetter; }\n if (context === void 0) { context = contextApi.active(); }\n return this._getGlobalPropagator().extract(context, carrier, getter);\n };\n /** Remove the global propagator */\n PropagationAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY];\n };\n PropagationAPI.prototype._getGlobalPropagator = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NoopHttpTextPropagator_1.NOOP_HTTP_TEXT_PROPAGATOR);\n };\n return PropagationAPI;\n}());\nexports.PropagationAPI = PropagationAPI;\n//# sourceMappingURL=propagation.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceAPI = void 0;\nvar NoopTracerProvider_1 = require(\"../trace/NoopTracerProvider\");\nvar global_utils_1 = require(\"./global-utils\");\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nvar TraceAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function TraceAPI() {\n }\n /** Get the singleton instance of the Trace API */\n TraceAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n return this._instance;\n };\n /**\n * Set the current global tracer. Returns the initialized global tracer provider\n */\n TraceAPI.prototype.setGlobalTracerProvider = function (provider) {\n if (global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY]) {\n // global tracer provider has already been set\n return this.getTracerProvider();\n }\n global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopTracerProvider_1.NOOP_TRACER_PROVIDER);\n return this.getTracerProvider();\n };\n /**\n * Returns the global tracer provider.\n */\n TraceAPI.prototype.getTracerProvider = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? 
_b : NoopTracerProvider_1.NOOP_TRACER_PROVIDER);\n };\n /**\n * Returns a tracer from the global tracer provider.\n */\n TraceAPI.prototype.getTracer = function (name, version) {\n return this.getTracerProvider().getTracer(name, version);\n };\n /** Remove the global tracer provider */\n TraceAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY];\n };\n return TraceAPI;\n}());\nexports.TraceAPI = TraceAPI;\n//# sourceMappingURL=trace.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Logger.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Time.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=HttpTextPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_HTTP_TEXT_PROPAGATOR = exports.NoopHttpTextPropagator = void 0;\n/**\n * No-op implementations of {@link HttpTextPropagator}.\n */\nvar NoopHttpTextPropagator = /** @class */ (function () {\n function NoopHttpTextPropagator() {\n }\n /** Noop inject function does nothing */\n NoopHttpTextPropagator.prototype.inject = function (context, carrier, setter) { };\n /** Noop extract function does nothing and returns the input context */\n NoopHttpTextPropagator.prototype.extract = function (context, carrier, getter) {\n return context;\n };\n return NoopHttpTextPropagator;\n}());\nexports.NoopHttpTextPropagator = NoopHttpTextPropagator;\nexports.NOOP_HTTP_TEXT_PROPAGATOR = new NoopHttpTextPropagator();\n//# 
sourceMappingURL=NoopHttpTextPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultGetter = void 0;\n/**\n * Default getter which just does a simple property access. Returns\n * undefined if the key is not set.\n *\n * @param carrier\n * @param key\n */\nfunction defaultGetter(carrier, key) {\n return carrier[key];\n}\nexports.defaultGetter = defaultGetter;\n//# sourceMappingURL=getter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultSetter = void 0;\n/**\n * Default setter which sets value via direct property access\n *\n * @param carrier\n * @param key\n */\nfunction defaultSetter(carrier, key, value) {\n carrier[key] = value;\n}\nexports.defaultSetter = defaultSetter;\n//# sourceMappingURL=setter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=CorrelationContext.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.EntryTtl = void 0;\n/**\n * EntryTtl is an integer that represents number of hops an entry can propagate.\n *\n * For now, ONLY special values (0 and -1) are supported.\n */\nvar EntryTtl;\n(function (EntryTtl) {\n /**\n * NO_PROPAGATION is considered to have local context and is used within the\n * process it created.\n */\n EntryTtl[EntryTtl[\"NO_PROPAGATION\"] = 0] = \"NO_PROPAGATION\";\n /** UNLIMITED_PROPAGATION can propagate unlimited hops. 
*/\n EntryTtl[EntryTtl[\"UNLIMITED_PROPAGATION\"] = -1] = \"UNLIMITED_PROPAGATION\";\n})(EntryTtl = exports.EntryTtl || (exports.EntryTtl = {}));\n//# sourceMappingURL=EntryValue.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.propagation = exports.metrics = exports.trace = exports.context = void 0;\n__exportStar(require(\"./common/Logger\"), exports);\n__exportStar(require(\"./common/Time\"), exports);\n__exportStar(require(\"./context/propagation/getter\"), exports);\n__exportStar(require(\"./context/propagation/HttpTextPropagator\"), exports);\n__exportStar(require(\"./context/propagation/NoopHttpTextPropagator\"), exports);\n__exportStar(require(\"./context/propagation/setter\"), exports);\n__exportStar(require(\"./correlation_context/CorrelationContext\"), exports);\n__exportStar(require(\"./correlation_context/EntryValue\"), exports);\n__exportStar(require(\"./metrics/BatchObserverResult\"), exports);\n__exportStar(require(\"./metrics/BoundInstrument\"), exports);\n__exportStar(require(\"./metrics/Meter\"), exports);\n__exportStar(require(\"./metrics/MeterProvider\"), exports);\n__exportStar(require(\"./metrics/Metric\"), exports);\n__exportStar(require(\"./metrics/NoopMeter\"), exports);\n__exportStar(require(\"./metrics/NoopMeterProvider\"), exports);\n__exportStar(require(\"./metrics/Observation\"), exports);\n__exportStar(require(\"./metrics/ObserverResult\"), exports);\n__exportStar(require(\"./trace/attributes\"), exports);\n__exportStar(require(\"./trace/Event\"), exports);\n__exportStar(require(\"./trace/instrumentation/Plugin\"), exports);\n__exportStar(require(\"./trace/link_context\"), exports);\n__exportStar(require(\"./trace/link\"), exports);\n__exportStar(require(\"./trace/NoopSpan\"), exports);\n__exportStar(require(\"./trace/NoopTracer\"), exports);\n__exportStar(require(\"./trace/NoopTracerProvider\"), exports);\n__exportStar(require(\"./trace/Sampler\"), exports);\n__exportStar(require(\"./trace/SamplingResult\"), exports);\n__exportStar(require(\"./trace/span_context\"), exports);\n__exportStar(require(\"./trace/span_kind\"), exports);\n__exportStar(require(\"./trace/span\"), exports);\n__exportStar(require(\"./trace/SpanOptions\"), exports);\n__exportStar(require(\"./trace/status\"), exports);\n__exportStar(require(\"./trace/TimedEvent\"), exports);\n__exportStar(require(\"./trace/trace_flags\"), 
exports);\n__exportStar(require(\"./trace/trace_state\"), exports);\n__exportStar(require(\"./trace/tracer_provider\"), exports);\n__exportStar(require(\"./trace/tracer\"), exports);\nvar context_base_1 = require(\"@opentelemetry/context-base\");\nObject.defineProperty(exports, \"Context\", { enumerable: true, get: function () { return context_base_1.Context; } });\nvar context_1 = require(\"./api/context\");\n/** Entrypoint for context API */\nexports.context = context_1.ContextAPI.getInstance();\nvar trace_1 = require(\"./api/trace\");\n/** Entrypoint for trace API */\nexports.trace = trace_1.TraceAPI.getInstance();\nvar metrics_1 = require(\"./api/metrics\");\n/** Entrypoint for metrics API */\nexports.metrics = metrics_1.MetricsAPI.getInstance();\nvar propagation_1 = require(\"./api/propagation\");\n/** Entrypoint for propagation API */\nexports.propagation = propagation_1.PropagationAPI.getInstance();\nexports.default = {\n trace: exports.trace,\n metrics: exports.metrics,\n context: exports.context,\n propagation: exports.propagation,\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=BatchObserverResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=BoundInstrument.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Meter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may 
obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=MeterProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ValueType = void 0;\n/** The Type of value. It describes how the data is reported. */\nvar ValueType;\n(function (ValueType) {\n ValueType[ValueType[\"INT\"] = 0] = \"INT\";\n ValueType[ValueType[\"DOUBLE\"] = 1] = \"DOUBLE\";\n})(ValueType = exports.ValueType || (exports.ValueType = {}));\n//# sourceMappingURL=Metric.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __extends = (this && this.__extends) || (function () {\n var extendStatics = function (d, b) {\n extendStatics = Object.setPrototypeOf ||\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\n return extendStatics(d, b);\n };\n return function (d, b) {\n extendStatics(d, b);\n function __() { this.constructor = d; }\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\n };\n})();\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_BATCH_OBSERVER_METRIC = exports.NOOP_SUM_OBSERVER_METRIC = exports.NOOP_UP_DOWN_SUM_OBSERVER_METRIC = exports.NOOP_VALUE_OBSERVER_METRIC = exports.NOOP_BOUND_BASE_OBSERVER = exports.NOOP_VALUE_RECORDER_METRIC = exports.NOOP_BOUND_VALUE_RECORDER = exports.NOOP_COUNTER_METRIC = exports.NOOP_BOUND_COUNTER = exports.NOOP_METER = exports.NoopBoundBaseObserver = exports.NoopBoundValueRecorder = exports.NoopBoundCounter = exports.NoopBatchObserverMetric = exports.NoopBaseObserverMetric = exports.NoopValueRecorderMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0;\n/**\n * NoopMeter is a noop implementation of the {@link Meter} interface. 
It reuses\n * constant NoopMetrics for all of its methods.\n */\nvar NoopMeter = /** @class */ (function () {\n function NoopMeter() {\n }\n /**\n * Returns constant noop value recorder.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createValueRecorder = function (name, options) {\n return exports.NOOP_VALUE_RECORDER_METRIC;\n };\n /**\n * Returns a constant noop counter.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createCounter = function (name, options) {\n return exports.NOOP_COUNTER_METRIC;\n };\n /**\n * Returns a constant noop UpDownCounter.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createUpDownCounter = function (name, options) {\n return exports.NOOP_COUNTER_METRIC;\n };\n /**\n * Returns constant noop value observer.\n * @param name the name of the metric.\n * @param [options] the metric options.\n * @param [callback] the value observer callback\n */\n NoopMeter.prototype.createValueObserver = function (name, options, callback) {\n return exports.NOOP_VALUE_OBSERVER_METRIC;\n };\n /**\n * Returns constant noop batch observer.\n * @param name the name of the metric.\n * @param callback the batch observer callback\n */\n NoopMeter.prototype.createBatchObserver = function (name, callback) {\n return exports.NOOP_BATCH_OBSERVER_METRIC;\n };\n return NoopMeter;\n}());\nexports.NoopMeter = NoopMeter;\nvar NoopMetric = /** @class */ (function () {\n function NoopMetric(instrument) {\n this._instrument = instrument;\n }\n /**\n * Returns a Bound Instrument associated with specified Labels.\n * It is recommended to keep a reference to the Bound Instrument instead of\n * always calling this method for every operations.\n * @param labels key-values pairs that are associated with a specific metric\n * that you want to record.\n */\n NoopMetric.prototype.bind = function (labels) {\n return this._instrument;\n };\n /**\n * Removes the Binding from the metric, if it is present.\n * @param labels key-values pairs that are associated with a specific metric.\n */\n NoopMetric.prototype.unbind = function (labels) {\n return;\n };\n /**\n * Clears all timeseries from the Metric.\n */\n NoopMetric.prototype.clear = function () {\n return;\n };\n return NoopMetric;\n}());\nexports.NoopMetric = NoopMetric;\nvar NoopCounterMetric = /** @class */ (function (_super) {\n __extends(NoopCounterMetric, _super);\n function NoopCounterMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopCounterMetric.prototype.add = function (value, labels) {\n this.bind(labels).add(value);\n };\n return NoopCounterMetric;\n}(NoopMetric));\nexports.NoopCounterMetric = NoopCounterMetric;\nvar NoopValueRecorderMetric = /** @class */ (function (_super) {\n __extends(NoopValueRecorderMetric, _super);\n function NoopValueRecorderMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopValueRecorderMetric.prototype.record = function (value, labels, correlationContext, spanContext) {\n if (typeof correlationContext === 'undefined') {\n this.bind(labels).record(value);\n }\n else if (typeof spanContext === 'undefined') {\n this.bind(labels).record(value, correlationContext);\n }\n else {\n this.bind(labels).record(value, correlationContext, spanContext);\n }\n };\n return NoopValueRecorderMetric;\n}(NoopMetric));\nexports.NoopValueRecorderMetric = NoopValueRecorderMetric;\nvar 
NoopBaseObserverMetric = /** @class */ (function (_super) {\n __extends(NoopBaseObserverMetric, _super);\n function NoopBaseObserverMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopBaseObserverMetric.prototype.observation = function () {\n return {\n observer: this,\n value: 0,\n };\n };\n return NoopBaseObserverMetric;\n}(NoopMetric));\nexports.NoopBaseObserverMetric = NoopBaseObserverMetric;\nvar NoopBatchObserverMetric = /** @class */ (function (_super) {\n __extends(NoopBatchObserverMetric, _super);\n function NoopBatchObserverMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n return NoopBatchObserverMetric;\n}(NoopMetric));\nexports.NoopBatchObserverMetric = NoopBatchObserverMetric;\nvar NoopBoundCounter = /** @class */ (function () {\n function NoopBoundCounter() {\n }\n NoopBoundCounter.prototype.add = function (value) {\n return;\n };\n return NoopBoundCounter;\n}());\nexports.NoopBoundCounter = NoopBoundCounter;\nvar NoopBoundValueRecorder = /** @class */ (function () {\n function NoopBoundValueRecorder() {\n }\n NoopBoundValueRecorder.prototype.record = function (value, correlationContext, spanContext) {\n return;\n };\n return NoopBoundValueRecorder;\n}());\nexports.NoopBoundValueRecorder = NoopBoundValueRecorder;\nvar NoopBoundBaseObserver = /** @class */ (function () {\n function NoopBoundBaseObserver() {\n }\n NoopBoundBaseObserver.prototype.update = function (value) { };\n return NoopBoundBaseObserver;\n}());\nexports.NoopBoundBaseObserver = NoopBoundBaseObserver;\nexports.NOOP_METER = new NoopMeter();\nexports.NOOP_BOUND_COUNTER = new NoopBoundCounter();\nexports.NOOP_COUNTER_METRIC = new NoopCounterMetric(exports.NOOP_BOUND_COUNTER);\nexports.NOOP_BOUND_VALUE_RECORDER = new NoopBoundValueRecorder();\nexports.NOOP_VALUE_RECORDER_METRIC = new NoopValueRecorderMetric(exports.NOOP_BOUND_VALUE_RECORDER);\nexports.NOOP_BOUND_BASE_OBSERVER = new NoopBoundBaseObserver();\nexports.NOOP_VALUE_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_UP_DOWN_SUM_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_SUM_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_BATCH_OBSERVER_METRIC = new NoopBatchObserverMetric();\n//# sourceMappingURL=NoopMeter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0;\nvar NoopMeter_1 = require(\"./NoopMeter\");\n/**\n * An implementation of the {@link MeterProvider} which returns an impotent Meter\n * for all calls to `getMeter`\n */\nvar NoopMeterProvider = /** @class */ (function () {\n function NoopMeterProvider() {\n }\n NoopMeterProvider.prototype.getMeter = function (_name, _version) {\n return NoopMeter_1.NOOP_METER;\n };\n return 
NoopMeterProvider;\n}());\nexports.NoopMeterProvider = NoopMeterProvider;\nexports.NOOP_METER_PROVIDER = new NoopMeterProvider();\n//# sourceMappingURL=NoopMeterProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Observation.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=ObserverResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./node\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports._globalThis = void 0;\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexports._globalThis = typeof globalThis === 'object' ? globalThis : global;\n//# sourceMappingURL=globalThis.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./globalThis\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Event.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_SPAN = exports.NoopSpan = exports.INVALID_SPAN_ID = exports.INVALID_TRACE_ID = void 0;\nvar trace_flags_1 = require(\"./trace_flags\");\nexports.INVALID_TRACE_ID = '0';\nexports.INVALID_SPAN_ID = '0';\nvar INVALID_SPAN_CONTEXT = {\n traceId: exports.INVALID_TRACE_ID,\n spanId: exports.INVALID_SPAN_ID,\n traceFlags: trace_flags_1.TraceFlags.NONE,\n};\n/**\n * The NoopSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nvar NoopSpan = /** @class */ (function () {\n function NoopSpan(_spanContext) {\n if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; }\n this._spanContext = _spanContext;\n }\n // Returns a SpanContext.\n NoopSpan.prototype.context = function () {\n return this._spanContext;\n };\n // By default does nothing\n NoopSpan.prototype.setAttribute = function (key, value) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setAttributes = function (attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.addEvent = function (name, attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setStatus = function (status) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.updateName = function (name) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.end = function (endTime) { };\n // isRecording always returns false for noopSpan.\n NoopSpan.prototype.isRecording = function () {\n return false;\n };\n return NoopSpan;\n}());\nexports.NoopSpan = NoopSpan;\nexports.NOOP_SPAN = new NoopSpan();\n//# sourceMappingURL=NoopSpan.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER = exports.NoopTracer = void 0;\nvar NoopSpan_1 = require(\"./NoopSpan\");\n/**\n * No-op implementations of {@link Tracer}.\n */\nvar NoopTracer = /** @class */ (function () {\n function NoopTracer() {\n }\n NoopTracer.prototype.getCurrentSpan = function () {\n return NoopSpan_1.NOOP_SPAN;\n };\n // startSpan starts a noop span.\n NoopTracer.prototype.startSpan = function (name, options) {\n return NoopSpan_1.NOOP_SPAN;\n };\n NoopTracer.prototype.withSpan = function (span, fn) {\n return fn();\n };\n NoopTracer.prototype.bind = function (target, span) {\n return target;\n };\n return NoopTracer;\n}());\nexports.NoopTracer = NoopTracer;\nexports.NOOP_TRACER = new NoopTracer();\n//# sourceMappingURL=NoopTracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0;\nvar NoopTracer_1 = require(\"./NoopTracer\");\n/**\n * An implementation of the {@link 
TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nvar NoopTracerProvider = /** @class */ (function () {\n function NoopTracerProvider() {\n }\n NoopTracerProvider.prototype.getTracer = function (_name, _version) {\n return NoopTracer_1.NOOP_TRACER;\n };\n return NoopTracerProvider;\n}());\nexports.NoopTracerProvider = NoopTracerProvider;\nexports.NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n//# sourceMappingURL=NoopTracerProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Sampler.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SamplingDecision = void 0;\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nvar SamplingDecision;\n(function (SamplingDecision) {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n SamplingDecision[SamplingDecision[\"NOT_RECORD\"] = 0] = \"NOT_RECORD\";\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD\"] = 1] = \"RECORD\";\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD_AND_SAMPLED\"] = 2] = \"RECORD_AND_SAMPLED\";\n})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));\n//# sourceMappingURL=SamplingResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: 
true });\n//# sourceMappingURL=SpanOptions.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=TimedEvent.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=attributes.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Plugin.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link_context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License 
is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span_context.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SpanKind = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar SpanKind;\n(function (SpanKind) {\n /** Default value. Indicates that the span is used internally. */\n SpanKind[SpanKind[\"INTERNAL\"] = 0] = \"INTERNAL\";\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SpanKind[SpanKind[\"SERVER\"] = 1] = \"SERVER\";\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n SpanKind[SpanKind[\"CLIENT\"] = 2] = \"CLIENT\";\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"PRODUCER\"] = 3] = \"PRODUCER\";\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"CONSUMER\"] = 4] = \"CONSUMER\";\n})(SpanKind = exports.SpanKind || (exports.SpanKind = {}));\n//# sourceMappingURL=span_kind.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CanonicalCode = void 0;\n/**\n * An enumeration of canonical status codes.\n */\nvar CanonicalCode;\n(function (CanonicalCode) {\n /**\n * Not an error; returned on success\n */\n CanonicalCode[CanonicalCode[\"OK\"] = 0] = \"OK\";\n /**\n * The operation was cancelled (typically by the caller).\n */\n CanonicalCode[CanonicalCode[\"CANCELLED\"] = 1] = \"CANCELLED\";\n /**\n * Unknown error. 
An example of where this error may be returned is\n * if a status value received from another address space belongs to\n * an error-space that is not known in this address space. Also\n * errors raised by APIs that do not return enough error information\n * may be converted to this error.\n */\n CanonicalCode[CanonicalCode[\"UNKNOWN\"] = 2] = \"UNKNOWN\";\n /**\n * Client specified an invalid argument. Note that this differs\n * from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments\n * that are problematic regardless of the state of the system\n * (e.g., a malformed file name).\n */\n CanonicalCode[CanonicalCode[\"INVALID_ARGUMENT\"] = 3] = \"INVALID_ARGUMENT\";\n /**\n * Deadline expired before operation could complete. For operations\n * that change the state of the system, this error may be returned\n * even if the operation has completed successfully. For example, a\n * successful response from a server could have been delayed long\n * enough for the deadline to expire.\n */\n CanonicalCode[CanonicalCode[\"DEADLINE_EXCEEDED\"] = 4] = \"DEADLINE_EXCEEDED\";\n /**\n * Some requested entity (e.g., file or directory) was not found.\n */\n CanonicalCode[CanonicalCode[\"NOT_FOUND\"] = 5] = \"NOT_FOUND\";\n /**\n * Some entity that we attempted to create (e.g., file or directory)\n * already exists.\n */\n CanonicalCode[CanonicalCode[\"ALREADY_EXISTS\"] = 6] = \"ALREADY_EXISTS\";\n /**\n * The caller does not have permission to execute the specified\n * operation. PERMISSION_DENIED must not be used for rejections\n * caused by exhausting some resource (use RESOURCE_EXHAUSTED\n * instead for those errors). PERMISSION_DENIED must not be\n * used if the caller can not be identified (use UNAUTHENTICATED\n * instead for those errors).\n */\n CanonicalCode[CanonicalCode[\"PERMISSION_DENIED\"] = 7] = \"PERMISSION_DENIED\";\n /**\n * Some resource has been exhausted, perhaps a per-user quota, or\n * perhaps the entire file system is out of space.\n */\n CanonicalCode[CanonicalCode[\"RESOURCE_EXHAUSTED\"] = 8] = \"RESOURCE_EXHAUSTED\";\n /**\n * Operation was rejected because the system is not in a state\n * required for the operation's execution. For example, directory\n * to be deleted may be non-empty, an rmdir operation is applied to\n * a non-directory, etc.\n *\n * A litmus test that may help a service implementor in deciding\n * between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:\n *\n * - Use UNAVAILABLE if the client can retry just the failing call.\n * - Use ABORTED if the client should retry at a higher-level\n * (e.g., restarting a read-modify-write sequence).\n * - Use FAILED_PRECONDITION if the client should not retry until\n * the system state has been explicitly fixed. E.g., if an \"rmdir\"\n * fails because the directory is non-empty, FAILED_PRECONDITION\n * should be returned since the client should not retry unless\n * they have first fixed up the directory by deleting files from it.\n * - Use FAILED_PRECONDITION if the client performs conditional\n * REST Get/Update/Delete on a resource and the resource on the\n * server does not match the condition. 
E.g., conflicting\n * read-modify-write on the same resource.\n */\n CanonicalCode[CanonicalCode[\"FAILED_PRECONDITION\"] = 9] = \"FAILED_PRECONDITION\";\n /**\n * The operation was aborted, typically due to a concurrency issue\n * like sequencer check failures, transaction aborts, etc.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION,\n * ABORTED, and UNAVAILABLE.\n */\n CanonicalCode[CanonicalCode[\"ABORTED\"] = 10] = \"ABORTED\";\n /**\n * Operation was attempted past the valid range. E.g., seeking or\n * reading past end of file.\n *\n * Unlike INVALID_ARGUMENT, this error indicates a problem that may\n * be fixed if the system state changes. For example, a 32-bit file\n * system will generate INVALID_ARGUMENT if asked to read at an\n * offset that is not in the range [0,2^32-1], but it will generate\n * OUT_OF_RANGE if asked to read from an offset past the current\n * file size.\n *\n * There is a fair bit of overlap between FAILED_PRECONDITION and\n * OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific\n * error) when it applies so that callers who are iterating through\n * a space can easily look for an OUT_OF_RANGE error to detect when\n * they are done.\n */\n CanonicalCode[CanonicalCode[\"OUT_OF_RANGE\"] = 11] = \"OUT_OF_RANGE\";\n /**\n * Operation is not implemented or not supported/enabled in this service.\n */\n CanonicalCode[CanonicalCode[\"UNIMPLEMENTED\"] = 12] = \"UNIMPLEMENTED\";\n /**\n * Internal errors. Means some invariants expected by underlying\n * system has been broken. If you see one of these errors,\n * something is very broken.\n */\n CanonicalCode[CanonicalCode[\"INTERNAL\"] = 13] = \"INTERNAL\";\n /**\n * The service is currently unavailable. This is a most likely a\n * transient condition and may be corrected by retrying with\n * a backoff.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION,\n * ABORTED, and UNAVAILABLE.\n */\n CanonicalCode[CanonicalCode[\"UNAVAILABLE\"] = 14] = \"UNAVAILABLE\";\n /**\n * Unrecoverable data loss or corruption.\n */\n CanonicalCode[CanonicalCode[\"DATA_LOSS\"] = 15] = \"DATA_LOSS\";\n /**\n * The request does not have valid authentication credentials for the\n * operation.\n */\n CanonicalCode[CanonicalCode[\"UNAUTHENTICATED\"] = 16] = \"UNAUTHENTICATED\";\n})(CanonicalCode = exports.CanonicalCode || (exports.CanonicalCode = {}));\n//# sourceMappingURL=status.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceFlags = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar TraceFlags;\n(function (TraceFlags) {\n /** Represents no flag set. */\n TraceFlags[TraceFlags[\"NONE\"] = 0] = \"NONE\";\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n TraceFlags[TraceFlags[\"SAMPLED\"] = 1] = \"SAMPLED\";\n})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {}));\n//# sourceMappingURL=trace_flags.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=trace_state.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer_provider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NoopContextManager = void 0;\nvar context_1 = require(\"./context\");\nvar NoopContextManager = /** @class */ (function () {\n function NoopContextManager() {\n }\n NoopContextManager.prototype.active = function () {\n return context_1.Context.ROOT_CONTEXT;\n };\n NoopContextManager.prototype.with = function (context, fn) {\n return fn();\n };\n NoopContextManager.prototype.bind = function (target, context) {\n return target;\n };\n NoopContextManager.prototype.enable = function () {\n return this;\n };\n NoopContextManager.prototype.disable = function () {\n return this;\n };\n return NoopContextManager;\n}());\nexports.NoopContextManager = NoopContextManager;\n//# sourceMappingURL=NoopContextManager.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License 
at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar Context = /** @class */ (function () {\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n function Context(parentContext) {\n this._currentContext = parentContext ? new Map(parentContext) : new Map();\n }\n /** Get a key to uniquely identify a context value */\n Context.createKey = function (description) {\n return Symbol(description);\n };\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n Context.prototype.getValue = function (key) {\n return this._currentContext.get(key);\n };\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n Context.prototype.setValue = function (key, value) {\n var context = new Context(this._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n Context.prototype.deleteValue = function (key) {\n var context = new Context(this._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n /** The root context is used as the default parent context when there is no active context */\n Context.ROOT_CONTEXT = new Context();\n /**\n * This is another identifier to the root context which allows developers to easily search the\n * codebase for direct uses of context which need to be removed in later PRs.\n *\n * It's existence is temporary and it should be removed when all references are fixed.\n */\n Context.TODO = Context.ROOT_CONTEXT;\n return Context;\n}());\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./types\"), exports);\n__exportStar(require(\"./context\"), exports);\n__exportStar(require(\"./NoopContextManager\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=types.js.map","module.exports =\n{\n parallel : require('./parallel.js'),\n serial : require('./serial.js'),\n serialOrdered : require('./serialOrdered.js')\n};\n","// API\nmodule.exports = abort;\n\n/**\n * Aborts leftover active jobs\n *\n * @param {object} state - current state object\n */\nfunction abort(state)\n{\n Object.keys(state.jobs).forEach(clean.bind(state));\n\n // reset leftover jobs\n state.jobs = {};\n}\n\n/**\n * Cleans up leftover job by invoking abort function for the provided job id\n *\n * @this state\n * @param {string|number} key - job id to abort\n */\nfunction clean(key)\n{\n if (typeof this.jobs[key] == 'function')\n {\n this.jobs[key]();\n }\n}\n","var defer = require('./defer.js');\n\n// API\nmodule.exports = async;\n\n/**\n * Runs provided callback asynchronously\n * even if callback itself is not\n *\n * @param {function} callback - callback to invoke\n * @returns {function} - augmented callback\n */\nfunction async(callback)\n{\n var isAsync = false;\n\n // check if async happened\n defer(function() { isAsync = true; });\n\n return function async_callback(err, result)\n {\n if (isAsync)\n {\n callback(err, result);\n }\n else\n {\n defer(function nextTick_callback()\n {\n callback(err, result);\n });\n }\n };\n}\n","module.exports = defer;\n\n/**\n * Runs provided function on next iteration of the event loop\n *\n * @param {function} fn - function to run\n */\nfunction defer(fn)\n{\n var nextTick = typeof setImmediate == 'function'\n ? setImmediate\n : (\n typeof process == 'object' && typeof process.nextTick == 'function'\n ? 
process.nextTick\n : null\n );\n\n if (nextTick)\n {\n nextTick(fn);\n }\n else\n {\n setTimeout(fn, 0);\n }\n}\n","var async = require('./async.js')\n , abort = require('./abort.js')\n ;\n\n// API\nmodule.exports = iterate;\n\n/**\n * Iterates over each job object\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {object} state - current job status\n * @param {function} callback - invoked when all elements processed\n */\nfunction iterate(list, iterator, state, callback)\n{\n // store current index\n var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;\n\n state.jobs[key] = runJob(iterator, key, list[key], function(error, output)\n {\n // don't repeat yourself\n // skip secondary callbacks\n if (!(key in state.jobs))\n {\n return;\n }\n\n // clean up jobs\n delete state.jobs[key];\n\n if (error)\n {\n // don't process rest of the results\n // stop still active jobs\n // and reset the list\n abort(state);\n }\n else\n {\n state.results[key] = output;\n }\n\n // return salvaged results\n callback(error, state.results);\n });\n}\n\n/**\n * Runs iterator over provided job element\n *\n * @param {function} iterator - iterator to invoke\n * @param {string|number} key - key/index of the element in the list of jobs\n * @param {mixed} item - job description\n * @param {function} callback - invoked after iterator is done with the job\n * @returns {function|mixed} - job abort function or something else\n */\nfunction runJob(iterator, key, item, callback)\n{\n var aborter;\n\n // allow shortcut if iterator expects only two arguments\n if (iterator.length == 2)\n {\n aborter = iterator(item, async(callback));\n }\n // otherwise go with full three arguments\n else\n {\n aborter = iterator(item, key, async(callback));\n }\n\n return aborter;\n}\n","// API\nmodule.exports = state;\n\n/**\n * Creates initial state object\n * for iteration over list\n *\n * @param {array|object} list - list to iterate over\n * @param {function|null} sortMethod - function to use for keys sort,\n * or `null` to keep them as is\n * @returns {object} - initial state object\n */\nfunction state(list, sortMethod)\n{\n var isNamedList = !Array.isArray(list)\n , initState =\n {\n index : 0,\n keyedList: isNamedList || sortMethod ? Object.keys(list) : null,\n jobs : {},\n results : isNamedList ? {} : [],\n size : isNamedList ? Object.keys(list).length : list.length\n }\n ;\n\n if (sortMethod)\n {\n // sort array keys based on it's values\n // sort object's keys just on own merit\n initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b)\n {\n return sortMethod(list[a], list[b]);\n });\n }\n\n return initState;\n}\n","var abort = require('./abort.js')\n , async = require('./async.js')\n ;\n\n// API\nmodule.exports = terminator;\n\n/**\n * Terminates jobs in the attached state context\n *\n * @this AsyncKitState#\n * @param {function} callback - final callback to invoke after termination\n */\nfunction terminator(callback)\n{\n if (!Object.keys(this.jobs).length)\n {\n return;\n }\n\n // fast forward iteration index\n this.index = this.size;\n\n // abort jobs\n abort(this);\n\n // send back results we have so far\n async(callback)(null, this.results);\n}\n","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = parallel;\n\n/**\n * Runs iterator over provided array elements in parallel\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction parallel(list, iterator, callback)\n{\n var state = initState(list);\n\n while (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, function(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n // looks like it's the last one\n if (Object.keys(state.jobs).length === 0)\n {\n callback(null, state.results);\n return;\n }\n });\n\n state.index++;\n }\n\n return terminator.bind(state, callback);\n}\n","var serialOrdered = require('./serialOrdered.js');\n\n// Public API\nmodule.exports = serial;\n\n/**\n * Runs iterator over provided array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serial(list, iterator, callback)\n{\n return serialOrdered(list, iterator, null, callback);\n}\n","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = serialOrdered;\n// sorting helpers\nmodule.exports.ascending = ascending;\nmodule.exports.descending = descending;\n\n/**\n * Runs iterator over provided sorted array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} sortMethod - custom sort function\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serialOrdered(list, iterator, sortMethod, callback)\n{\n var state = initState(list, sortMethod);\n\n iterate(list, iterator, state, function iteratorHandler(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n state.index++;\n\n // are we there yet?\n if (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, iteratorHandler);\n return;\n }\n\n // done here\n callback(null, state.results);\n });\n\n return terminator.bind(state, callback);\n}\n\n/*\n * -- Sort methods\n */\n\n/**\n * sort helper to sort array elements in ascending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n 
*/\nfunction ascending(a, b)\n{\n return a < b ? -1 : a > b ? 1 : 0;\n}\n\n/**\n * sort helper to sort array elements in descending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n */\nfunction descending(a, b)\n{\n return -1 * ascending(a, b);\n}\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","var util = require('util');\nvar Stream = require('stream').Stream;\nvar DelayedStream = require('delayed-stream');\n\nmodule.exports = CombinedStream;\nfunction CombinedStream() {\n this.writable = false;\n this.readable = true;\n this.dataSize = 0;\n this.maxDataSize = 2 * 1024 * 1024;\n this.pauseStreams = true;\n\n this._released = false;\n this._streams = [];\n this._currentStream = null;\n this._insideLoop = false;\n this._pendingNext = false;\n}\nutil.inherits(CombinedStream, Stream);\n\nCombinedStream.create = function(options) {\n var combinedStream = new this();\n\n options = options || {};\n for (var option in options) {\n combinedStream[option] = options[option];\n }\n\n return combinedStream;\n};\n\nCombinedStream.isStreamLike = function(stream) {\n return (typeof stream !== 'function')\n && (typeof stream !== 'string')\n && (typeof stream !== 'boolean')\n && (typeof stream !== 'number')\n && (!Buffer.isBuffer(stream));\n};\n\nCombinedStream.prototype.append = function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n\n if (isStreamLike) {\n if (!(stream instanceof DelayedStream)) {\n var newStream = DelayedStream.create(stream, {\n maxDataSize: Infinity,\n pauseStream: this.pauseStreams,\n });\n stream.on('data', this._checkDataSize.bind(this));\n stream = newStream;\n }\n\n this._handleErrors(stream);\n\n if (this.pauseStreams) {\n stream.pause();\n }\n }\n\n this._streams.push(stream);\n return this;\n};\n\nCombinedStream.prototype.pipe = function(dest, options) {\n Stream.prototype.pipe.call(this, dest, options);\n this.resume();\n return dest;\n};\n\nCombinedStream.prototype._getNext = function() {\n this._currentStream = null;\n\n if (this._insideLoop) {\n this._pendingNext = true;\n return; // defer call\n }\n\n this._insideLoop = true;\n try {\n do {\n this._pendingNext = false;\n this._realGetNext();\n } while (this._pendingNext);\n } finally {\n this._insideLoop = false;\n }\n};\n\nCombinedStream.prototype._realGetNext = function() {\n var stream = this._streams.shift();\n\n\n if (typeof stream == 'undefined') {\n this.end();\n return;\n }\n\n if (typeof stream !== 'function') {\n this._pipeNext(stream);\n return;\n }\n\n var getStream = stream;\n getStream(function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('data', this._checkDataSize.bind(this));\n this._handleErrors(stream);\n }\n\n this._pipeNext(stream);\n }.bind(this));\n};\n\nCombinedStream.prototype._pipeNext = function(stream) {\n this._currentStream = stream;\n\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('end', this._getNext.bind(this));\n stream.pipe(this, {end: 
false});\n return;\n }\n\n var value = stream;\n this.write(value);\n this._getNext();\n};\n\nCombinedStream.prototype._handleErrors = function(stream) {\n var self = this;\n stream.on('error', function(err) {\n self._emitError(err);\n });\n};\n\nCombinedStream.prototype.write = function(data) {\n this.emit('data', data);\n};\n\nCombinedStream.prototype.pause = function() {\n if (!this.pauseStreams) {\n return;\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();\n this.emit('pause');\n};\n\nCombinedStream.prototype.resume = function() {\n if (!this._released) {\n this._released = true;\n this.writable = true;\n this._getNext();\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();\n this.emit('resume');\n};\n\nCombinedStream.prototype.end = function() {\n this._reset();\n this.emit('end');\n};\n\nCombinedStream.prototype.destroy = function() {\n this._reset();\n this.emit('close');\n};\n\nCombinedStream.prototype._reset = function() {\n this.writable = false;\n this._streams = [];\n this._currentStream = null;\n};\n\nCombinedStream.prototype._checkDataSize = function() {\n this._updateDataSize();\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';\n this._emitError(new Error(message));\n};\n\nCombinedStream.prototype._updateDataSize = function() {\n this.dataSize = 0;\n\n var self = this;\n this._streams.forEach(function(stream) {\n if (!stream.dataSize) {\n return;\n }\n\n self.dataSize += stream.dataSize;\n });\n\n if (this._currentStream && this._currentStream.dataSize) {\n this.dataSize += this._currentStream.dataSize;\n }\n};\n\nCombinedStream.prototype._emitError = function(err) {\n this._reset();\n this.emit('error', err);\n};\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","var Stream = require('stream').Stream;\nvar util = require('util');\n\nmodule.exports = DelayedStream;\nfunction DelayedStream() {\n this.source = null;\n this.dataSize = 0;\n this.maxDataSize = 1024 * 1024;\n this.pauseStream = true;\n\n this._maxDataSizeExceeded = false;\n this._released = false;\n this._bufferedEvents = [];\n}\nutil.inherits(DelayedStream, Stream);\n\nDelayedStream.create = function(source, options) {\n var delayedStream = new this();\n\n options = options || {};\n for (var option in options) {\n delayedStream[option] = options[option];\n }\n\n delayedStream.source = source;\n\n var realEmit = source.emit;\n source.emit = function() {\n delayedStream._handleEmit(arguments);\n return realEmit.apply(source, arguments);\n };\n\n source.on('error', function() {});\n if (delayedStream.pauseStream) {\n source.pause();\n }\n\n return delayedStream;\n};\n\nObject.defineProperty(DelayedStream.prototype, 'readable', {\n configurable: true,\n enumerable: true,\n get: function() {\n return this.source.readable;\n }\n});\n\nDelayedStream.prototype.setEncoding = function() {\n return this.source.setEncoding.apply(this.source, arguments);\n};\n\nDelayedStream.prototype.resume = function() {\n if (!this._released) {\n this.release();\n }\n\n 
this.source.resume();\n};\n\nDelayedStream.prototype.pause = function() {\n this.source.pause();\n};\n\nDelayedStream.prototype.release = function() {\n this._released = true;\n\n this._bufferedEvents.forEach(function(args) {\n this.emit.apply(this, args);\n }.bind(this));\n this._bufferedEvents = [];\n};\n\nDelayedStream.prototype.pipe = function() {\n var r = Stream.prototype.pipe.apply(this, arguments);\n this.resume();\n return r;\n};\n\nDelayedStream.prototype._handleEmit = function(args) {\n if (this._released) {\n this.emit.apply(this, args);\n return;\n }\n\n if (args[0] === 'data') {\n this.dataSize += args[1].length;\n this._checkIfMaxDataSizeExceeded();\n }\n\n this._bufferedEvents.push(args);\n};\n\nDelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {\n if (this._maxDataSizeExceeded) {\n return;\n }\n\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n this._maxDataSizeExceeded = true;\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'\n this.emit('error', new Error(message));\n};\n","'use strict'\n\n// A linked list to keep track of recently-used-ness\nconst Yallist = require('yallist')\n\nconst MAX = Symbol('max')\nconst LENGTH = Symbol('length')\nconst LENGTH_CALCULATOR = Symbol('lengthCalculator')\nconst ALLOW_STALE = Symbol('allowStale')\nconst MAX_AGE = Symbol('maxAge')\nconst DISPOSE = Symbol('dispose')\nconst NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')\nconst LRU_LIST = Symbol('lruList')\nconst CACHE = Symbol('cache')\nconst UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')\n\nconst naiveLength = () => 1\n\n// lruList is a yallist where the head is the youngest\n// item, and the tail is the oldest. the list contains the Hit\n// objects as the entries.\n// Each Hit object has a reference to its Yallist.Node. This\n// never changes.\n//\n// cache is a Map (or PseudoMap) that matches the keys to\n// the Yallist.Node object.\nclass LRUCache {\n constructor (options) {\n if (typeof options === 'number')\n options = { max: options }\n\n if (!options)\n options = {}\n\n if (options.max && (typeof options.max !== 'number' || options.max < 0))\n throw new TypeError('max must be a non-negative number')\n // Kind of weird to have a default max of Infinity, but oh well.\n const max = this[MAX] = options.max || Infinity\n\n const lc = options.length || naiveLength\n this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc\n this[ALLOW_STALE] = options.stale || false\n if (options.maxAge && typeof options.maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n this[MAX_AGE] = options.maxAge || 0\n this[DISPOSE] = options.dispose\n this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false\n this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false\n this.reset()\n }\n\n // resize the cache when the max changes.\n set max (mL) {\n if (typeof mL !== 'number' || mL < 0)\n throw new TypeError('max must be a non-negative number')\n\n this[MAX] = mL || Infinity\n trim(this)\n }\n get max () {\n return this[MAX]\n }\n\n set allowStale (allowStale) {\n this[ALLOW_STALE] = !!allowStale\n }\n get allowStale () {\n return this[ALLOW_STALE]\n }\n\n set maxAge (mA) {\n if (typeof mA !== 'number')\n throw new TypeError('maxAge must be a non-negative number')\n\n this[MAX_AGE] = mA\n trim(this)\n }\n get maxAge () {\n return this[MAX_AGE]\n }\n\n // resize the cache when the lengthCalculator changes.\n set lengthCalculator (lC) {\n if (typeof lC !== 'function')\n lC = naiveLength\n\n if (lC !== this[LENGTH_CALCULATOR]) {\n this[LENGTH_CALCULATOR] = lC\n this[LENGTH] = 0\n this[LRU_LIST].forEach(hit => {\n hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)\n this[LENGTH] += hit.length\n })\n }\n trim(this)\n }\n get lengthCalculator () { return this[LENGTH_CALCULATOR] }\n\n get length () { return this[LENGTH] }\n get itemCount () { return this[LRU_LIST].length }\n\n rforEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].tail; walker !== null;) {\n const prev = walker.prev\n forEachStep(this, fn, walker, thisp)\n walker = prev\n }\n }\n\n forEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].head; walker !== null;) {\n const next = walker.next\n forEachStep(this, fn, walker, thisp)\n walker = next\n }\n }\n\n keys () {\n return this[LRU_LIST].toArray().map(k => k.key)\n }\n\n values () {\n return this[LRU_LIST].toArray().map(k => k.value)\n }\n\n reset () {\n if (this[DISPOSE] &&\n this[LRU_LIST] &&\n this[LRU_LIST].length) {\n this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))\n }\n\n this[CACHE] = new Map() // hash of items by key\n this[LRU_LIST] = new Yallist() // list of items in order of use recency\n this[LENGTH] = 0 // length of items in the list\n }\n\n dump () {\n return this[LRU_LIST].map(hit =>\n isStale(this, hit) ? false : {\n k: hit.key,\n v: hit.value,\n e: hit.now + (hit.maxAge || 0)\n }).toArray().filter(h => h)\n }\n\n dumpLru () {\n return this[LRU_LIST]\n }\n\n set (key, value, maxAge) {\n maxAge = maxAge || this[MAX_AGE]\n\n if (maxAge && typeof maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n\n const now = maxAge ? 
Date.now() : 0\n const len = this[LENGTH_CALCULATOR](value, key)\n\n if (this[CACHE].has(key)) {\n if (len > this[MAX]) {\n del(this, this[CACHE].get(key))\n return false\n }\n\n const node = this[CACHE].get(key)\n const item = node.value\n\n // dispose of the old one before overwriting\n // split out into 2 ifs for better coverage tracking\n if (this[DISPOSE]) {\n if (!this[NO_DISPOSE_ON_SET])\n this[DISPOSE](key, item.value)\n }\n\n item.now = now\n item.maxAge = maxAge\n item.value = value\n this[LENGTH] += len - item.length\n item.length = len\n this.get(key)\n trim(this)\n return true\n }\n\n const hit = new Entry(key, value, len, now, maxAge)\n\n // oversized objects fall out of cache automatically.\n if (hit.length > this[MAX]) {\n if (this[DISPOSE])\n this[DISPOSE](key, value)\n\n return false\n }\n\n this[LENGTH] += hit.length\n this[LRU_LIST].unshift(hit)\n this[CACHE].set(key, this[LRU_LIST].head)\n trim(this)\n return true\n }\n\n has (key) {\n if (!this[CACHE].has(key)) return false\n const hit = this[CACHE].get(key).value\n return !isStale(this, hit)\n }\n\n get (key) {\n return get(this, key, true)\n }\n\n peek (key) {\n return get(this, key, false)\n }\n\n pop () {\n const node = this[LRU_LIST].tail\n if (!node)\n return null\n\n del(this, node)\n return node.value\n }\n\n del (key) {\n del(this, this[CACHE].get(key))\n }\n\n load (arr) {\n // reset the cache\n this.reset()\n\n const now = Date.now()\n // A previous serialized cache has the most recent items first\n for (let l = arr.length - 1; l >= 0; l--) {\n const hit = arr[l]\n const expiresAt = hit.e || 0\n if (expiresAt === 0)\n // the item was created without expiration in a non aged cache\n this.set(hit.k, hit.v)\n else {\n const maxAge = expiresAt - now\n // dont add already expired items\n if (maxAge > 0) {\n this.set(hit.k, hit.v, maxAge)\n }\n }\n }\n }\n\n prune () {\n this[CACHE].forEach((value, key) => get(this, key, false))\n }\n}\n\nconst get = (self, key, doUse) => {\n const node = self[CACHE].get(key)\n if (node) {\n const hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n return undefined\n } else {\n if (doUse) {\n if (self[UPDATE_AGE_ON_GET])\n node.value.now = Date.now()\n self[LRU_LIST].unshiftNode(node)\n }\n }\n return hit.value\n }\n}\n\nconst isStale = (self, hit) => {\n if (!hit || (!hit.maxAge && !self[MAX_AGE]))\n return false\n\n const diff = Date.now() - hit.now\n return hit.maxAge ? 
diff > hit.maxAge\n : self[MAX_AGE] && (diff > self[MAX_AGE])\n}\n\nconst trim = self => {\n if (self[LENGTH] > self[MAX]) {\n for (let walker = self[LRU_LIST].tail;\n self[LENGTH] > self[MAX] && walker !== null;) {\n // We know that we're about to delete this one, and also\n // what the next least recently used key will be, so just\n // go ahead and set it now.\n const prev = walker.prev\n del(self, walker)\n walker = prev\n }\n }\n}\n\nconst del = (self, node) => {\n if (node) {\n const hit = node.value\n if (self[DISPOSE])\n self[DISPOSE](hit.key, hit.value)\n\n self[LENGTH] -= hit.length\n self[CACHE].delete(hit.key)\n self[LRU_LIST].removeNode(node)\n }\n}\n\nclass Entry {\n constructor (key, value, length, now, maxAge) {\n this.key = key\n this.value = value\n this.length = length\n this.now = now\n this.maxAge = maxAge || 0\n }\n}\n\nconst forEachStep = (self, fn, node, thisp) => {\n let hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n hit = undefined\n }\n if (hit)\n fn.call(thisp, hit.value, hit.key, self)\n}\n\nmodule.exports = LRUCache\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if (match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? 
exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' + path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","module.exports = minimatch\nminimatch.Minimatch = Minimatch\n\nvar path = { sep: '/' }\ntry {\n path = require('path')\n} catch (er) {}\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. 
and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n a = a || {}\n b = b || {}\n var t = {}\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return minimatch\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig.minimatch(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return Minimatch\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n // \"\" only matches \"\"\n if (pattern.trim() === '') return p === ''\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n // don't do it more than once.\n if (this._made) return\n\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if (!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = this.braceExpand()\n\n if (options.debug) this.debug = console.error\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n 
this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n if (typeof pattern === 'undefined') {\n throw new TypeError('undefined pattern')\n }\n\n if (options.nobrace ||\n !pattern.match(/\\{.*\\}/)) {\n // shortcut. no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n if (pattern.length > 1024 * 64) {\n throw new TypeError('pattern is too long')\n }\n\n var options = this.options\n\n // shortcuts\n if (!options.noglobstar && pattern === '**') return GLOBSTAR\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? => one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. followed by / or end)\n : options.dot ? 
'(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n case '/':\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? '(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)\n re += pl.close\n if (pl.type === '!') {\n negativeLists.push(pl)\n }\n pl.reEnd = re.length\n continue\n\n case '|':\n if (inClass || !patternListStack.length || escaping) {\n re += '\\\\|'\n escaping = false\n continue\n }\n\n clearStateChar()\n re += '|'\n continue\n\n // these are mostly the same in regexp and glob\n case '[':\n // swallow any state-tracking char before the [\n clearStateChar()\n\n if (inClass) {\n re += '\\\\' + c\n continue\n }\n\n inClass = true\n classStart = i\n reClassStart = re.length\n re += c\n continue\n\n case ']':\n // a right bracket shall lose its special\n // meaning and represent itself in\n // a bracket expression if it occurs\n // first in the list. -- POSIX.2 2.8.3.2\n if (i === classStart + 1 || !inClass) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n // handle the case where we left a class open.\n // \"[z-a]\" is valid, equivalent to \"\\[z-a\\]\"\n if (inClass) {\n // split where the last [ was, make sure we don't have\n // an invalid re. 
if so, re-walk the contents of the\n // would-be class to re-translate any characters that\n // were passed through as-is\n // TODO: It would probably be faster to determine this\n // without a try/catch and a new RegExp, but it's tricky\n // to do safely. For now, this is safe and works.\n var cs = pattern.substring(classStart + 1, i)\n try {\n RegExp('[' + cs + ']')\n } catch (er) {\n // not a valid class!\n var sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0] + '\\\\]'\n hasMagic = hasMagic || sp[1]\n inClass = false\n continue\n }\n }\n\n // finish up the class.\n hasMagic = true\n inClass = false\n re += c\n continue\n\n default:\n // swallow any state char that wasn't consumed\n clearStateChar()\n\n if (escaping) {\n // no need\n escaping = false\n } else if (reSpecials[c]\n && !(c === '^' && inClass)) {\n re += '\\\\'\n }\n\n re += c\n\n } // switch\n } // for\n\n // handle the case where we left a class open.\n // \"[abc\" is valid, equivalent to \"\\[abc\"\n if (inClass) {\n // split where the last [ was, and escape it\n // this is a huge pita. We now have to re-walk\n // the contents of the would-be class to re-translate\n // any characters that were passed through as-is\n cs = pattern.substr(classStart + 1)\n sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0]\n hasMagic = hasMagic || sp[1]\n }\n\n // handle the case where we had a +( thing at the *end*\n // of the pattern.\n // each pattern list stack adds 3 chars, and we need to go through\n // and escape any | chars that were passed through as-is for the regexp.\n // Go through and escape them, taking care not to double-escape any\n // | chars that were already escaped.\n for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n var tail = re.slice(pl.reStart + pl.open.length)\n this.debug('setting tail', re, pl)\n // maybe some even number of \\, then maybe 1 \\, followed by a |\n tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, function (_, $1, $2) {\n if (!$2) {\n // the | isn't already escaped, so escape it.\n $2 = '\\\\'\n }\n\n // need to escape all those slashes *again*, without escaping the\n // one that we need for escaping the | character. As it works out,\n // escaping an even number of slashes can be done by simply repeating\n // it exactly after itself. That's why this trick works.\n //\n // I am sorry that you have to see this.\n return $1 + $1 + $2 + '|'\n })\n\n this.debug('tail=%j\\n %s', tail, tail, pl, re)\n var t = pl.type === '*' ? star\n : pl.type === '?' ? qmark\n : '\\\\' + pl.type\n\n hasMagic = true\n re = re.slice(0, pl.reStart) + t + '\\\\(' + tail\n }\n\n // handle trailing things that only matter at the very end.\n clearStateChar()\n if (escaping) {\n // trailing \\\\\n re += '\\\\\\\\'\n }\n\n // only need to apply the nodot start if the re starts with\n // something that could conceivably capture a dot\n var addPatternStart = false\n switch (re.charAt(0)) {\n case '.':\n case '[':\n case '(': addPatternStart = true\n }\n\n // Hack to work around lack of negative lookbehind in JS\n // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n // like 'a.xyz.yz' doesn't match. 
So, the first negative\n // lookahead, has to look ALL the way ahead, to the end of\n // the pattern.\n for (var n = negativeLists.length - 1; n > -1; n--) {\n var nl = negativeLists[n]\n\n var nlBefore = re.slice(0, nl.reStart)\n var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)\n var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)\n var nlAfter = re.slice(nl.reEnd)\n\n nlLast += nlAfter\n\n // Handle nested stuff like *(*.js|!(*.json)), where open parens\n // mean that we should *not* include the ) in the bit that is considered\n // \"after\" the negated section.\n var openParensBefore = nlBefore.split('(').length - 1\n var cleanAfter = nlAfter\n for (i = 0; i < openParensBefore; i++) {\n cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '')\n }\n nlAfter = cleanAfter\n\n var dollar = ''\n if (nlAfter === '' && isSub !== SUBPARSE) {\n dollar = '$'\n }\n var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast\n re = newRe\n }\n\n // if the re is not \"\" at this point, then we need to make sure\n // it doesn't match against an empty path part.\n // Otherwise a/* will match a/, which it should not.\n if (re !== '' && hasMagic) {\n re = '(?=.)' + re\n }\n\n if (addPatternStart) {\n re = patternStart + re\n }\n\n // parsing just a piece of a larger pattern.\n if (isSub === SUBPARSE) {\n return [re, hasMagic]\n }\n\n // skip the regexp for non-magical patterns\n // unescape anything in it, though, so that it'll be\n // an exact match against a file etc.\n if (!hasMagic) {\n return globUnescape(pattern)\n }\n\n var flags = options.nocase ? 'i' : ''\n try {\n var regExp = new RegExp('^' + re + '$', flags)\n } catch (er) {\n // If it was an invalid regular expression, then it can't match\n // anything. This trick looks for a character after the end of\n // the string, which is of course impossible, except in multi-line\n // mode, but it's not a /m regex.\n return new RegExp('$.')\n }\n\n regExp._glob = pattern\n regExp._src = re\n\n return regExp\n}\n\nminimatch.makeRe = function (pattern, options) {\n return new Minimatch(pattern, options || {}).makeRe()\n}\n\nMinimatch.prototype.makeRe = makeRe\nfunction makeRe () {\n if (this.regexp || this.regexp === false) return this.regexp\n\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n var set = this.set\n\n if (!set.length) {\n this.regexp = false\n return this.regexp\n }\n var options = this.options\n\n var twoStar = options.noglobstar ? star\n : options.dot ? twoStarDot\n : twoStarNoDot\n var flags = options.nocase ? 'i' : ''\n\n var re = set.map(function (pattern) {\n return pattern.map(function (p) {\n return (p === GLOBSTAR) ? twoStar\n : (typeof p === 'string') ? regExpEscape(p)\n : p._src\n }).join('\\\\\\/')\n }).join('|')\n\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^(?:' + re + ')$'\n\n // can match anything, as long as it's not this.\n if (this.negate) re = '^(?!' 
+ re + ').*$'\n\n try {\n this.regexp = new RegExp(re, flags)\n } catch (ex) {\n this.regexp = false\n }\n return this.regexp\n}\n\nminimatch.match = function (list, pattern, options) {\n options = options || {}\n var mm = new Minimatch(pattern, options)\n list = list.filter(function (f) {\n return mm.match(f)\n })\n if (mm.options.nonull && !list.length) {\n list.push(pattern)\n }\n return list\n}\n\nMinimatch.prototype.match = match\nfunction match (f, partial) {\n this.debug('match', f, this.pattern)\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) return false\n if (this.empty) return f === ''\n\n if (f === '/' && partial) return true\n\n var options = this.options\n\n // windows: need to use /, not \\\n if (path.sep !== '/') {\n f = f.split(path.sep).join('/')\n }\n\n // treat the test path as a set of pathparts.\n f = f.split(slashSplit)\n this.debug(this.pattern, 'split', f)\n\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n\n var set = this.set\n this.debug(this.pattern, 'set', set)\n\n // Find the basename of the path by looking for the last non-empty segment\n var filename\n var i\n for (i = f.length - 1; i >= 0; i--) {\n filename = f[i]\n if (filename) break\n }\n\n for (i = 0; i < set.length; i++) {\n var pattern = set[i]\n var file = f\n if (options.matchBase && pattern.length === 1) {\n file = [filename]\n }\n var hit = this.matchOne(file, pattern, partial)\n if (hit) {\n if (options.flipNegate) return true\n return !this.negate\n }\n }\n\n // didn't get any hits. this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) return false\n return this.negate\n}\n\n// set partial to true to test if, for example,\n// \"/a/b\" matches the start of \"/*/b/*/d\"\n// Partial means, if you run out of file before you run\n// out of pattern, then that's fine, as long as all\n// the parts match.\nMinimatch.prototype.matchOne = function (file, pattern, partial) {\n var options = this.options\n\n this.debug('matchOne',\n { 'this': this, file: file, pattern: pattern })\n\n this.debug('matchOne', file.length, pattern.length)\n\n for (var fi = 0,\n pi = 0,\n fl = file.length,\n pl = pattern.length\n ; (fi < fl) && (pi < pl)\n ; fi++, pi++) {\n this.debug('matchOne loop')\n var p = pattern[pi]\n var f = file[fi]\n\n this.debug(pattern, p, f)\n\n // should be impossible.\n // some invalid regexp stuff in the set.\n if (p === false) return false\n\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f])\n\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi\n var pr = pi + 1\n if (pr === pl) {\n this.debug('** at the end')\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. 
are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' || file[fi] === '..' ||\n (!options.dot && file[fi].charAt(0) === '.')) return false\n }\n return true\n }\n\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr]\n\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee)\n // found a match.\n return true\n } else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' || swallowee === '..' ||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr)\n break\n }\n\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue')\n fr++\n }\n }\n\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n // If there's more *pattern* left, then\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n if (fr === fl) return true\n }\n return false\n }\n\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n var hit\n if (typeof p === 'string') {\n if (options.nocase) {\n hit = f.toLowerCase() === p.toLowerCase()\n } else {\n hit = f === p\n }\n this.debug('string match', p, f, hit)\n } else {\n hit = f.match(p)\n this.debug('pattern match', p, f, hit)\n }\n\n if (!hit) return false\n }\n\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true\n } else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial\n } else if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')\n return emptyFileEnd\n }\n\n // should be unreachable.\n throw new Error('wtf?')\n}\n\n// replace stuff like \\* with *\nfunction globUnescape (s) {\n return s.replace(/\\\\(.)/g, '$1')\n}\n\nfunction regExpEscape (s) {\n return s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */\n'use strict';\n\n\nvar Punycode = require('punycode');\n\n\nvar internals = {};\n\n\n//\n// Read rules from file.\n//\ninternals.rules = require('./data/rules.json').map(function (rule) {\n\n return {\n rule: rule,\n suffix: rule.replace(/^(\\*\\.|\\!)/, ''),\n punySuffix: -1,\n wildcard: rule.charAt(0) === '*',\n exception: rule.charAt(0) === '!'\n };\n});\n\n\n//\n// Check is given string ends with `suffix`.\n//\ninternals.endsWith = function (str, suffix) {\n\n return str.indexOf(suffix, str.length - suffix.length) !== -1;\n};\n\n\n//\n// Find rule for a given domain.\n//\ninternals.findRule = function (domain) {\n\n var punyDomain = Punycode.toASCII(domain);\n return internals.rules.reduce(function (memo, rule) {\n\n if (rule.punySuffix === -1){\n rule.punySuffix = Punycode.toASCII(rule.suffix);\n }\n if (!internals.endsWith(punyDomain, '.' 
+ rule.punySuffix) && punyDomain !== rule.punySuffix) {\n return memo;\n }\n // This has been commented out as it never seems to run. This is because\n // sub tlds always appear after their parents and we never find a shorter\n // match.\n //if (memo) {\n // var memoSuffix = Punycode.toASCII(memo.suffix);\n // if (memoSuffix.length >= punySuffix.length) {\n // return memo;\n // }\n //}\n return rule;\n }, null);\n};\n\n\n//\n// Error codes and messages.\n//\nexports.errorCodes = {\n DOMAIN_TOO_SHORT: 'Domain name too short.',\n DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.',\n LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.',\n LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.',\n LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.',\n LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.',\n LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.'\n};\n\n\n//\n// Validate domain name and throw if not valid.\n//\n// From wikipedia:\n//\n// Hostnames are composed of series of labels concatenated with dots, as are all\n// domain names. Each label must be between 1 and 63 characters long, and the\n// entire hostname (including the delimiting dots) has a maximum of 255 chars.\n//\n// Allowed chars:\n//\n// * `a-z`\n// * `0-9`\n// * `-` but not as a starting or ending character\n// * `.` as a separator for the textual portions of a domain name\n//\n// * http://en.wikipedia.org/wiki/Domain_name\n// * http://en.wikipedia.org/wiki/Hostname\n//\ninternals.validate = function (input) {\n\n // Before we can validate we need to take care of IDNs with unicode chars.\n var ascii = Punycode.toASCII(input);\n\n if (ascii.length < 1) {\n return 'DOMAIN_TOO_SHORT';\n }\n if (ascii.length > 255) {\n return 'DOMAIN_TOO_LONG';\n }\n\n // Check each part's length and allowed chars.\n var labels = ascii.split('.');\n var label;\n\n for (var i = 0; i < labels.length; ++i) {\n label = labels[i];\n if (!label.length) {\n return 'LABEL_TOO_SHORT';\n }\n if (label.length > 63) {\n return 'LABEL_TOO_LONG';\n }\n if (label.charAt(0) === '-') {\n return 'LABEL_STARTS_WITH_DASH';\n }\n if (label.charAt(label.length - 1) === '-') {\n return 'LABEL_ENDS_WITH_DASH';\n }\n if (!/^[a-z0-9\\-]+$/.test(label)) {\n return 'LABEL_INVALID_CHARS';\n }\n }\n};\n\n\n//\n// Public API\n//\n\n\n//\n// Parse domain.\n//\nexports.parse = function (input) {\n\n if (typeof input !== 'string') {\n throw new TypeError('Domain name must be a string.');\n }\n\n // Force domain to lowercase.\n var domain = input.slice(0).toLowerCase();\n\n // Handle FQDN.\n // TODO: Simply remove trailing dot?\n if (domain.charAt(domain.length - 1) === '.') {\n domain = domain.slice(0, domain.length - 1);\n }\n\n // Validate and sanitise input.\n var error = internals.validate(domain);\n if (error) {\n return {\n input: input,\n error: {\n message: exports.errorCodes[error],\n code: error\n }\n };\n }\n\n var parsed = {\n input: input,\n tld: null,\n sld: null,\n domain: null,\n subdomain: null,\n listed: false\n };\n\n var domainParts = domain.split('.');\n\n // Non-Internet TLD\n if (domainParts[domainParts.length - 1] === 'local') {\n return parsed;\n }\n\n var handlePunycode = function () {\n\n if (!/xn--/.test(domain)) {\n return parsed;\n }\n if (parsed.domain) {\n parsed.domain = Punycode.toASCII(parsed.domain);\n }\n if (parsed.subdomain) {\n parsed.subdomain = Punycode.toASCII(parsed.subdomain);\n }\n return 
parsed;\n };\n\n var rule = internals.findRule(domain);\n\n // Unlisted tld.\n if (!rule) {\n if (domainParts.length < 2) {\n return parsed;\n }\n parsed.tld = domainParts.pop();\n parsed.sld = domainParts.pop();\n parsed.domain = [parsed.sld, parsed.tld].join('.');\n if (domainParts.length) {\n parsed.subdomain = domainParts.pop();\n }\n return handlePunycode();\n }\n\n // At this point we know the public suffix is listed.\n parsed.listed = true;\n\n var tldParts = rule.suffix.split('.');\n var privateParts = domainParts.slice(0, domainParts.length - tldParts.length);\n\n if (rule.exception) {\n privateParts.push(tldParts.shift());\n }\n\n parsed.tld = tldParts.join('.');\n\n if (!privateParts.length) {\n return handlePunycode();\n }\n\n if (rule.wildcard) {\n tldParts.unshift(privateParts.pop());\n parsed.tld = tldParts.join('.');\n }\n\n if (!privateParts.length) {\n return handlePunycode();\n }\n\n parsed.sld = privateParts.pop();\n parsed.domain = [parsed.sld, parsed.tld].join('.');\n\n if (privateParts.length) {\n parsed.subdomain = privateParts.join('.');\n }\n\n return handlePunycode();\n};\n\n\n//\n// Get domain.\n//\nexports.get = function (domain) {\n\n if (!domain) {\n return null;\n }\n return exports.parse(domain).domain || null;\n};\n\n\n//\n// Check whether domain belongs to a known public suffix.\n//\nexports.isValid = function (domain) {\n\n var parsed = exports.parse(domain);\n return Boolean(parsed.domain && parsed.listed);\n};\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 
'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n 
streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. 
Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //