// @azure/cosmos — bundled CommonJS build (~378 kB), captured from the UNPKG "View Raw" page.
1'use strict';
2
3Object.defineProperty(exports, '__esModule', { value: true });
4
5var crypto = require('crypto');
6var uuid$3 = require('uuid');
7var logger$4 = require('@azure/logger');
8var tslib = require('tslib');
9var stableStringify = require('fast-json-stable-stringify');
10var PriorityQueue = require('priorityqueuejs');
11var semaphore = require('semaphore');
12var coreRestPipeline = require('@azure/core-rest-pipeline');
13var nodeAbortController = require('node-abort-controller');
14var universalUserAgent = require('universal-user-agent');
15var JSBI = require('jsbi');
16var abortController = require('@azure/abort-controller');
17
/** Normalizes a CommonJS export to an ES-module-like shape: objects that already carry a `default` key pass through unchanged, anything else is wrapped as `{ default: value }`. */
function _interopDefaultLegacy (e) {
    const alreadyModuleLike = e && typeof e === 'object' && 'default' in e;
    return alreadyModuleLike ? e : { 'default': e };
}
19
// Normalize each CommonJS dependency to an ES-module-like shape so the rest
// of the bundle can uniformly read `.default` (see _interopDefaultLegacy).
var stableStringify__default = /*#__PURE__*/_interopDefaultLegacy(stableStringify);
var PriorityQueue__default = /*#__PURE__*/_interopDefaultLegacy(PriorityQueue);
var semaphore__default = /*#__PURE__*/_interopDefaultLegacy(semaphore);
var JSBI__default = /*#__PURE__*/_interopDefaultLegacy(JSBI);
24
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Partition key path used when a container is created without an explicit one.
const DEFAULT_PARTITION_KEY_PATH = "/_partitionKey"; // eslint-disable-line @typescript-eslint/prefer-as-const
28
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/**
 * HTTP header names, REST path segments, and service-level constants shared
 * across the client. These are wire-protocol strings — they must match the
 * Cosmos DB REST API exactly and must not be edited casually.
 * @hidden
 */
const Constants = {
    HttpHeaders: {
        Authorization: "authorization",
        ETag: "etag",
        MethodOverride: "X-HTTP-Method",
        Slug: "Slug",
        ContentType: "Content-Type",
        LastModified: "Last-Modified",
        ContentEncoding: "Content-Encoding",
        CharacterSet: "CharacterSet",
        UserAgent: "User-Agent",
        IfModifiedSince: "If-Modified-Since",
        IfMatch: "If-Match",
        IfNoneMatch: "If-None-Match",
        ContentLength: "Content-Length",
        AcceptEncoding: "Accept-Encoding",
        KeepAlive: "Keep-Alive",
        CacheControl: "Cache-Control",
        TransferEncoding: "Transfer-Encoding",
        ContentLanguage: "Content-Language",
        ContentLocation: "Content-Location",
        ContentMd5: "Content-Md5",
        ContentRange: "Content-Range",
        Accept: "Accept",
        AcceptCharset: "Accept-Charset",
        AcceptLanguage: "Accept-Language",
        IfRange: "If-Range",
        IfUnmodifiedSince: "If-Unmodified-Since",
        MaxForwards: "Max-Forwards",
        ProxyAuthorization: "Proxy-Authorization",
        AcceptRanges: "Accept-Ranges",
        ProxyAuthenticate: "Proxy-Authenticate",
        RetryAfter: "Retry-After",
        SetCookie: "Set-Cookie",
        WwwAuthenticate: "Www-Authenticate",
        Origin: "Origin",
        Host: "Host",
        AccessControlAllowOrigin: "Access-Control-Allow-Origin",
        AccessControlAllowHeaders: "Access-Control-Allow-Headers",
        KeyValueEncodingFormat: "application/x-www-form-urlencoded",
        WrapAssertionFormat: "wrap_assertion_format",
        WrapAssertion: "wrap_assertion",
        WrapScope: "wrap_scope",
        SimpleToken: "SWT",
        HttpDate: "date",
        Prefer: "Prefer",
        Location: "Location",
        Referer: "referer",
        A_IM: "A-IM",
        // Query
        Query: "x-ms-documentdb-query",
        IsQuery: "x-ms-documentdb-isquery",
        IsQueryPlan: "x-ms-cosmos-is-query-plan-request",
        SupportedQueryFeatures: "x-ms-cosmos-supported-query-features",
        QueryVersion: "x-ms-cosmos-query-version",
        // Our custom Azure Cosmos DB headers
        Continuation: "x-ms-continuation",
        PageSize: "x-ms-max-item-count",
        ItemCount: "x-ms-item-count",
        // Request sender generated. Simply echoed by backend.
        ActivityId: "x-ms-activity-id",
        PreTriggerInclude: "x-ms-documentdb-pre-trigger-include",
        PreTriggerExclude: "x-ms-documentdb-pre-trigger-exclude",
        PostTriggerInclude: "x-ms-documentdb-post-trigger-include",
        PostTriggerExclude: "x-ms-documentdb-post-trigger-exclude",
        IndexingDirective: "x-ms-indexing-directive",
        SessionToken: "x-ms-session-token",
        ConsistencyLevel: "x-ms-consistency-level",
        XDate: "x-ms-date",
        CollectionPartitionInfo: "x-ms-collection-partition-info",
        CollectionServiceInfo: "x-ms-collection-service-info",
        // Deprecated, use RetryAfterInMs instead.
        RetryAfterInMilliseconds: "x-ms-retry-after-ms",
        RetryAfterInMs: "x-ms-retry-after-ms",
        IsFeedUnfiltered: "x-ms-is-feed-unfiltered",
        ResourceTokenExpiry: "x-ms-documentdb-expiry-seconds",
        EnableScanInQuery: "x-ms-documentdb-query-enable-scan",
        EmitVerboseTracesInQuery: "x-ms-documentdb-query-emit-traces",
        EnableCrossPartitionQuery: "x-ms-documentdb-query-enablecrosspartition",
        ParallelizeCrossPartitionQuery: "x-ms-documentdb-query-parallelizecrosspartitionquery",
        ResponseContinuationTokenLimitInKB: "x-ms-documentdb-responsecontinuationtokenlimitinkb",
        // QueryMetrics
        // Request header to tell backend to give you query metrics.
        PopulateQueryMetrics: "x-ms-documentdb-populatequerymetrics",
        // Response header that holds the serialized version of query metrics.
        QueryMetrics: "x-ms-documentdb-query-metrics",
        // Version headers and values
        Version: "x-ms-version",
        // Owner name
        OwnerFullName: "x-ms-alt-content-path",
        // Owner ID used for name based request in session token.
        OwnerId: "x-ms-content-path",
        // Partition Key
        PartitionKey: "x-ms-documentdb-partitionkey",
        PartitionKeyRangeID: "x-ms-documentdb-partitionkeyrangeid",
        // Quota Info
        MaxEntityCount: "x-ms-root-entity-max-count",
        CurrentEntityCount: "x-ms-root-entity-current-count",
        CollectionQuotaInMb: "x-ms-collection-quota-mb",
        CollectionCurrentUsageInMb: "x-ms-collection-usage-mb",
        MaxMediaStorageUsageInMB: "x-ms-max-media-storage-usage-mb",
        CurrentMediaStorageUsageInMB: "x-ms-media-storage-usage-mb",
        RequestCharge: "x-ms-request-charge",
        PopulateQuotaInfo: "x-ms-documentdb-populatequotainfo",
        MaxResourceQuota: "x-ms-resource-quota",
        // Offer header
        OfferType: "x-ms-offer-type",
        OfferThroughput: "x-ms-offer-throughput",
        AutoscaleSettings: "x-ms-cosmos-offer-autopilot-settings",
        // Custom RUs/minute headers
        DisableRUPerMinuteUsage: "x-ms-documentdb-disable-ru-per-minute-usage",
        IsRUPerMinuteUsed: "x-ms-documentdb-is-ru-per-minute-used",
        OfferIsRUPerMinuteThroughputEnabled: "x-ms-offer-is-ru-per-minute-throughput-enabled",
        // Index progress headers
        IndexTransformationProgress: "x-ms-documentdb-collection-index-transformation-progress",
        LazyIndexingProgress: "x-ms-documentdb-collection-lazy-indexing-progress",
        // Upsert header
        IsUpsert: "x-ms-documentdb-is-upsert",
        // Sub status of the error
        SubStatus: "x-ms-substatus",
        // StoredProcedure related headers
        EnableScriptLogging: "x-ms-documentdb-script-enable-logging",
        ScriptLogResults: "x-ms-documentdb-script-log-results",
        // Multi-Region Write
        ALLOW_MULTIPLE_WRITES: "x-ms-cosmos-allow-tentative-writes",
        // Bulk/Batch header
        IsBatchRequest: "x-ms-cosmos-is-batch-request",
        IsBatchAtomic: "x-ms-cosmos-batch-atomic",
        BatchContinueOnError: "x-ms-cosmos-batch-continue-on-error",
        // Dedicated Gateway Headers
        DedicatedGatewayPerRequestCacheStaleness: "x-ms-dedicatedgateway-max-age",
        // Cache Refresh header
        ForceRefresh: "x-ms-force-refresh",
    },
    // GlobalDB related constants
    WritableLocations: "writableLocations",
    ReadableLocations: "readableLocations",
    LocationUnavailableExpirationTimeInMs: 5 * 60 * 1000,
    // ServiceDocument Resource
    ENABLE_MULTIPLE_WRITABLE_LOCATIONS: "enableMultipleWriteLocations",
    // Background refresh time
    DefaultUnavailableLocationExpirationTimeMS: 5 * 60 * 1000,
    // Client generated retry count response header
    ThrottleRetryCount: "x-ms-throttle-retry-count",
    ThrottleRetryWaitTimeInMs: "x-ms-throttle-retry-wait-time-ms",
    // Platform
    CurrentVersion: "2020-07-15",
    AzureNamespace: "Azure.Cosmos",
    AzurePackageName: "@azure/cosmos",
    SDKName: "azure-cosmos-js",
    SDKVersion: "3.17.3",
    // Bulk Operations
    DefaultMaxBulkRequestBodySizeInBytes: 220201,
    Quota: {
        CollectionSize: "collectionSize",
    },
    Path: {
        Root: "/",
        DatabasesPathSegment: "dbs",
        CollectionsPathSegment: "colls",
        UsersPathSegment: "users",
        DocumentsPathSegment: "docs",
        PermissionsPathSegment: "permissions",
        StoredProceduresPathSegment: "sprocs",
        TriggersPathSegment: "triggers",
        UserDefinedFunctionsPathSegment: "udfs",
        ConflictsPathSegment: "conflicts",
        AttachmentsPathSegment: "attachments",
        PartitionKeyRangesPathSegment: "pkranges",
        SchemasPathSegment: "schemas",
        OffersPathSegment: "offers",
        TopologyPathSegment: "topology",
        DatabaseAccountPathSegment: "databaseaccount",
    },
    PartitionKeyRange: {
        // Partition Key Range Constants
        MinInclusive: "minInclusive",
        MaxExclusive: "maxExclusive",
        Id: "id",
    },
    QueryRangeConstants: {
        // Partition Key Range Constants
        MinInclusive: "minInclusive",
        MaxExclusive: "maxExclusive",
        min: "min",
    },
    /**
     * @deprecated Use EffectivePartitionKeyConstants instead
     */
    EffectiveParitionKeyConstants: {
        MinimumInclusiveEffectivePartitionKey: "",
        MaximumExclusiveEffectivePartitionKey: "FF",
    },
    EffectivePartitionKeyConstants: {
        MinimumInclusiveEffectivePartitionKey: "",
        MaximumExclusiveEffectivePartitionKey: "FF",
    },
};
/**
 * REST path segment used for each Cosmos resource kind
 * (TypeScript-enum output; values must match the service URLs).
 * @hidden
 */
exports.ResourceType = void 0;
(function (ResourceType) {
    ResourceType["none"] = "";
    ResourceType["database"] = "dbs";
    ResourceType["offer"] = "offers";
    ResourceType["user"] = "users";
    ResourceType["permission"] = "permissions";
    ResourceType["container"] = "colls";
    ResourceType["conflicts"] = "conflicts";
    ResourceType["sproc"] = "sprocs";
    ResourceType["udf"] = "udfs";
    ResourceType["trigger"] = "triggers";
    ResourceType["item"] = "docs";
    ResourceType["pkranges"] = "pkranges";
    ResourceType["partitionkey"] = "partitionKey";
})(exports.ResourceType || (exports.ResourceType = {}));
/**
 * HTTP verbs used by the request pipeline (TypeScript-enum output).
 * @hidden
 */
exports.HTTPMethod = void 0;
(function (HTTPMethod) {
    HTTPMethod["get"] = "GET";
    HTTPMethod["patch"] = "PATCH";
    HTTPMethod["post"] = "POST";
    HTTPMethod["put"] = "PUT";
    HTTPMethod["delete"] = "DELETE";
})(exports.HTTPMethod || (exports.HTTPMethod = {}));
/**
 * Logical operation kinds used for routing, retry, and auth decisions
 * (TypeScript-enum output).
 * @hidden
 */
exports.OperationType = void 0;
(function (OperationType) {
    OperationType["Create"] = "create";
    OperationType["Replace"] = "replace";
    OperationType["Upsert"] = "upsert";
    OperationType["Delete"] = "delete";
    OperationType["Read"] = "read";
    OperationType["Query"] = "query";
    OperationType["Execute"] = "execute";
    OperationType["Batch"] = "batch";
    OperationType["Patch"] = "patch";
})(exports.OperationType || (exports.OperationType = {}));
/**
 * Account key kinds recognized when building auth tokens
 * (module-private; TypeScript-enum output).
 * @hidden
 */
var CosmosKeyType;
(function (CosmosKeyType) {
    CosmosKeyType["PrimaryMaster"] = "PRIMARY_MASTER";
    CosmosKeyType["SecondaryMaster"] = "SECONDARY_MASTER";
    CosmosKeyType["PrimaryReadOnly"] = "PRIMARY_READONLY";
    CosmosKeyType["SecondaryReadOnly"] = "SECONDARY_READONLY";
})(CosmosKeyType || (CosmosKeyType = {}));
/**
 * Child resource kinds a container can hold, as used in SAS/permission
 * payloads (module-private; TypeScript-enum output).
 * @hidden
 */
var CosmosContainerChildResourceKind;
(function (CosmosContainerChildResourceKind) {
    CosmosContainerChildResourceKind["Item"] = "ITEM";
    CosmosContainerChildResourceKind["StoredProcedure"] = "STORED_PROCEDURE";
    CosmosContainerChildResourceKind["UserDefinedFunction"] = "USER_DEFINED_FUNCTION";
    CosmosContainerChildResourceKind["Trigger"] = "TRIGGER";
})(CosmosContainerChildResourceKind || (CosmosContainerChildResourceKind = {}));
/**
 * Bit-flag values combined into permission scopes for SAS tokens.
 * Values are single bits (or OR-ed masks like the *AllAccessValue entries);
 * note that control-plane and data-plane flags reuse the same bit positions
 * and are only meaningful within their own plane.
 * (Module-private; TypeScript-enum output — numeric enums get a reverse
 * number->name mapping as well.)
 * @hidden
 */
var PermissionScopeValues;
(function (PermissionScopeValues) {
    /**
     * Values which set permission Scope applicable to control plane related operations.
     */
    PermissionScopeValues[PermissionScopeValues["ScopeAccountReadValue"] = 1] = "ScopeAccountReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeAccountListDatabasesValue"] = 2] = "ScopeAccountListDatabasesValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadValue"] = 4] = "ScopeDatabaseReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadOfferValue"] = 8] = "ScopeDatabaseReadOfferValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseListContainerValue"] = 16] = "ScopeDatabaseListContainerValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadValue"] = 32] = "ScopeContainerReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadOfferValue"] = 64] = "ScopeContainerReadOfferValue";
    PermissionScopeValues[PermissionScopeValues["ScopeAccountCreateDatabasesValue"] = 1] = "ScopeAccountCreateDatabasesValue";
    PermissionScopeValues[PermissionScopeValues["ScopeAccountDeleteDatabasesValue"] = 2] = "ScopeAccountDeleteDatabasesValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseDeleteValue"] = 4] = "ScopeDatabaseDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReplaceOfferValue"] = 8] = "ScopeDatabaseReplaceOfferValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseCreateContainerValue"] = 16] = "ScopeDatabaseCreateContainerValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseDeleteContainerValue"] = 32] = "ScopeDatabaseDeleteContainerValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceValue"] = 64] = "ScopeContainerReplaceValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteValue"] = 128] = "ScopeContainerDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceOfferValue"] = 256] = "ScopeContainerReplaceOfferValue";
    PermissionScopeValues[PermissionScopeValues["ScopeAccountReadAllAccessValue"] = 65535] = "ScopeAccountReadAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadAllAccessValue"] = 124] = "ScopeDatabaseReadAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainersReadAllAccessValue"] = 96] = "ScopeContainersReadAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeAccountWriteAllAccessValue"] = 65535] = "ScopeAccountWriteAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeDatabaseWriteAllAccessValue"] = 508] = "ScopeDatabaseWriteAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainersWriteAllAccessValue"] = 448] = "ScopeContainersWriteAllAccessValue";
    /**
     * Values which set permission Scope applicable to data plane related operations.
     */
    PermissionScopeValues[PermissionScopeValues["ScopeContainerExecuteQueriesValue"] = 1] = "ScopeContainerExecuteQueriesValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadFeedsValue"] = 2] = "ScopeContainerReadFeedsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadStoredProceduresValue"] = 4] = "ScopeContainerReadStoredProceduresValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadUserDefinedFunctionsValue"] = 8] = "ScopeContainerReadUserDefinedFunctionsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadTriggersValue"] = 16] = "ScopeContainerReadTriggersValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadConflictsValue"] = 32] = "ScopeContainerReadConflictsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemReadValue"] = 64] = "ScopeItemReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureReadValue"] = 128] = "ScopeStoredProcedureReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionReadValue"] = 256] = "ScopeUserDefinedFunctionReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeTriggerReadValue"] = 512] = "ScopeTriggerReadValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateItemsValue"] = 1] = "ScopeContainerCreateItemsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceItemsValue"] = 2] = "ScopeContainerReplaceItemsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerUpsertItemsValue"] = 4] = "ScopeContainerUpsertItemsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteItemsValue"] = 8] = "ScopeContainerDeleteItemsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateStoredProceduresValue"] = 16] = "ScopeContainerCreateStoredProceduresValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceStoredProceduresValue"] = 32] = "ScopeContainerReplaceStoredProceduresValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteStoredProceduresValue"] = 64] = "ScopeContainerDeleteStoredProceduresValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerExecuteStoredProceduresValue"] = 128] = "ScopeContainerExecuteStoredProceduresValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateTriggersValue"] = 256] = "ScopeContainerCreateTriggersValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceTriggersValue"] = 512] = "ScopeContainerReplaceTriggersValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteTriggersValue"] = 1024] = "ScopeContainerDeleteTriggersValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateUserDefinedFunctionsValue"] = 2048] = "ScopeContainerCreateUserDefinedFunctionsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceUserDefinedFunctionsValue"] = 4096] = "ScopeContainerReplaceUserDefinedFunctionsValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteUserDefinedFunctionSValue"] = 8192] = "ScopeContainerDeleteUserDefinedFunctionSValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteCONFLICTSValue"] = 16384] = "ScopeContainerDeleteCONFLICTSValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemReplaceValue"] = 65536] = "ScopeItemReplaceValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemUpsertValue"] = 131072] = "ScopeItemUpsertValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemDeleteValue"] = 262144] = "ScopeItemDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureReplaceValue"] = 1048576] = "ScopeStoredProcedureReplaceValue";
    PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureDeleteValue"] = 2097152] = "ScopeStoredProcedureDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureExecuteValue"] = 4194304] = "ScopeStoredProcedureExecuteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionReplaceValue"] = 8388608] = "ScopeUserDefinedFunctionReplaceValue";
    PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionDeleteValue"] = 16777216] = "ScopeUserDefinedFunctionDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeTriggerReplaceValue"] = 33554432] = "ScopeTriggerReplaceValue";
    PermissionScopeValues[PermissionScopeValues["ScopeTriggerDeleteValue"] = 67108864] = "ScopeTriggerDeleteValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerReadAllAccessValue"] = 4294967295] = "ScopeContainerReadAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemReadAllAccessValue"] = 65] = "ScopeItemReadAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeContainerWriteAllAccessValue"] = 4294967295] = "ScopeContainerWriteAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["ScopeItemWriteAllAccessValue"] = 458767] = "ScopeItemWriteAllAccessValue";
    PermissionScopeValues[PermissionScopeValues["NoneValue"] = 0] = "NoneValue";
})(PermissionScopeValues || (PermissionScopeValues = {}));
/**
 * Bit-flag permission kinds that callers OR together when requesting a SAS
 * token. Values mirror PermissionScopeValues above; several names share a
 * bit because control-plane and data-plane flags occupy the same positions.
 * (TypeScript-enum output — numeric enums also get a reverse mapping.)
 * @hidden
 */
exports.SasTokenPermissionKind = void 0;
(function (SasTokenPermissionKind) {
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateItems"] = 1] = "ContainerCreateItems";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceItems"] = 2] = "ContainerReplaceItems";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerUpsertItems"] = 4] = "ContainerUpsertItems";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteItems"] = 128] = "ContainerDeleteItems";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerExecuteQueries"] = 1] = "ContainerExecuteQueries";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadFeeds"] = 2] = "ContainerReadFeeds";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateStoreProcedure"] = 16] = "ContainerCreateStoreProcedure";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadStoreProcedure"] = 4] = "ContainerReadStoreProcedure";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceStoreProcedure"] = 32] = "ContainerReplaceStoreProcedure";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteStoreProcedure"] = 64] = "ContainerDeleteStoreProcedure";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateTriggers"] = 256] = "ContainerCreateTriggers";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadTriggers"] = 16] = "ContainerReadTriggers";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceTriggers"] = 512] = "ContainerReplaceTriggers";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteTriggers"] = 1024] = "ContainerDeleteTriggers";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateUserDefinedFunctions"] = 2048] = "ContainerCreateUserDefinedFunctions";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadUserDefinedFunctions"] = 8] = "ContainerReadUserDefinedFunctions";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceUserDefinedFunctions"] = 4096] = "ContainerReplaceUserDefinedFunctions";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteUserDefinedFunctions"] = 8192] = "ContainerDeleteUserDefinedFunctions";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerExecuteStoredProcedure"] = 128] = "ContainerExecuteStoredProcedure";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadConflicts"] = 32] = "ContainerReadConflicts";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteConflicts"] = 16384] = "ContainerDeleteConflicts";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadAny"] = 64] = "ContainerReadAny";
    SasTokenPermissionKind[SasTokenPermissionKind["ContainerFullAccess"] = 4294967295] = "ContainerFullAccess";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemReadAny"] = 65536] = "ItemReadAny";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemFullAccess"] = 65] = "ItemFullAccess";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemRead"] = 64] = "ItemRead";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemReplace"] = 65536] = "ItemReplace";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemUpsert"] = 131072] = "ItemUpsert";
    SasTokenPermissionKind[SasTokenPermissionKind["ItemDelete"] = 262144] = "ItemDelete";
    SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureRead"] = 128] = "StoreProcedureRead";
    SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureReplace"] = 1048576] = "StoreProcedureReplace";
    SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureDelete"] = 2097152] = "StoreProcedureDelete";
    SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureExecute"] = 4194304] = "StoreProcedureExecute";
    SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionRead"] = 256] = "UserDefinedFuntionRead";
    SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionReplace"] = 8388608] = "UserDefinedFuntionReplace";
    SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionDelete"] = 16777216] = "UserDefinedFuntionDelete";
    SasTokenPermissionKind[SasTokenPermissionKind["TriggerRead"] = 512] = "TriggerRead";
    SasTokenPermissionKind[SasTokenPermissionKind["TriggerReplace"] = 33554432] = "TriggerReplace";
    SasTokenPermissionKind[SasTokenPermissionKind["TriggerDelete"] = 67108864] = "TriggerDelete";
})(exports.SasTokenPermissionKind || (exports.SasTokenPermissionKind = {}));
416
// One or more leading slashes (used when trimming resource links).
const trimLeftSlashes = new RegExp("^[/]+");
// One or more trailing slashes (used when trimming resource links).
const trimRightSlashes = new RegExp("[/]+$");
// Characters forbidden in a general resource id: / \ ? #
const illegalResourceIdCharacters = new RegExp("[/\\\\?#]");
// Characters forbidden in an item id: / \ #  ("?" is allowed for items).
const illegalItemResourceIdCharacters = new RegExp("[/\\\\#]");
/**
 * JSON-stringifies a value and escapes every character in the
 * U+007F–U+FFFF range as a `\uXXXX` sequence, so the output is pure ASCII.
 * @hidden
 */
function jsonStringifyAndEscapeNonASCII(arg) {
    // TODO: better way for this? Not sure.
    return JSON.stringify(arg).replace(/[\u007F-\uFFFF]/g, (ch) => `\\u${ch.charCodeAt(0).toString(16).padStart(4, "0")}`);
}
/**
 * Splits a resource path such as "/dbs/db/colls/coll" into its trailing
 * resource type plus an object body holding the resource id and the
 * normalized self link.
 * @hidden
 */
function parseLink(resourcePath) {
    // DatabaseAccount requests use an empty path: no type, no body.
    if (resourcePath.length === 0) {
        return {
            type: undefined,
            objectBody: undefined,
        };
    }
    // Normalize to the canonical "/.../" form before splitting.
    let normalized = resourcePath;
    if (!normalized.endsWith("/")) {
        normalized = `${normalized}/`;
    }
    if (!normalized.startsWith("/")) {
        normalized = `/${normalized}`;
    }
    /*
     Splitting "/type/id/.../" yields ["", type, id, ..., ""].
     An even part count means the path ends on an id ("/type/id/"): the id
     sits at length-2 and its type at length-3. An odd count means the path
     ends on a type ("/type/id/type/"): the type sits at length-2 and the
     id at length-3.
     */
    const pathParts = normalized.split("/");
    const endsOnId = pathParts.length % 2 === 0;
    const id = endsOnId ? pathParts[pathParts.length - 2] : pathParts[pathParts.length - 3];
    const type = endsOnId ? pathParts[pathParts.length - 3] : pathParts[pathParts.length - 2];
    return {
        type,
        objectBody: {
            id,
            self: normalized,
        },
    };
}
/**
 * Reports whether an operation only reads data (Read or Query), which lets
 * the client route it to read regions / apply read retry policies.
 * @hidden
 */
function isReadRequest(operationType) {
    const readOperations = [exports.OperationType.Read, exports.OperationType.Query];
    return readOperations.includes(operationType);
}
/**
 * Returns a promise that resolves (with no value) after `time` milliseconds.
 * @hidden
 */
function sleep(time) {
    return new Promise((resolve) => setTimeout(resolve, time));
}
/**
 * Truncates a resource link to its container portion, i.e. the first four
 * slash-separated segments ("dbs/<db>/colls/<coll>").
 * @hidden
 */
function getContainerLink(link) {
    // split with a limit keeps only the first 4 segments (same as slice(0, 4)).
    return link.split("/", 4).join("/");
}
/**
 * Strips all leading and trailing "/" characters from a resource link.
 * @hidden
 */
function trimSlashes(source) {
    return source.replace(/^\/+/, "").replace(/\/+$/, "");
}
/**
 * Tokenizes a partition-key-style path (e.g. `/a/"b c"/d`) into its
 * segments. Each segment must be introduced by "/"; a segment may be
 * wrapped in single or double quotes, in which case it may itself contain
 * "/" and escaped quote characters. Throws on malformed input, reporting
 * the offending index.
 * @hidden
 */
function parsePath(path) {
    const pathParts = [];
    // Cursor into `path`, shared (and mutated) by the token readers below.
    let currentIndex = 0;
    const throwError = () => {
        throw new Error("Path " + path + " is invalid at index " + currentIndex);
    };
    // Reads a quoted segment: scans for the matching close quote, skipping
    // quotes that are preceded by a backslash. Returns the raw token text
    // (without the surrounding quotes) and advances the cursor past it.
    const getEscapedToken = () => {
        const quote = path[currentIndex];
        let newIndex = ++currentIndex;
        for (;;) {
            newIndex = path.indexOf(quote, newIndex);
            if (newIndex === -1) {
                // Unterminated quote.
                throwError();
            }
            // A quote not preceded by "\" terminates the token.
            if (path[newIndex - 1] !== "\\") {
                break;
            }
            ++newIndex;
        }
        const token = path.substr(currentIndex, newIndex - currentIndex);
        currentIndex = newIndex + 1;
        return token;
    };
    // Reads an unquoted segment up to the next "/" (or end of string),
    // trimming surrounding whitespace, and advances the cursor.
    const getToken = () => {
        const newIndex = path.indexOf("/", currentIndex);
        let token = null;
        if (newIndex === -1) {
            token = path.substr(currentIndex);
            currentIndex = path.length;
        }
        else {
            token = path.substr(currentIndex, newIndex - currentIndex);
            currentIndex = newIndex;
        }
        token = token.trim();
        return token;
    };
    // Main loop: every segment must start with "/"; a trailing "/" is allowed.
    while (currentIndex < path.length) {
        if (path[currentIndex] !== "/") {
            throwError();
        }
        if (++currentIndex === path.length) {
            break;
        }
        if (path[currentIndex] === '"' || path[currentIndex] === "'") {
            pathParts.push(getEscapedToken());
        }
        else {
            pathParts.push(getToken());
        }
    }
    return pathParts;
}
/**
 * Validates the optional `id` of a resource body. On failure, records the
 * reason on `err.message` and returns false. An id must be a string, must
 * not contain any of / \ ? #, and must not end with a space.
 * @hidden
 */
function isResourceValid(resource, err) {
    // TODO: fix strictness issues so that caller contexts respects the types of the functions
    if (resource.id) {
        if (typeof resource.id !== "string") {
            err.message = "Id must be a string.";
            return false;
        }
        if (/[/\\?#]/.test(resource.id)) {
            err.message = "Id contains illegal chars.";
            return false;
        }
        if (resource.id.endsWith(" ")) {
            err.message = "Id ends with a space.";
            return false;
        }
    }
    return true;
}
/**
 * Validates the optional `id` of an item body. On failure, records the
 * reason on `err.message` and returns false. Item ids must be strings and
 * must not contain / \ # — unlike general resources, "?" is permitted and
 * a trailing space is not rejected.
 * @hidden
 */
function isItemResourceValid(resource, err) {
    // TODO: fix strictness issues so that caller contexts respects the types of the functions
    if (resource.id) {
        if (typeof resource.id !== "string") {
            err.message = "Id must be a string.";
            return false;
        }
        if (/[/\\#]/.test(resource.id)) {
            err.message = "Id contains illegal chars.";
            return false;
        }
    }
    return true;
}
/**
 * Derives a resource id from its link by stripping surrounding slashes.
 * @hidden
 */
function getIdFromLink(resourceLink) {
    return trimSlashes(resourceLink);
}
/**
 * Builds a URI path for a resource link: the trimmed, URI-encoded link,
 * optionally followed by a child resource-type segment.
 * @hidden
 */
function getPathFromLink(resourceLink, resourceType) {
    const encoded = encodeURI(trimSlashes(resourceLink));
    return resourceType ? `/${encoded}/${resourceType}` : `/${encoded}`;
}
/**
 * True when the input is null/undefined/"", or contains only whitespace.
 * (Kept regex-based rather than `.trim()` so truthy non-string inputs are
 * coerced instead of throwing.)
 * @hidden
 */
function isStringNullOrEmpty(inputString) {
    if (!inputString) {
        return true;
    }
    return /^\s*$/.test(inputString);
}
/**
 * Strips leading and trailing "/" characters from a string, throwing if the
 * input is not a string at all.
 * @hidden
 */
function trimSlashFromLeftAndRight(inputString) {
    if (typeof inputString !== "string") {
        throw new Error("invalid input: input is not string");
    }
    return inputString.replace(/^\/+/, "").replace(/\/+$/, "");
}
/**
 * Asserts that `resourceId` is a usable general resource id: a non-empty,
 * non-whitespace string with none of the characters / \ ? #.
 * Returns true on success; throws a descriptive Error otherwise.
 * @hidden
 */
function validateResourceId(resourceId) {
    const missing = typeof resourceId !== "string" || isStringNullOrEmpty(resourceId);
    if (missing) {
        throw new Error("Resource ID must be a string and cannot be undefined, null or empty");
    }
    if (/[/\\?#]/.test(resourceId)) {
        throw new Error("Illegal characters ['/', '\\', '#', '?'] cannot be used in Resource ID");
    }
    return true;
}
/**
 * Asserts that `resourceId` is a usable item id: a non-empty,
 * non-whitespace string with none of the characters / \ #
 * ("?" is allowed for items). Returns true on success; throws otherwise.
 * @hidden
 */
function validateItemResourceId(resourceId) {
    const missing = typeof resourceId !== "string" || isStringNullOrEmpty(resourceId);
    if (missing) {
        throw new Error("Resource ID must be a string and cannot be undefined, null or empty");
    }
    if (/[/\\#]/.test(resourceId)) {
        throw new Error("Illegal characters ['/', '\\', '#'] cannot be used in Resource ID");
    }
    return true;
}
/**
 * Extracts the final resource id from a REST path. Returns null for
 * non-string/empty input or when the path does not have an even number of
 * segments (a valid path always alternates type/id).
 * @hidden
 */
function getResourceIdFromPath(resourcePath) {
    if (typeof resourcePath !== "string" || !resourcePath) {
        return null;
    }
    const segments = trimSlashFromLeftAndRight(resourcePath).split("/");
    // A well-formed path alternates type/id, so the segment count is even.
    return segments.length % 2 === 0 ? segments[segments.length - 1] : null;
}
/**
 * @hidden
 * Parses a `key=value;`-style Cosmos DB connection string into
 * `{ endpoint, key }`. Throws when AccountEndpoint or AccountKey is missing.
 */
function parseConnectionString(connectionString) {
    const parsed = {};
    for (const keyValueString of connectionString.split(";")) {
        // values may themselves contain "=" (e.g. base64 keys), so re-join the tail
        const [key, ...valueParts] = keyValueString.split("=");
        parsed[key] = valueParts.join("=");
    }
    const { AccountEndpoint, AccountKey } = parsed;
    if (!AccountEndpoint || !AccountKey) {
        throw new Error("Could not parse the provided connection string");
    }
    return {
        endpoint: AccountEndpoint,
        key: AccountKey,
    };
}
698
699// Copyright (c) Microsoft Corporation.
700// Licensed under the MIT license.
/**
 * HTTP status codes (plus a few internal sentinel codes) returned by the
 * Azure Cosmos DB service and used throughout the SDK.
 * @hidden
 */
const StatusCodes = {
    // Success
    Ok: 200,
    Created: 201,
    Accepted: 202,
    NoContent: 204,
    NotModified: 304,
    // Client error
    BadRequest: 400,
    Unauthorized: 401,
    Forbidden: 403,
    NotFound: 404,
    MethodNotAllowed: 405,
    RequestTimeout: 408,
    Conflict: 409,
    Gone: 410,
    PreconditionFailed: 412,
    RequestEntityTooLarge: 413,
    TooManyRequests: 429,
    RetryWith: 449,
    // Server Error
    InternalServerError: 500,
    ServiceUnavailable: 503,
    // System codes (Node.js DNS lookup failure code, not an HTTP status)
    ENOTFOUND: "ENOTFOUND",
    // Operation pause and cancel. These are FAKE status codes for QOS logging purpose only.
    OperationPaused: 1200,
    OperationCancelled: 1201,
};
/**
 * Service substatus codes that refine the primary HTTP status code.
 * Note: the value 1002 is shared by PartitionKeyRangeGone (under 410) and
 * ReadSessionNotAvailable (under 404); they are disambiguated by status code.
 * @hidden
 */
const SubStatusCodes = {
    Unknown: 0,
    // 400: Bad Request Substatus
    CrossPartitionQueryNotServable: 1004,
    // 410: StatusCodeType_Gone: substatus
    PartitionKeyRangeGone: 1002,
    // 404: NotFound Substatus
    ReadSessionNotAvailable: 1002,
    // 403: Forbidden Substatus
    WriteForbidden: 3,
    DatabaseAccountNotFound: 1008,
};
748
749// Copyright (c) Microsoft Corporation.
/**
 * Given a database id, this creates a database link.
 * Would be used when creating or deleting a DocumentCollection
 * or a User in Azure Cosmos DB database service.
 * @param databaseId - The database id
 * @returns A database link in the format of `dbs/{0}`
 * with `{0}` being a Uri escaped version of the databaseId
 * @hidden
 */
function createDatabaseUri(databaseId) {
    const trimmedId = trimSlashFromLeftAndRight(databaseId);
    validateResourceId(trimmedId);
    return `${Constants.Path.DatabasesPathSegment}/${trimmedId}`;
}
/**
 * Given a database and collection id, this creates a collection link.
 * Would be used when updating or deleting a DocumentCollection, creating a
 * Document, a StoredProcedure, a Trigger, a UserDefinedFunction, or when executing a query
 * with CreateDocumentQuery in Azure Cosmos DB database service.
 * @param databaseId - The database id
 * @param collectionId - The collection id
 * @returns A collection link in the format of `dbs/{0}/colls/{1}`
 * with `{0}` being a Uri escaped version of the databaseId and `{1}` being collectionId
 * @hidden
 */
function createDocumentCollectionUri(databaseId, collectionId) {
    const trimmedId = trimSlashFromLeftAndRight(collectionId);
    validateResourceId(trimmedId);
    return `${createDatabaseUri(databaseId)}/${Constants.Path.CollectionsPathSegment}/${trimmedId}`;
}
/**
 * Given a database and user id, this creates a user link.
 * Would be used when creating a Permission, or when replacing or deleting
 * a User in Azure Cosmos DB database service
 * @param databaseId - The database id
 * @param userId - The user id
 * @returns A user link in the format of `dbs/{0}/users/{1}`
 * with `{0}` being a Uri escaped version of the databaseId and `{1}` being userId
 * @hidden
 */
function createUserUri(databaseId, userId) {
    const trimmedId = trimSlashFromLeftAndRight(userId);
    validateResourceId(trimmedId);
    return `${createDatabaseUri(databaseId)}/${Constants.Path.UsersPathSegment}/${trimmedId}`;
}
/**
 * Given a database and collection id, this creates a collection link.
 * Would be used when creating an Attachment, or when replacing
 * or deleting a Document in Azure Cosmos DB database service
 * @param databaseId - The database id
 * @param collectionId - The collection id
 * @param documentId - The document id
 * @returns A document link in the format of
 * `dbs/{0}/colls/{1}/docs/{2}` with `{0}` being a Uri escaped version of
 * the databaseId, `{1}` being collectionId and `{2}` being the documentId
 * @hidden
 */
function createDocumentUri(databaseId, collectionId, documentId) {
    const trimmedId = trimSlashFromLeftAndRight(documentId);
    // item ids allow '?' so they use the item-specific validator
    validateItemResourceId(trimmedId);
    return `${createDocumentCollectionUri(databaseId, collectionId)}/${Constants.Path.DocumentsPathSegment}/${trimmedId}`;
}
/**
 * Given a database, user and permission id, this creates a permission link.
 * Would be used when replacing or deleting a Permission in Azure Cosmos DB database service.
 * @param databaseId -The database Id
 * @param userId -The user Id
 * @param permissionId - The permissionId
 * @returns A permission link in the format of `dbs/{0}/users/{1}/permissions/{2}`
 * with `{0}` being a Uri escaped version of the databaseId, `{1}` being userId and `{2}` being permissionId
 * @hidden
 */
function createPermissionUri(databaseId, userId, permissionId) {
    const trimmedId = trimSlashFromLeftAndRight(permissionId);
    validateResourceId(trimmedId);
    return `${createUserUri(databaseId, userId)}/${Constants.Path.PermissionsPathSegment}/${trimmedId}`;
}
/**
 * Given a database, collection and stored proc id, this creates a stored proc link.
 * Would be used when replacing, executing, or deleting a StoredProcedure in
 * Azure Cosmos DB database service.
 * @param databaseId -The database Id
 * @param collectionId -The collection Id
 * @param storedProcedureId -The stored procedure Id
 * @returns A stored procedure link in the format of
 * `dbs/{0}/colls/{1}/sprocs/{2}` with `{0}` being a Uri escaped version of the databaseId,
 * `{1}` being collectionId and `{2}` being the storedProcedureId
 * @hidden
 */
function createStoredProcedureUri(databaseId, collectionId, storedProcedureId) {
    const trimmedId = trimSlashFromLeftAndRight(storedProcedureId);
    validateResourceId(trimmedId);
    return `${createDocumentCollectionUri(databaseId, collectionId)}/${Constants.Path.StoredProceduresPathSegment}/${trimmedId}`;
}
/**
 * Given a database, collection and trigger id, this creates a trigger link.
 * Would be used when replacing, executing, or deleting a Trigger in Azure Cosmos DB database service
 * @param databaseId -The database Id
 * @param collectionId -The collection Id
 * @param triggerId -The trigger Id
 * @returns A trigger link in the format of
 * `dbs/{0}/colls/{1}/triggers/{2}` with `{0}` being a Uri escaped version of the databaseId,
 * `{1}` being collectionId and `{2}` being the triggerId
 * @hidden
 */
function createTriggerUri(databaseId, collectionId, triggerId) {
    const trimmedId = trimSlashFromLeftAndRight(triggerId);
    validateResourceId(trimmedId);
    return `${createDocumentCollectionUri(databaseId, collectionId)}/${Constants.Path.TriggersPathSegment}/${trimmedId}`;
}
/**
 * Given a database, collection and udf id, this creates a udf link.
 * Would be used when replacing, executing, or deleting a UserDefinedFunction in
 * Azure Cosmos DB database service
 * @param databaseId -The database Id
 * @param collectionId -The collection Id
 * @param udfId -The User Defined Function Id
 * @returns A udf link in the format of `dbs/{0}/colls/{1}/udfs/{2}`
 * with `{0}` being a Uri escaped version of the databaseId, `{1}` being collectionId and `{2}` being the udfId
 * @hidden
 */
function createUserDefinedFunctionUri(databaseId, collectionId, udfId) {
    const trimmedId = trimSlashFromLeftAndRight(udfId);
    validateResourceId(trimmedId);
    return `${createDocumentCollectionUri(databaseId, collectionId)}/${Constants.Path.UserDefinedFunctionsPathSegment}/${trimmedId}`;
}
896
897// Copyright (c) Microsoft Corporation.
/**
 * Extracts the partition key value(s) from `document` according to
 * `partitionKeyDefinition.paths`. Returns `undefined` when no definition
 * or paths are provided; returns the system/undefined sentinel (see
 * `undefinedPartitionKey`) when the single path resolves to nothing.
 *
 * Fix: guard against `null` intermediate values on the path. Previously
 * `typeof obj === "object"` passed for `null` (typeof null is "object")
 * and `part in null` threw a TypeError; such documents now resolve to an
 * undefined partition key instead of crashing.
 * @hidden
 */
function extractPartitionKey(document, partitionKeyDefinition) {
    if (partitionKeyDefinition &&
        partitionKeyDefinition.paths &&
        partitionKeyDefinition.paths.length > 0) {
        const partitionKey = [];
        partitionKeyDefinition.paths.forEach((path) => {
            const pathParts = parsePath(path);
            let obj = document;
            for (const part of pathParts) {
                // `obj !== null` must be checked before using the `in` operator,
                // which throws on non-object (including null) right-hand operands.
                if (obj !== null && typeof obj === "object" && part in obj) {
                    obj = obj[part];
                }
                else {
                    obj = undefined;
                    break;
                }
            }
            partitionKey.push(obj);
        });
        if (partitionKey.length === 1 && partitionKey[0] === undefined) {
            return undefinedPartitionKey(partitionKeyDefinition);
        }
        return partitionKey;
    }
}
/**
 * Returns the sentinel partition key used when a document has no value at the
 * partition key path: an empty array for system-key definitions, `[{}]` otherwise.
 * @hidden
 */
function undefinedPartitionKey(partitionKeyDefinition) {
    return partitionKeyDefinition.systemKey === true ? [] : [{}];
}
937
938// Copyright (c) Microsoft Corporation.
/**
 * Computes a base64-encoded HMAC-SHA256 signature of `message`
 * using the base64-encoded `key`.
 * @hidden
 */
async function hmac(key, message) {
    const decodedKey = Buffer.from(key, "base64");
    const hasher = crypto.createHmac("sha256", decodedKey);
    hasher.update(message);
    return hasher.digest("base64");
}
942
943// Copyright (c) Microsoft Corporation.
/**
 * Builds the Authorization and x-ms-date headers for a request.
 * SAS tokens ("type=sas&..." keys) are URI-encoded and used verbatim;
 * otherwise an HMAC signature is computed from the master key.
 * @hidden
 */
async function generateHeaders(masterKey, method, resourceType = exports.ResourceType.none, resourceId = "", date = new Date()) {
    const isSasToken = masterKey.startsWith("type=sas&");
    const authorization = isSasToken
        ? encodeURIComponent(masterKey)
        : await signature(masterKey, method, resourceType, resourceId, date);
    return {
        [Constants.HttpHeaders.Authorization]: authorization,
        [Constants.HttpHeaders.XDate]: date.toUTCString(),
    };
}
/**
 * Computes the URI-encoded master-key authorization token for a request:
 * HMAC-SHA256 over the lowercased method, resource type, id and date.
 * @hidden
 */
async function signature(masterKey, method, resourceType, resourceId = "", date = new Date()) {
    const type = "master";
    const version = "1.0";
    // The payload is five newline-terminated fields; the last field is empty.
    const payload = [
        method.toLowerCase(),
        resourceType.toLowerCase(),
        resourceId,
        date.toUTCString().toLowerCase(),
        "",
        "",
    ].join("\n");
    const signed = await hmac(masterKey, payload);
    return encodeURIComponent(`type=${type}&ver=${version}&sig=${signed}`);
}
973
974// Copyright (c) Microsoft Corporation.
/**
 * Populates the Authorization header on `headers` using, in priority order:
 * the account key, resource tokens (possibly built from `permissionFeed`),
 * or a caller-supplied tokenProvider.
 * @hidden
 */
async function setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers) {
    // A permission feed is first converted into a resourceId -> token map.
    if (clientOptions.permissionFeed) {
        clientOptions.resourceTokens = {};
        for (const permission of clientOptions.permissionFeed) {
            const id = getResourceIdFromPath(permission.resource);
            if (!id) {
                throw new Error(`authorization error: ${id} \
                is an invalid resourceId in permissionFeed`);
            }
            clientOptions.resourceTokens[id] = permission._token; // TODO: any
        }
    }
    if (clientOptions.key) {
        await setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, clientOptions.key);
    }
    else if (clientOptions.resourceTokens) {
        headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(getAuthorizationTokenUsingResourceTokens(clientOptions.resourceTokens, path, resourceId));
    }
    else if (clientOptions.tokenProvider) {
        headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(await clientOptions.tokenProvider({ verb, path, resourceId, resourceType, headers }));
    }
}
/**
 * The default function for setting header token using the masterKey
 * @hidden
 */
async function setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, masterKey) {
    // TODO This should live in cosmos-sign
    // Offer resource ids are lowercased before signing.
    const effectiveResourceId = resourceType === exports.ResourceType.offer && resourceId
        ? resourceId.toLowerCase()
        : resourceId;
    Object.assign(headers, await generateHeaders(masterKey, verb, resourceType, effectiveResourceId));
}
/**
 * Looks up the most specific resource token for the given path/resourceId.
 * Resolution order: any token for account-level access (empty path and id),
 * an exact resourceId match, a container-path match for six-segment item
 * paths, then a legacy walk up the path trying each id segment.
 * Returns `null` when no token matches.
 * @hidden
 */
// TODO: Resource tokens
function getAuthorizationTokenUsingResourceTokens(resourceTokens, path, resourceId) {
    if (resourceTokens && Object.keys(resourceTokens).length > 0) {
        // For database account access(through getDatabaseAccount API), path and resourceId are "",
        // so in this case we return the first token to be used for creating the auth header as the
        // service will accept any token in this case
        if (!path && !resourceId) {
            return resourceTokens[Object.keys(resourceTokens)[0]];
        }
        // If we have exact resource token for the path use it
        if (resourceId && resourceTokens[resourceId]) {
            return resourceTokens[resourceId];
        }
        // minimum valid path /dbs
        if (!path || path.length < 4) {
            // TODO: This should throw an error
            return null;
        }
        path = trimSlashFromLeftAndRight(path);
        const pathSegments = (path && path.split("/")) || [];
        // Item path
        if (pathSegments.length === 6) {
            // Look for a container token matching the item path
            const containerPath = pathSegments.slice(0, 4).map(decodeURIComponent).join("/");
            if (resourceTokens[containerPath]) {
                return resourceTokens[containerPath];
            }
        }
        // TODO remove in v4: This is legacy behavior that lets someone use a resource token pointing ONLY at an ID
        // It was used when _rid was exposed by the SDK, but now that we are using user provided ids it is not needed
        // However removing it now would be a breaking change
        // if it's an incomplete path like /dbs/db1/colls/, start from the parent resource
        let index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
        for (; index > 0; index -= 2) {
            const id = decodeURI(pathSegments[index]);
            if (resourceTokens[id]) {
                return resourceTokens[id];
            }
        }
    }
    // TODO: This should throw an error
    return null;
}
1057
1058// Copyright (c) Microsoft Corporation.
/**
 * The \@azure/logger configuration for this package.
 * Creates a client logger scoped to the "cosmosdb" namespace.
 */
const defaultLogger = logger$4.createClientLogger("cosmosdb");
1063
1064// Copyright (c) Microsoft Corporation.
1065// ----------------------------------------------------------------------------
1066// Utility methods
1067//
/** @hidden */
function javaScriptFriendlyJSONStringify(s) {
    // JSON allows raw Line Separator (U+2028) and Paragraph Separator (U+2029)
    // characters, but JavaScript source does not, so escape them explicitly.
    const json = JSON.stringify(s);
    return json.replace(/\u2028/g, "\\u2028").replace(/\u2029/g, "\\u2029");
}
/** @hidden */
function bodyFromData(data) {
    // Objects (and arrays) are serialized; strings and other primitives pass through.
    return typeof data === "object" ? javaScriptFriendlyJSONStringify(data) : data;
}
/** Default Content-Type / Accept value for Cosmos requests. @hidden */
const JsonContentType = "application/json";
/**
 * Builds the complete header map for a request: default headers, per-request
 * options, partition key, content negotiation, and authorization.
 * @hidden
 */
async function getHeaders({ clientOptions, defaultHeaders, verb, path, resourceId, resourceType, options = {}, partitionKeyRangeId, useMultipleWriteLocations, partitionKey, }) {
    const headers = Object.assign({ [Constants.HttpHeaders.ResponseContinuationTokenLimitInKB]: 1, [Constants.HttpHeaders.EnableCrossPartitionQuery]: true }, defaultHeaders);
    if (useMultipleWriteLocations) {
        headers[Constants.HttpHeaders.ALLOW_MULTIPLE_WRITES] = true;
    }
    if (options.continuationTokenLimitInKB) {
        headers[Constants.HttpHeaders.ResponseContinuationTokenLimitInKB] =
            options.continuationTokenLimitInKB;
    }
    // `continuationToken` takes precedence over the older `continuation` spelling.
    if (options.continuationToken) {
        headers[Constants.HttpHeaders.Continuation] = options.continuationToken;
    }
    else if (options.continuation) {
        headers[Constants.HttpHeaders.Continuation] = options.continuation;
    }
    // Trigger include options accept either an array or a pre-joined string.
    if (options.preTriggerInclude) {
        headers[Constants.HttpHeaders.PreTriggerInclude] =
            options.preTriggerInclude.constructor === Array
                ? options.preTriggerInclude.join(",")
                : options.preTriggerInclude;
    }
    if (options.postTriggerInclude) {
        headers[Constants.HttpHeaders.PostTriggerInclude] =
            options.postTriggerInclude.constructor === Array
                ? options.postTriggerInclude.join(",")
                : options.postTriggerInclude;
    }
    if (options.offerType) {
        headers[Constants.HttpHeaders.OfferType] = options.offerType;
    }
    if (options.offerThroughput) {
        headers[Constants.HttpHeaders.OfferThroughput] = options.offerThroughput;
    }
    if (options.maxItemCount) {
        headers[Constants.HttpHeaders.PageSize] = options.maxItemCount;
    }
    if (options.accessCondition) {
        if (options.accessCondition.type === "IfMatch") {
            headers[Constants.HttpHeaders.IfMatch] = options.accessCondition.condition;
        }
        else {
            headers[Constants.HttpHeaders.IfNoneMatch] = options.accessCondition.condition;
        }
    }
    if (options.useIncrementalFeed) {
        headers[Constants.HttpHeaders.A_IM] = "Incremental Feed";
    }
    if (options.indexingDirective) {
        headers[Constants.HttpHeaders.IndexingDirective] = options.indexingDirective;
    }
    if (options.consistencyLevel) {
        headers[Constants.HttpHeaders.ConsistencyLevel] = options.consistencyLevel;
    }
    // Integrated cache staleness applies only to item requests; non-numeric
    // values are logged and sent as the string "null".
    if (options.maxIntegratedCacheStalenessInMs && resourceType === exports.ResourceType.item) {
        if (typeof options.maxIntegratedCacheStalenessInMs === "number") {
            headers[Constants.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] =
                options.maxIntegratedCacheStalenessInMs.toString();
        }
        else {
            defaultLogger.error(`RangeError: maxIntegratedCacheStalenessInMs "${options.maxIntegratedCacheStalenessInMs}" is not a valid parameter.`);
            headers[Constants.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] = "null";
        }
    }
    if (options.resourceTokenExpirySeconds) {
        headers[Constants.HttpHeaders.ResourceTokenExpiry] = options.resourceTokenExpirySeconds;
    }
    if (options.sessionToken) {
        headers[Constants.HttpHeaders.SessionToken] = options.sessionToken;
    }
    if (options.enableScanInQuery) {
        headers[Constants.HttpHeaders.EnableScanInQuery] = options.enableScanInQuery;
    }
    if (options.populateQuotaInfo) {
        headers[Constants.HttpHeaders.PopulateQuotaInfo] = options.populateQuotaInfo;
    }
    if (options.populateQueryMetrics) {
        headers[Constants.HttpHeaders.PopulateQueryMetrics] = options.populateQueryMetrics;
    }
    if (options.maxDegreeOfParallelism !== undefined) {
        headers[Constants.HttpHeaders.ParallelizeCrossPartitionQuery] = true;
    }
    // NOTE(review): populateQuotaInfo is handled twice — this second branch
    // overwrites the header above with the literal `true`.
    if (options.populateQuotaInfo) {
        headers[Constants.HttpHeaders.PopulateQuotaInfo] = true;
    }
    // Non-array partition keys are wrapped in an array before serialization.
    if (partitionKey !== undefined && !headers[Constants.HttpHeaders.PartitionKey]) {
        if (partitionKey === null || !Array.isArray(partitionKey)) {
            partitionKey = [partitionKey];
        }
        headers[Constants.HttpHeaders.PartitionKey] = jsonStringifyAndEscapeNonASCII(partitionKey);
    }
    // x-ms-date is set whenever the request will be signed (key or token provider).
    if (clientOptions.key || clientOptions.tokenProvider) {
        headers[Constants.HttpHeaders.XDate] = new Date().toUTCString();
    }
    if (verb === exports.HTTPMethod.post || verb === exports.HTTPMethod.put) {
        if (!headers[Constants.HttpHeaders.ContentType]) {
            headers[Constants.HttpHeaders.ContentType] = JsonContentType;
        }
    }
    if (!headers[Constants.HttpHeaders.Accept]) {
        headers[Constants.HttpHeaders.Accept] = JsonContentType;
    }
    if (partitionKeyRangeId !== undefined) {
        headers[Constants.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId;
    }
    if (options.enableScriptLogging) {
        headers[Constants.HttpHeaders.EnableScriptLogging] = options.enableScriptLogging;
    }
    if (options.disableRUPerMinuteUsage) {
        headers[Constants.HttpHeaders.DisableRUPerMinuteUsage] = true;
    }
    if (clientOptions.key ||
        clientOptions.resourceTokens ||
        clientOptions.tokenProvider ||
        clientOptions.permissionFeed) {
        await setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers);
    }
    return headers;
}
1205
1206// Copyright (c) Microsoft Corporation.
/** Local alias for the uuid v4 generator. @hidden */
const uuid$2 = uuid$3.v4;
/**
 * Checks whether `key` falls within the half-open range [min, max)
 * using string comparison (localeCompare).
 * @hidden
 */
function isKeyInRange(min, max, key) {
    if (key.localeCompare(min) < 0) {
        return false;
    }
    return key.localeCompare(max) < 0;
}
/** Operation types supported by the bulk and batch APIs. */
const BulkOperationType = {
    Create: "Create",
    Upsert: "Upsert",
    Read: "Read",
    Delete: "Delete",
    Replace: "Replace",
    Patch: "Patch",
};
/**
 * True when the operation carries a resource body — i.e. any non-Patch
 * operation whose `resourceBody` is defined.
 * @hidden
 */
function hasResource(operation) {
    if (operation.operationType === "Patch") {
        return false;
    }
    return operation.resourceBody !== undefined;
}
/**
 * Resolves the partition key value to hash for a bulk operation: from the
 * resource body when present, otherwise from the stringified operation key.
 * @hidden
 */
function getPartitionKeyToHash(operation, partitionProperty) {
    let toHashKey;
    if (hasResource(operation)) {
        toHashKey = deepFind(operation.resourceBody, partitionProperty);
    }
    else {
        const stripped = operation.partitionKey && operation.partitionKey.replace(/[[\]"']/g, "");
        toHashKey = stripped || operation.partitionKey;
    }
    // We check for empty object since replace will stringify the value
    // The second check avoids cases where the partitionKey value is actually the string '{}'
    if (toHashKey === "{}" && operation.partitionKey === "[{}]") {
        return {};
    }
    if (toHashKey === "null" && operation.partitionKey === "[null]") {
        return null;
    }
    if (toHashKey === "0" && operation.partitionKey === "[0]") {
        return 0;
    }
    return toHashKey;
}
/**
 * Prepares a bulk operation for dispatch: auto-assigns an id to Create/Upsert
 * bodies when missing, then attaches a stringified partition key derived from
 * the explicit `partitionKey` field, the resource body, or the `[{}]` sentinel
 * for Read/Delete.
 * @hidden
 */
function decorateOperation(operation, definition, options = {}) {
    if (operation.operationType === BulkOperationType.Create ||
        operation.operationType === BulkOperationType.Upsert) {
        const body = operation.resourceBody;
        if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) {
            body.id = uuid$2();
        }
    }
    if ("partitionKey" in operation) {
        const extracted = extractPartitionKey(operation, { paths: ["/partitionKey"] });
        return Object.assign(Object.assign({}, operation), { partitionKey: JSON.stringify(extracted) });
    }
    switch (operation.operationType) {
        case BulkOperationType.Create:
        case BulkOperationType.Replace:
        case BulkOperationType.Upsert: {
            const pk = extractPartitionKey(operation.resourceBody, definition);
            return Object.assign(Object.assign({}, operation), { partitionKey: JSON.stringify(pk) });
        }
        case BulkOperationType.Read:
        case BulkOperationType.Delete:
            return Object.assign(Object.assign({}, operation), { partitionKey: "[{}]" });
        default:
            return operation;
    }
}
/**
 * Splits a batch into array of batches based on cumulative size of its operations by making sure
 * cumulative size of an individual batch is not larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}.
 * If a single operation itself is larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}, that
 * operation would be moved into a batch containing only that operation.
 * @param originalBatch - A batch of operations needed to be checked.
 * @returns
 * @hidden
 */
function splitBatchBasedOnBodySize(originalBatch) {
    if (!originalBatch || originalBatch.operations === undefined || originalBatch.operations.length < 1) {
        return [];
    }
    const firstOperation = originalBatch.operations[0];
    let currentBatchSize = calculateObjectSizeInBytes(firstOperation);
    let currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [firstOperation], indexes: [originalBatch.indexes[0]] });
    const processedBatches = [currentBatch];
    for (let index = 1; index < originalBatch.operations.length; index++) {
        const operation = originalBatch.operations[index];
        const operationSize = calculateObjectSizeInBytes(operation);
        // Start a fresh batch whenever adding this operation would overflow the limit.
        if (currentBatchSize + operationSize > Constants.DefaultMaxBulkRequestBodySizeInBytes) {
            currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [], indexes: [] });
            processedBatches.push(currentBatch);
            currentBatchSize = 0;
        }
        currentBatch.operations.push(operation);
        currentBatch.indexes.push(originalBatch.indexes[index]);
        currentBatchSize += operationSize;
    }
    return processedBatches;
}
/**
 * Calculates size of an JSON object in bytes with utf-8 encoding.
 * @hidden
 */
function calculateObjectSizeInBytes(obj) {
    const utf8Bytes = new TextEncoder().encode(bodyFromData(obj));
    return utf8Bytes.length;
}
/**
 * Assigns a generated id to Create/Upsert batch operations whose resource
 * body lacks one (unless id generation is disabled), then returns the operation.
 * @hidden
 */
function decorateBatchOperation(operation, options = {}) {
    const isCreateOrUpsert = operation.operationType === BulkOperationType.Create ||
        operation.operationType === BulkOperationType.Upsert;
    if (isCreateOrUpsert) {
        const body = operation.resourceBody;
        const idMissing = body.id === undefined || body.id === "";
        if (idMissing && !options.disableAutomaticIdGeneration) {
            body.id = uuid$2();
        }
    }
    return operation;
}
/**
 * Util function for finding partition key values nested in objects at slash (/) separated paths
 * @hidden
 */
function deepFind(document, path) {
    let node = document;
    for (const segment of path.split("/")) {
        if (!(segment in node)) {
            // "_partitionKey" is the synthetic system path; absence there is expected.
            if (segment !== "_partitionKey") {
                console.warn(`Partition key not found, using undefined: ${path} at ${segment}`);
            }
            return "{}";
        }
        node = node[segment];
    }
    return node;
}
1333
1334// Copyright (c) Microsoft Corporation.
1335// Licensed under the MIT license.
/** Operation types supported by the partial document update (patch) API. */
const PatchOperationType = {
    add: "add",
    replace: "replace",
    remove: "remove",
    set: "set",
    incr: "incr",
};
1343
1344// Copyright (c) Microsoft Corporation.
1345// Licensed under the MIT license.
/** Determines the connection behavior of the CosmosClient. Note, we currently only support Gateway Mode. */
exports.ConnectionMode = void 0;
// The IIFE assigns both name->value and value->name mappings onto exports.ConnectionMode,
// creating the object if it does not yet exist.
(function (ConnectionMode) {
    /** Gateway mode talks to an intermediate gateway which handles the direct communication with your individual partitions. */
    ConnectionMode[ConnectionMode["Gateway"] = 0] = "Gateway";
})(exports.ConnectionMode || (exports.ConnectionMode = {}));
1352
/**
 * Default connection policy applied when the caller does not supply one.
 * Frozen to prevent accidental mutation of shared defaults.
 * @hidden
 */
const defaultConnectionPolicy = Object.freeze({
    connectionMode: exports.ConnectionMode.Gateway,
    requestTimeout: 60000,
    enableEndpointDiscovery: true,
    preferredLocations: [],
    retryOptions: {
        maxRetryAttemptCount: 9,
        fixedRetryIntervalInMilliseconds: 0,
        maxWaitTimeInSeconds: 30,
    },
    useMultipleWriteLocations: true,
    endpointRefreshRateInMs: 300000, // 5 minutes
    enableBackgroundEndpointRefreshing: true,
});
1370
1371// Copyright (c) Microsoft Corporation.
1372// Licensed under the MIT license.
/**
 * Represents the consistency levels supported for Azure Cosmos DB client operations.<br>
 * The requested ConsistencyLevel must match or be weaker than that provisioned for the database account.
 * Consistency levels.
 *
 * Consistency levels by order of strength are Strong, BoundedStaleness, Session, Consistent Prefix, and Eventual.
 *
 * See https://aka.ms/cosmos-consistency for more detailed documentation on Consistency Levels.
 */
exports.ConsistencyLevel = void 0;
// The IIFE assigns each level onto exports.ConsistencyLevel, creating the object if needed.
(function (ConsistencyLevel) {
    /**
     * Strong Consistency guarantees that read operations always return the value that was last written.
     */
    ConsistencyLevel["Strong"] = "Strong";
    /**
     * Bounded Staleness guarantees that reads are not too out-of-date.
     * This can be configured based on number of operations (MaxStalenessPrefix) or time (MaxStalenessIntervalInSeconds).
     */
    ConsistencyLevel["BoundedStaleness"] = "BoundedStaleness";
    /**
     * Session Consistency guarantees monotonic reads (you never read old data, then new, then old again),
     * monotonic writes (writes are ordered) and read your writes (your writes are immediately visible to your reads)
     * within any single session.
     */
    ConsistencyLevel["Session"] = "Session";
    /**
     * Eventual Consistency guarantees that reads will return a subset of writes.
     * All writes will eventually be available for reads.
     */
    ConsistencyLevel["Eventual"] = "Eventual";
    /**
     * ConsistentPrefix Consistency guarantees that reads will return some prefix of all writes with no gaps.
     * All writes will eventually be available for reads.
     */
    ConsistencyLevel["ConsistentPrefix"] = "ConsistentPrefix";
})(exports.ConsistencyLevel || (exports.ConsistencyLevel = {}));
1410
1411// Copyright (c) Microsoft Corporation.
/**
 * Represents a DatabaseAccount in the Azure Cosmos DB database service.
 */
class DatabaseAccount {
    // TODO: body - any
    /**
     * @param body - Raw database account response body from the service.
     * @param headers - Response headers carrying the media storage quota/usage values.
     */
    constructor(body, headers) {
        /** The list of writable locations for a geo-replicated database account. */
        this.writableLocations = [];
        /** The list of readable locations for a geo-replicated database account. */
        this.readableLocations = [];
        this.databasesLink = "/dbs/";
        this.mediaLink = "/media/";
        this.maxMediaStorageUsageInMB = headers[Constants.HttpHeaders.MaxMediaStorageUsageInMB];
        this.currentMediaStorageUsageInMB = headers[Constants.HttpHeaders.CurrentMediaStorageUsageInMB];
        // Falls back to Session consistency when the account reports no policy.
        this.consistencyPolicy = body.userConsistencyPolicy
            ? body.userConsistencyPolicy.defaultConsistencyLevel
            : exports.ConsistencyLevel.Session;
        // Replication locations are ignored for the "localhost" (emulator) account id.
        if (body[Constants.WritableLocations] && body.id !== "localhost") {
            this.writableLocations = body[Constants.WritableLocations];
        }
        if (body[Constants.ReadableLocations] && body.id !== "localhost") {
            this.readableLocations = body[Constants.ReadableLocations];
        }
        // The flag may arrive as a boolean or as the string "true".
        if (body[Constants.ENABLE_MULTIPLE_WRITABLE_LOCATIONS]) {
            this.enableMultipleWritableLocations =
                body[Constants.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === true ||
                    body[Constants.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === "true";
        }
    }
    /**
     * The self-link for Databases in the databaseAccount.
     * @deprecated Use `databasesLink`
     */
    get DatabasesLink() {
        return this.databasesLink;
    }
    /**
     * The self-link for Media in the databaseAccount.
     * @deprecated Use `mediaLink`
     */
    get MediaLink() {
        return this.mediaLink;
    }
    /**
     * Attachment content (media) storage quota in MBs ( Retrieved from gateway ).
     * @deprecated use `maxMediaStorageUsageInMB`
     */
    get MaxMediaStorageUsageInMB() {
        return this.maxMediaStorageUsageInMB;
    }
    /**
     * Current attachment content (media) usage in MBs (Retrieved from gateway )
     *
     * Value is returned from cached information updated periodically and is not guaranteed
     * to be real time.
     *
     * @deprecated use `currentMediaStorageUsageInMB`
     */
    get CurrentMediaStorageUsageInMB() {
        return this.currentMediaStorageUsageInMB;
    }
    /**
     * Gets the UserConsistencyPolicy settings.
     * @deprecated use `consistencyPolicy`
     */
    get ConsistencyPolicy() {
        return this.consistencyPolicy;
    }
}
1481
1482// Copyright (c) Microsoft Corporation.
1483// Licensed under the MIT license.
/** Defines a target data type of an index path specification in the Azure Cosmos DB service. */
exports.DataType = void 0;
(function (values) {
    /** Represents a numeric data type. */
    values.Number = "Number";
    /** Represents a string data type. */
    values.String = "String";
    /** Represents a point data type. */
    values.Point = "Point";
    /** Represents a line string data type. */
    values.LineString = "LineString";
    /** Represents a polygon data type. */
    values.Polygon = "Polygon";
    /** Represents a multi-polygon data type. */
    values.MultiPolygon = "MultiPolygon";
})(exports.DataType || (exports.DataType = {}));
1500
1501// Copyright (c) Microsoft Corporation.
1502// Licensed under the MIT license.
1503/**
1504 * Specifies the supported indexing modes.
1505 */
1506exports.IndexingMode = void 0;
1507(function (IndexingMode) {
1508 /**
1509 * Index is updated synchronously with a create or update operation.
1510 *
1511 * With consistent indexing, query behavior is the same as the default consistency level for the container.
1512 * The index is always kept up to date with the data.
1513 */
1514 IndexingMode["consistent"] = "consistent";
1515 /**
1516 * Index is updated asynchronously with respect to a create or update operation.
1517 *
1518 * With lazy indexing, queries are eventually consistent. The index is updated when the container is idle.
1519 */
1520 IndexingMode["lazy"] = "lazy";
1521 /** No Index is provided. */
1522 IndexingMode["none"] = "none";
1523})(exports.IndexingMode || (exports.IndexingMode = {}));
1524
/* The target data type of a spatial path */
exports.SpatialType = void 0;
(function (types) {
    types.LineString = "LineString";
    types.MultiPolygon = "MultiPolygon";
    types.Point = "Point";
    types.Polygon = "Polygon";
})(exports.SpatialType || (exports.SpatialType = {}));
1533
1534// Copyright (c) Microsoft Corporation.
1535// Licensed under the MIT license.
1536/**
1537 * Specifies the supported Index types.
1538 */
1539exports.IndexKind = void 0;
1540(function (IndexKind) {
1541 /**
1542 * This is supplied for a path which requires sorting.
1543 */
1544 IndexKind["Range"] = "Range";
1545 /**
1546 * This is supplied for a path which requires geospatial indexing.
1547 */
1548 IndexKind["Spatial"] = "Spatial";
1549})(exports.IndexKind || (exports.IndexKind = {}));
1550
1551// Copyright (c) Microsoft Corporation.
1552// Licensed under the MIT license.
1553/**
1554 * Enum for permission mode values.
1555 */
1556exports.PermissionMode = void 0;
1557(function (PermissionMode) {
1558 /** Permission not valid. */
1559 PermissionMode["None"] = "none";
1560 /** Permission applicable for read operations only. */
1561 PermissionMode["Read"] = "read";
1562 /** Permission applicable for all operations. */
1563 PermissionMode["All"] = "all";
1564})(exports.PermissionMode || (exports.PermissionMode = {}));
1565
1566// Copyright (c) Microsoft Corporation.
1567// Licensed under the MIT license.
1568/**
1569 * Enum for trigger operation values.
1570 * specifies the operations on which a trigger should be executed.
1571 */
1572exports.TriggerOperation = void 0;
1573(function (TriggerOperation) {
1574 /** All operations. */
1575 TriggerOperation["All"] = "all";
1576 /** Create operations only. */
1577 TriggerOperation["Create"] = "create";
1578 /** Update operations only. */
1579 TriggerOperation["Update"] = "update";
1580 /** Delete operations only. */
1581 TriggerOperation["Delete"] = "delete";
1582 /** Replace operations only. */
1583 TriggerOperation["Replace"] = "replace";
1584})(exports.TriggerOperation || (exports.TriggerOperation = {}));
1585
1586// Copyright (c) Microsoft Corporation.
1587// Licensed under the MIT license.
1588/**
1589 * Enum for trigger type values.
1590 * Specifies the type of the trigger.
1591 */
1592exports.TriggerType = void 0;
1593(function (TriggerType) {
1594 /** Trigger should be executed before the associated operation(s). */
1595 TriggerType["Pre"] = "pre";
1596 /** Trigger should be executed after the associated operation(s). */
1597 TriggerType["Post"] = "post";
1598})(exports.TriggerType || (exports.TriggerType = {}));
1599
1600// Copyright (c) Microsoft Corporation.
1601// Licensed under the MIT license.
1602/**
1603 * Enum for udf type values.
1604 * Specifies the types of user defined functions.
1605 */
1606exports.UserDefinedFunctionType = void 0;
1607(function (UserDefinedFunctionType) {
1608 /** The User Defined Function is written in JavaScript. This is currently the only option. */
1609 UserDefinedFunctionType["Javascript"] = "Javascript";
1610})(exports.UserDefinedFunctionType || (exports.UserDefinedFunctionType = {}));
1611
1612// Copyright (c) Microsoft Corporation.
1613// Licensed under the MIT license.
exports.GeospatialType = void 0;
(function (types) {
    /** Represents data in round-earth coordinate system. */
    types.Geography = "Geography";
    /** Represents data in Euclidean (flat) coordinate system. */
    types.Geometry = "Geometry";
})(exports.GeospatialType || (exports.GeospatialType = {}));
1621
/**
 * Error type surfaced by client operations; extends the built-in Error.
 * NOTE(review): no members are declared here — response details (code,
 * headers, body) are presumably attached at the throw sites; verify
 * against callers before relying on any particular property.
 */
class ErrorResponse extends Error {
}
1624
1625// Copyright (c) Microsoft Corporation.
/**
 * Wraps a single resource returned by the service together with its response
 * headers, HTTP status code and sub-status code.
 */
class ResourceResponse {
    constructor(resource, headers, statusCode, substatus) {
        this.resource = resource;
        this.headers = headers;
        this.statusCode = statusCode;
        this.substatus = substatus;
    }
    /** Request charge (RUs) parsed from the headers; 0 when absent or not numeric. */
    get requestCharge() {
        const rawCharge = this.headers[Constants.HttpHeaders.RequestCharge];
        const parsed = Number(rawCharge);
        return parsed || 0;
    }
    /** Server activity id header value. */
    get activityId() {
        return this.headers[Constants.HttpHeaders.ActivityId];
    }
    /** ETag header value of the returned resource. */
    get etag() {
        return this.headers[Constants.HttpHeaders.ETag];
    }
}
1643
1644// Copyright (c) Microsoft Corporation.
/**
 * Wraps a page of feed/query results together with the response headers and
 * a flag indicating whether more pages are available.
 */
class FeedResponse {
    constructor(resources, headers, hasMoreResults) {
        this.resources = resources;
        this.headers = headers;
        this.hasMoreResults = hasMoreResults;
    }
    /** Alias of `continuationToken`. */
    get continuation() {
        return this.continuationToken;
    }
    /** Continuation token header used to fetch the next page, if any. */
    get continuationToken() {
        return this.headers[Constants.HttpHeaders.Continuation];
    }
    /** Raw query-metrics header value. */
    get queryMetrics() {
        return this.headers[Constants.HttpHeaders.QueryMetrics];
    }
    /**
     * Request charge (RUs) for this page.
     * Fixed: previously returned the raw header string; now parsed to a number
     * (0 when the header is absent), consistent with ResourceResponse.requestCharge.
     */
    get requestCharge() {
        return Number(this.headers[Constants.HttpHeaders.RequestCharge]) || 0;
    }
    /** Server activity id header value. */
    get activityId() {
        return this.headers[Constants.HttpHeaders.ActivityId];
    }
}
1667
1668// Copyright (c) Microsoft Corporation.
1669// Licensed under the MIT license.
1670/**
1671 * @hidden
1672 */
const TimeoutErrorCode = "TimeoutError";
/** Error raised when an operation exceeds its allotted time. */
class TimeoutError extends Error {
    constructor(message = "Timeout Error") {
        super(message);
        // Expose the code via both `code` and `name` so callers can detect
        // the condition through either property.
        Object.assign(this, { code: TimeoutErrorCode, name: TimeoutErrorCode });
    }
}
1681
1682// Copyright (c) Microsoft Corporation.
1683// Licensed under the MIT license.
/** Client-observed metrics for a query; currently just the request charge. */
class ClientSideMetrics {
    constructor(requestCharge) {
        this.requestCharge = requestCharge;
    }
    /**
     * Adds one or more ClientSideMetrics to a copy of this instance and returns the result.
     */
    add(...clientSideMetricsArray) {
        const total = clientSideMetricsArray.reduce((sum, metrics) => {
            if (metrics == null) {
                throw new Error("clientSideMetrics has null or undefined item(s)");
            }
            return sum + metrics.requestCharge;
        }, this.requestCharge);
        return new ClientSideMetrics(total);
    }
    /** Aggregates an argument list of ClientSideMetrics, starting from zero. */
    static createFromArray(...clientSideMetricsArray) {
        if (clientSideMetricsArray == null) {
            throw new Error("clientSideMetricsArray is null or undefined item(s)");
        }
        return this.zero.add(...clientSideMetricsArray);
    }
}
ClientSideMetrics.zero = new ClientSideMetrics(0);
1709
1710// Copyright (c) Microsoft Corporation.
1711// Licensed under the MIT license.
/**
 * @hidden
 * Wire-format keys (used when parsing the delimited query-metrics header) and
 * the human-readable labels used when formatting metrics for display.
 */
var QueryMetricsConstants = {
    // QueryMetrics
    RetrievedDocumentCount: "retrievedDocumentCount",
    RetrievedDocumentSize: "retrievedDocumentSize",
    OutputDocumentCount: "outputDocumentCount",
    OutputDocumentSize: "outputDocumentSize",
    IndexHitRatio: "indexUtilizationRatio",
    IndexHitDocumentCount: "indexHitDocumentCount",
    TotalQueryExecutionTimeInMs: "totalExecutionTimeInMs",
    // QueryPreparationTimes
    QueryCompileTimeInMs: "queryCompileTimeInMs",
    LogicalPlanBuildTimeInMs: "queryLogicalPlanBuildTimeInMs",
    PhysicalPlanBuildTimeInMs: "queryPhysicalPlanBuildTimeInMs",
    QueryOptimizationTimeInMs: "queryOptimizationTimeInMs",
    // QueryTimes
    IndexLookupTimeInMs: "indexLookupTimeInMs",
    DocumentLoadTimeInMs: "documentLoadTimeInMs",
    VMExecutionTimeInMs: "VMExecutionTimeInMs",
    DocumentWriteTimeInMs: "writeOutputTimeInMs",
    // RuntimeExecutionTimes
    QueryEngineTimes: "queryEngineTimes",
    SystemFunctionExecuteTimeInMs: "systemFunctionExecuteTimeInMs",
    UserDefinedFunctionExecutionTimeInMs: "userFunctionExecuteTimeInMs",
    // QueryMetrics Text
    RetrievedDocumentCountText: "Retrieved Document Count",
    RetrievedDocumentSizeText: "Retrieved Document Size",
    OutputDocumentCountText: "Output Document Count",
    OutputDocumentSizeText: "Output Document Size",
    IndexUtilizationText: "Index Utilization",
    TotalQueryExecutionTimeText: "Total Query Execution Time",
    // QueryPreparationTimes Text
    QueryPreparationTimesText: "Query Preparation Times",
    QueryCompileTimeText: "Query Compilation Time",
    LogicalPlanBuildTimeText: "Logical Plan Build Time",
    PhysicalPlanBuildTimeText: "Physical Plan Build Time",
    QueryOptimizationTimeText: "Query Optimization Time",
    // QueryTimes Text
    QueryEngineTimesText: "Query Engine Times",
    IndexLookupTimeText: "Index Lookup Time",
    DocumentLoadTimeText: "Document Load Time",
    WriteOutputTimeText: "Document Write Time",
    // RuntimeExecutionTimes Text
    RuntimeExecutionTimesText: "Runtime Execution Times",
    TotalExecutionTimeText: "Query Engine Execution Time",
    SystemFunctionExecuteTimeText: "System Function Execution Time",
    UserDefinedFunctionExecutionTimeText: "User-defined Function Execution Time",
    // ClientSideQueryMetrics Text
    ClientSideQueryMetricsText: "Client Side Metrics",
    RetriesText: "Retry Count",
    RequestChargeText: "Request Charge",
    FetchExecutionRangesText: "Partition Execution Timeline",
    SchedulingMetricsText: "Scheduling Metrics",
};
1765
1766// Copyright (c) Microsoft Corporation.
1767// Licensed under the MIT license.
1768// Ported this implementation to javascript:
1769// https://referencesource.microsoft.com/#mscorlib/system/timespan.cs,83e476c1ae112117
1770/** @hidden */
1771// Copyright (c) Microsoft Corporation.
1772// Licensed under the MIT license.
// One tick is 100 nanoseconds (10,000 ticks per millisecond), matching the
// .NET TimeSpan implementation this module was ported from.
const ticksPerMillisecond = 10000;
/** @hidden */
const millisecondsPerTick = 1.0 / ticksPerMillisecond;
/** @hidden */
const ticksPerSecond = ticksPerMillisecond * 1000; // 10,000,000
/** @hidden */
const secondsPerTick = 1.0 / ticksPerSecond; // 1e-7
/** @hidden */
const ticksPerMinute = ticksPerSecond * 60; // 600,000,000
/** @hidden */
const minutesPerTick = 1.0 / ticksPerMinute; // 1.6666666666667e-9
/** @hidden */
const ticksPerHour = ticksPerMinute * 60; // 36,000,000,000
/** @hidden */
const hoursPerTick = 1.0 / ticksPerHour; // 2.77777777777777778e-11
/** @hidden */
const ticksPerDay = ticksPerHour * 24; // 864,000,000,000
/** @hidden */
const daysPerTick = 1.0 / ticksPerDay; // 1.1574074074074074074e-12
/** @hidden */
const millisPerSecond = 1000;
/** @hidden */
const millisPerMinute = millisPerSecond * 60; // 60,000
/** @hidden */
const millisPerHour = millisPerMinute * 60; // 3,600,000
/** @hidden */
const millisPerDay = millisPerHour * 24; // 86,400,000
// Bounds keep the tick count within Number.MAX_SAFE_INTEGER.
/** @hidden */
const maxMilliSeconds = Number.MAX_SAFE_INTEGER / ticksPerMillisecond;
/** @hidden */
const minMilliSeconds = Number.MIN_SAFE_INTEGER / ticksPerMillisecond;
1804/**
1805 * Represents a time interval.
1806 *
1807 * @param days - Number of days.
1808 * @param hours - Number of hours.
1809 * @param minutes - Number of minutes.
1810 * @param seconds - Number of seconds.
1811 * @param milliseconds - Number of milliseconds.
1812 * @hidden
1813 */
class TimeSpan {
    /**
     * Builds an interval from integer components; the total is stored as
     * 100-nanosecond ticks in `_ticks`.
     * @throws Error when any component is not an integer or the total is
     * outside the safe-integer tick range.
     */
    constructor(days, hours, minutes, seconds, milliseconds) {
        // Constructor
        if (!Number.isInteger(days)) {
            throw new Error("days is not an integer");
        }
        if (!Number.isInteger(hours)) {
            throw new Error("hours is not an integer");
        }
        if (!Number.isInteger(minutes)) {
            throw new Error("minutes is not an integer");
        }
        if (!Number.isInteger(seconds)) {
            throw new Error("seconds is not an integer");
        }
        if (!Number.isInteger(milliseconds)) {
            throw new Error("milliseconds is not an integer");
        }
        const totalMilliSeconds = (days * 3600 * 24 + hours * 3600 + minutes * 60 + seconds) * 1000 + milliseconds;
        if (totalMilliSeconds > maxMilliSeconds || totalMilliSeconds < minMilliSeconds) {
            throw new Error("Total number of milliseconds was either too large or too small");
        }
        this._ticks = totalMilliSeconds * ticksPerMillisecond;
    }
    /**
     * Returns a new TimeSpan object whose value is the sum of the specified TimeSpan object and this instance.
     * @param ts - The time interval to add.
     */
    add(ts) {
        if (TimeSpan.additionDoesOverflow(this._ticks, ts._ticks)) {
            throw new Error("Adding the two timestamps causes an overflow.");
        }
        const results = this._ticks + ts._ticks;
        return TimeSpan.fromTicks(results);
    }
    /**
     * Returns a new TimeSpan object whose value is the difference of the specified TimeSpan object and this instance.
     * @param ts - The time interval to subtract.
     */
    subtract(ts) {
        if (TimeSpan.subtractionDoesUnderflow(this._ticks, ts._ticks)) {
            throw new Error("Subtracting the two timestamps causes an underflow.");
        }
        const results = this._ticks - ts._ticks;
        return TimeSpan.fromTicks(results);
    }
    /**
     * Compares this instance to a specified object and returns an integer that indicates whether this
     * instance is shorter than, equal to, or longer than the specified object.
     * @param value - The time interval to compare against.
     */
    compareTo(value) {
        if (value == null) {
            return 1;
        }
        if (!TimeSpan.isTimeSpan(value)) {
            throw new Error("Argument must be a TimeSpan object");
        }
        return TimeSpan.compare(this, value);
    }
    /**
     * Returns a new TimeSpan object whose value is the absolute value of the current TimeSpan object.
     */
    duration() {
        return TimeSpan.fromTicks(this._ticks >= 0 ? this._ticks : -this._ticks);
    }
    /**
     * Returns a value indicating whether this instance is equal to a specified object.
     * @param value - The time interval to check for equality.
     */
    equals(value) {
        if (TimeSpan.isTimeSpan(value)) {
            return this._ticks === value._ticks;
        }
        return false;
    }
    /**
     * Returns a new TimeSpan object whose value is the negated value of this instance.
     */
    negate() {
        return TimeSpan.fromTicks(-this._ticks);
    }
    /** Total number of whole days in the interval (floored, not the day component). */
    days() {
        return Math.floor(this._ticks / ticksPerDay);
    }
    /** Total number of whole hours in the interval (floored, not the hour component). */
    hours() {
        return Math.floor(this._ticks / ticksPerHour);
    }
    /** Total number of whole milliseconds in the interval (floored). */
    milliseconds() {
        return Math.floor(this._ticks / ticksPerMillisecond);
    }
    /** Total number of whole seconds in the interval (floored). */
    seconds() {
        return Math.floor(this._ticks / ticksPerSecond);
    }
    /** Raw tick count (100ns units). */
    ticks() {
        return this._ticks;
    }
    totalDays() {
        return this._ticks * daysPerTick;
    }
    totalHours() {
        return this._ticks * hoursPerTick;
    }
    totalMilliseconds() {
        return this._ticks * millisecondsPerTick;
    }
    totalMinutes() {
        return this._ticks * minutesPerTick;
    }
    totalSeconds() {
        return this._ticks * secondsPerTick;
    }
    /** Creates a TimeSpan directly from a tick count, bypassing component validation. */
    static fromTicks(value) {
        const timeSpan = new TimeSpan(0, 0, 0, 0, 0);
        timeSpan._ticks = value;
        return timeSpan;
    }
    /**
     * Duck-type check used by compareTo/equals.
     * Fixed: the previous implementation returned `timespan._ticks` itself,
     * which is falsy for a zero-length TimeSpan (so TimeSpan.zero was not
     * recognized and zero.equals(zero) was false) and threw a TypeError when
     * passed null or undefined.
     */
    static isTimeSpan(timespan) {
        return timespan != null && typeof timespan._ticks === "number";
    }
    /** True when a + b loses precision/overflows the double representation. */
    static additionDoesOverflow(a, b) {
        const c = a + b;
        return a !== c - b || b !== c - a;
    }
    /** True when a - b loses precision/underflows the double representation. */
    static subtractionDoesUnderflow(a, b) {
        const c = a - b;
        return a !== c + b || b !== a - c;
    }
    /** Returns 1, -1 or 0 depending on the ordering of the two intervals. */
    static compare(t1, t2) {
        if (t1._ticks > t2._ticks) {
            return 1;
        }
        if (t1._ticks < t2._ticks) {
            return -1;
        }
        return 0;
    }
    /**
     * Builds a TimeSpan from `value` units of `scale` milliseconds each.
     * @throws Error when the value is NaN or the result is out of range.
     */
    static interval(value, scale) {
        if (isNaN(value)) {
            throw new Error("value must be a number");
        }
        const milliseconds = value * scale;
        if (milliseconds > maxMilliSeconds || milliseconds < minMilliSeconds) {
            throw new Error("timespan too long");
        }
        return TimeSpan.fromTicks(Math.floor(milliseconds * ticksPerMillisecond));
    }
    static fromMilliseconds(value) {
        return TimeSpan.interval(value, 1);
    }
    static fromSeconds(value) {
        return TimeSpan.interval(value, millisPerSecond);
    }
    static fromMinutes(value) {
        return TimeSpan.interval(value, millisPerMinute);
    }
    static fromHours(value) {
        return TimeSpan.interval(value, millisPerHour);
    }
    static fromDays(value) {
        return TimeSpan.interval(value, millisPerDay);
    }
}
1978TimeSpan.zero = new TimeSpan(0, 0, 0, 0, 0);
1979TimeSpan.maxValue = TimeSpan.fromTicks(Number.MAX_SAFE_INTEGER);
1980TimeSpan.minValue = TimeSpan.fromTicks(Number.MIN_SAFE_INTEGER);
1981
1982// Copyright (c) Microsoft Corporation.
1983/**
1984 * @hidden
1985 */
function parseDelimitedString(delimitedString) {
    if (delimitedString == null) {
        throw new Error("delimitedString is null or undefined");
    }
    const metrics = {};
    // Header format is "key=value;key=value;..." with numeric values.
    const headerAttributes = delimitedString.split(";");
    for (const attribute of headerAttributes) {
        const attributeKeyValue = attribute.split("=");
        if (attributeKeyValue.length !== 2) {
            // Fixed typo in the error message ("recieved" -> "received").
            throw new Error("received a malformed delimited string");
        }
        const attributeKey = attributeKeyValue[0];
        const attributeValue = parseFloat(attributeKeyValue[1]);
        metrics[attributeKey] = attributeValue;
    }
    return metrics;
}
2003/**
2004 * @hidden
2005 */
function timeSpanFromMetrics(metrics /* TODO: any */, key) {
    // Missing keys fall back to a zero-length interval.
    return key in metrics ? TimeSpan.fromMilliseconds(metrics[key]) : TimeSpan.zero;
}
2012
2013// Copyright (c) Microsoft Corporation.
/**
 * Aggregates the time the backend spent preparing a query: compilation,
 * logical/physical plan build and optimization.
 */
class QueryPreparationTimes {
    constructor(queryCompilationTime, logicalPlanBuildTime, physicalPlanBuildTime, queryOptimizationTime) {
        this.queryCompilationTime = queryCompilationTime;
        this.logicalPlanBuildTime = logicalPlanBuildTime;
        this.physicalPlanBuildTime = physicalPlanBuildTime;
        this.queryOptimizationTime = queryOptimizationTime;
    }
    /**
     * returns a new QueryPreparationTimes instance that is the addition of this and the arguments.
     */
    add(...queryPreparationTimesArray) {
        let compile = this.queryCompilationTime;
        let logical = this.logicalPlanBuildTime;
        let physical = this.physicalPlanBuildTime;
        let optimize = this.queryOptimizationTime;
        for (const other of queryPreparationTimesArray) {
            if (other == null) {
                throw new Error("queryPreparationTimesArray has null or undefined item(s)");
            }
            compile = compile.add(other.queryCompilationTime);
            logical = logical.add(other.logicalPlanBuildTime);
            physical = physical.add(other.physicalPlanBuildTime);
            optimize = optimize.add(other.queryOptimizationTime);
        }
        return new QueryPreparationTimes(compile, logical, physical, optimize);
    }
    /**
     * Output the QueryPreparationTimes as a delimited string.
     */
    toDelimitedString() {
        const parts = [
            `${QueryMetricsConstants.QueryCompileTimeInMs}=${this.queryCompilationTime.totalMilliseconds()}`,
            `${QueryMetricsConstants.LogicalPlanBuildTimeInMs}=${this.logicalPlanBuildTime.totalMilliseconds()}`,
            `${QueryMetricsConstants.PhysicalPlanBuildTimeInMs}=${this.physicalPlanBuildTime.totalMilliseconds()}`,
            `${QueryMetricsConstants.QueryOptimizationTimeInMs}=${this.queryOptimizationTime.totalMilliseconds()}`,
        ];
        return parts.join(";");
    }
    /**
     * Returns a new instance of the QueryPreparationTimes class that is the
     * aggregation of an array of QueryPreparationTimes.
     */
    static createFromArray(queryPreparationTimesArray) {
        if (queryPreparationTimesArray == null) {
            throw new Error("queryPreparationTimesArray is null or undefined item(s)");
        }
        return QueryPreparationTimes.zero.add(...queryPreparationTimesArray);
    }
    /**
     * Returns a new instance of the QueryPreparationTimes class this is deserialized from a delimited string.
     */
    static createFromDelimitedString(delimitedString) {
        const metrics = parseDelimitedString(delimitedString);
        return new QueryPreparationTimes(timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryCompileTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.LogicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.PhysicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryOptimizationTimeInMs));
    }
}
2067QueryPreparationTimes.zero = new QueryPreparationTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero);
2068
2069// Copyright (c) Microsoft Corporation.
/**
 * Aggregates the query engine's runtime execution times (engine total,
 * system-function time and user-defined-function time).
 */
class RuntimeExecutionTimes {
    constructor(queryEngineExecutionTime, systemFunctionExecutionTime, userDefinedFunctionExecutionTime) {
        this.queryEngineExecutionTime = queryEngineExecutionTime;
        this.systemFunctionExecutionTime = systemFunctionExecutionTime;
        this.userDefinedFunctionExecutionTime = userDefinedFunctionExecutionTime;
    }
    /**
     * returns a new RuntimeExecutionTimes instance that is the addition of this and the arguments.
     */
    add(...runtimeExecutionTimesArray) {
        let engine = this.queryEngineExecutionTime;
        let system = this.systemFunctionExecutionTime;
        let udf = this.userDefinedFunctionExecutionTime;
        for (const other of runtimeExecutionTimesArray) {
            if (other == null) {
                throw new Error("runtimeExecutionTimes has null or undefined item(s)");
            }
            engine = engine.add(other.queryEngineExecutionTime);
            system = system.add(other.systemFunctionExecutionTime);
            udf = udf.add(other.userDefinedFunctionExecutionTime);
        }
        return new RuntimeExecutionTimes(engine, system, udf);
    }
    /**
     * Output the RuntimeExecutionTimes as a delimited string.
     */
    toDelimitedString() {
        const systemPart = `${QueryMetricsConstants.SystemFunctionExecuteTimeInMs}=${this.systemFunctionExecutionTime.totalMilliseconds()}`;
        const udfPart = `${QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs}=${this.userDefinedFunctionExecutionTime.totalMilliseconds()}`;
        return `${systemPart};${udfPart}`;
    }
    /**
     * Returns a new instance of the RuntimeExecutionTimes class that is
     * the aggregation of an array of RuntimeExecutionTimes.
     */
    static createFromArray(runtimeExecutionTimesArray) {
        if (runtimeExecutionTimesArray == null) {
            throw new Error("runtimeExecutionTimesArray is null or undefined item(s)");
        }
        return RuntimeExecutionTimes.zero.add(...runtimeExecutionTimesArray);
    }
    /**
     * Returns a new instance of the RuntimeExecutionTimes class this is deserialized from a delimited string.
     * The engine time is derived as VM time minus index-lookup, document-load
     * and document-write times.
     */
    static createFromDelimitedString(delimitedString) {
        const metrics = parseDelimitedString(delimitedString);
        const queryEngineExecutionTime = TimeSpan.zero
            .add(timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs))
            .subtract(timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs))
            .subtract(timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs))
            .subtract(timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs));
        return new RuntimeExecutionTimes(queryEngineExecutionTime, timeSpanFromMetrics(metrics, QueryMetricsConstants.SystemFunctionExecuteTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs));
    }
}
2127RuntimeExecutionTimes.zero = new RuntimeExecutionTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero);
2128
2129// Copyright (c) Microsoft Corporation.
/**
 * Server-side metrics for a query execution (parsed from the delimited
 * query-metrics header) combined with client-side metrics.
 */
class QueryMetrics {
    constructor(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, queryPreparationTimes, indexLookupTime, documentLoadTime, vmExecutionTime, runtimeExecutionTimes, documentWriteTime, clientSideMetrics) {
        this.retrievedDocumentCount = retrievedDocumentCount;
        this.retrievedDocumentSize = retrievedDocumentSize;
        this.outputDocumentCount = outputDocumentCount;
        this.outputDocumentSize = outputDocumentSize;
        this.indexHitDocumentCount = indexHitDocumentCount;
        this.totalQueryExecutionTime = totalQueryExecutionTime;
        this.queryPreparationTimes = queryPreparationTimes;
        this.indexLookupTime = indexLookupTime;
        this.documentLoadTime = documentLoadTime;
        this.vmExecutionTime = vmExecutionTime;
        this.runtimeExecutionTimes = runtimeExecutionTimes;
        this.documentWriteTime = documentWriteTime;
        this.clientSideMetrics = clientSideMetrics;
    }
    /**
     * Gets the IndexHitRatio (1 when no documents were retrieved, to avoid
     * dividing by zero).
     * @hidden
     */
    get indexHitRatio() {
        return this.retrievedDocumentCount === 0
            ? 1
            : this.indexHitDocumentCount / this.retrievedDocumentCount;
    }
    /**
     * returns a new QueryMetrics instance that is the addition of this and the arguments.
     */
    add(queryMetricsArray) {
        let retrievedDocumentCount = 0;
        let retrievedDocumentSize = 0;
        let outputDocumentCount = 0;
        let outputDocumentSize = 0;
        let indexHitDocumentCount = 0;
        let totalQueryExecutionTime = TimeSpan.zero;
        const queryPreparationTimesArray = [];
        let indexLookupTime = TimeSpan.zero;
        let documentLoadTime = TimeSpan.zero;
        let vmExecutionTime = TimeSpan.zero;
        const runtimeExecutionTimesArray = [];
        let documentWriteTime = TimeSpan.zero;
        const clientSideQueryMetricsArray = [];
        // Fixed: iterate over a copy that includes `this` instead of pushing
        // `this` onto the caller-owned array (the old code mutated the argument).
        for (const queryMetrics of [...queryMetricsArray, this]) {
            if (queryMetrics) {
                retrievedDocumentCount += queryMetrics.retrievedDocumentCount;
                retrievedDocumentSize += queryMetrics.retrievedDocumentSize;
                outputDocumentCount += queryMetrics.outputDocumentCount;
                outputDocumentSize += queryMetrics.outputDocumentSize;
                indexHitDocumentCount += queryMetrics.indexHitDocumentCount;
                totalQueryExecutionTime = totalQueryExecutionTime.add(queryMetrics.totalQueryExecutionTime);
                queryPreparationTimesArray.push(queryMetrics.queryPreparationTimes);
                indexLookupTime = indexLookupTime.add(queryMetrics.indexLookupTime);
                documentLoadTime = documentLoadTime.add(queryMetrics.documentLoadTime);
                vmExecutionTime = vmExecutionTime.add(queryMetrics.vmExecutionTime);
                runtimeExecutionTimesArray.push(queryMetrics.runtimeExecutionTimes);
                documentWriteTime = documentWriteTime.add(queryMetrics.documentWriteTime);
                clientSideQueryMetricsArray.push(queryMetrics.clientSideMetrics);
            }
        }
        return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, QueryPreparationTimes.createFromArray(queryPreparationTimesArray), indexLookupTime, documentLoadTime, vmExecutionTime, RuntimeExecutionTimes.createFromArray(runtimeExecutionTimesArray), documentWriteTime, ClientSideMetrics.createFromArray(...clientSideQueryMetricsArray));
    }
    /**
     * Output the QueryMetrics as a delimited string.
     * @hidden
     */
    toDelimitedString() {
        return (QueryMetricsConstants.RetrievedDocumentCount +
            "=" +
            this.retrievedDocumentCount +
            ";" +
            QueryMetricsConstants.RetrievedDocumentSize +
            "=" +
            this.retrievedDocumentSize +
            ";" +
            QueryMetricsConstants.OutputDocumentCount +
            "=" +
            this.outputDocumentCount +
            ";" +
            QueryMetricsConstants.OutputDocumentSize +
            "=" +
            this.outputDocumentSize +
            ";" +
            QueryMetricsConstants.IndexHitRatio +
            "=" +
            this.indexHitRatio +
            ";" +
            QueryMetricsConstants.TotalQueryExecutionTimeInMs +
            "=" +
            this.totalQueryExecutionTime.totalMilliseconds() +
            ";" +
            this.queryPreparationTimes.toDelimitedString() +
            ";" +
            QueryMetricsConstants.IndexLookupTimeInMs +
            "=" +
            this.indexLookupTime.totalMilliseconds() +
            ";" +
            QueryMetricsConstants.DocumentLoadTimeInMs +
            "=" +
            this.documentLoadTime.totalMilliseconds() +
            ";" +
            QueryMetricsConstants.VMExecutionTimeInMs +
            "=" +
            this.vmExecutionTime.totalMilliseconds() +
            ";" +
            this.runtimeExecutionTimes.toDelimitedString() +
            ";" +
            QueryMetricsConstants.DocumentWriteTimeInMs +
            "=" +
            this.documentWriteTime.totalMilliseconds());
    }
    /**
     * Returns a new instance of the QueryMetrics class that is the aggregation of an array of query metrics.
     */
    static createFromArray(queryMetricsArray) {
        if (!queryMetricsArray) {
            throw new Error("queryMetricsArray is null or undefined item(s)");
        }
        return QueryMetrics.zero.add(queryMetricsArray);
    }
    /**
     * Returns a new instance of the QueryMetrics class this is deserialized from a delimited string.
     */
    static createFromDelimitedString(delimitedString, clientSideMetrics) {
        const metrics = parseDelimitedString(delimitedString);
        const indexHitRatio = metrics[QueryMetricsConstants.IndexHitRatio] || 0;
        const retrievedDocumentCount = metrics[QueryMetricsConstants.RetrievedDocumentCount] || 0;
        // The header carries a ratio; convert it back to an absolute count.
        const indexHitCount = indexHitRatio * retrievedDocumentCount;
        const outputDocumentCount = metrics[QueryMetricsConstants.OutputDocumentCount] || 0;
        const outputDocumentSize = metrics[QueryMetricsConstants.OutputDocumentSize] || 0;
        const retrievedDocumentSize = metrics[QueryMetricsConstants.RetrievedDocumentSize] || 0;
        const totalQueryExecutionTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.TotalQueryExecutionTimeInMs);
        return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitCount, totalQueryExecutionTime, QueryPreparationTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs), RuntimeExecutionTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs), clientSideMetrics || ClientSideMetrics.zero);
    }
}
// Identity element for QueryMetrics aggregation: every count/size is 0 and
// every timing component is TimeSpan.zero. Used as the seed for createFromArray.
QueryMetrics.zero = new QueryMetrics(0, 0, 0, 0, 0, TimeSpan.zero, QueryPreparationTimes.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, RuntimeExecutionTimes.zero, TimeSpan.zero, ClientSideMetrics.zero);
2266
// Copyright (c) Microsoft Corporation.
/**
 * Extracts a numeric request charge (RU) from either a bare number, a numeric
 * string, or a headers object keyed by the request-charge header; returns 0
 * when no charge is present.
 * @hidden
 */
function getRequestChargeIfAny(headers) {
    if (typeof headers === "number") {
        return headers;
    }
    if (typeof headers === "string") {
        return parseFloat(headers);
    }
    if (!headers) {
        return 0;
    }
    const rawCharge = headers[Constants.HttpHeaders.RequestCharge];
    return rawCharge ? parseFloat(rawCharge) : 0;
}
/**
 * Builds an empty response-header accumulator with a zero request charge and
 * an empty per-partition query-metrics map.
 * @hidden
 */
function getInitialHeader() {
    return {
        [Constants.HttpHeaders.RequestCharge]: 0,
        [Constants.HttpHeaders.QueryMetrics]: {},
    };
}
/**
 * Accumulates the request charge, RU/minute flag, and per-partition query
 * metrics from `toBeMergedHeaders` into `headers` (mutated in place).
 * @hidden
 */
// TODO: The name of this method isn't very accurate to what it does
function mergeHeaders(headers, toBeMergedHeaders) {
    const chargeKey = Constants.HttpHeaders.RequestCharge;
    const metricsKey = Constants.HttpHeaders.QueryMetrics;
    const ruMinuteKey = Constants.HttpHeaders.IsRUPerMinuteUsed;
    // Seed defaults so the accumulation below always has something to add to.
    if (headers[chargeKey] === undefined) {
        headers[chargeKey] = 0;
    }
    if (headers[metricsKey] === undefined) {
        headers[metricsKey] = QueryMetrics.zero;
    }
    if (!toBeMergedHeaders) {
        return;
    }
    headers[chargeKey] += getRequestChargeIfAny(toBeMergedHeaders);
    if (toBeMergedHeaders[ruMinuteKey]) {
        headers[ruMinuteKey] = toBeMergedHeaders[ruMinuteKey];
    }
    if (metricsKey in toBeMergedHeaders) {
        const existingMetrics = headers[metricsKey];
        const incomingMetrics = toBeMergedHeaders[metricsKey];
        // Merge per-partition metrics, combining entries present on both sides.
        for (const partitionId in incomingMetrics) {
            existingMetrics[partitionId] = existingMetrics[partitionId]
                ? existingMetrics[partitionId].add([incomingMetrics[partitionId]])
                : incomingMetrics[partitionId];
        }
    }
}
2334
// Copyright (c) Microsoft Corporation.
const logger$3 = logger$4.createClientLogger("ClientContext");
/** @hidden */
// Lifecycle states for DefaultQueryExecutionContext (a TypeScript string enum
// compiled to a plain lookup object).
var STATES;
(function (STATES) {
    STATES["start"] = "start";
    STATES["inProgress"] = "inProgress";
    STATES["ended"] = "ended";
})(STATES || (STATES = {}));
/** @hidden */
class DefaultQueryExecutionContext {
    /**
     * Provides the basic Query Execution Context.
     * This wraps the internal logic of query execution using provided fetch functions.
     *
     * @param options - Represents the feed options; an initial continuation token may
     * be supplied via `options.continuationToken` or `options.continuation`.
     * @param fetchFunctions - A function to retrieve each page of data.
     * An array of functions may be used to query more than one partition.
     * @hidden
     */
    constructor(options, fetchFunctions) {
        this.resources = [];
        this.currentIndex = 0;
        this.currentPartitionIndex = 0;
        this.fetchFunctions = Array.isArray(fetchFunctions) ? fetchFunctions : [fetchFunctions];
        this.options = options || {};
        this.continuationToken = this.options.continuationToken || this.options.continuation || null;
        this.state = DefaultQueryExecutionContext.STATES.start;
    }
    /** Current continuation token (null until the first fetch sets one). */
    get continuation() {
        return this.continuationToken;
    }
    /**
     * Execute a provided callback on the next element in the execution context.
     */
    async nextItem() {
        ++this.currentIndex;
        const response = await this.current();
        return response;
    }
    /**
     * Retrieve the current element on the execution context.
     * Serves from the in-memory buffer when possible; otherwise fetches the next
     * page (recursing past empty pages) or ends the context.
     */
    async current() {
        // Fast path: the current item is already buffered.
        if (this.currentIndex < this.resources.length) {
            return {
                result: this.resources[this.currentIndex],
                headers: getInitialHeader(),
            };
        }
        if (this._canFetchMore()) {
            const { result: resources, headers } = await this.fetchMore();
            this.resources = resources;
            if (this.resources.length === 0) {
                // An empty page with no continuation and no partitions left means we're done;
                // otherwise recurse to pull the next page.
                if (!this.continuationToken && this.currentPartitionIndex >= this.fetchFunctions.length) {
                    this.state = DefaultQueryExecutionContext.STATES.ended;
                    return { result: undefined, headers };
                }
                else {
                    return this.current();
                }
            }
            return { result: this.resources[this.currentIndex], headers };
        }
        else {
            this.state = DefaultQueryExecutionContext.STATES.ended;
            return { result: undefined, headers: getInitialHeader() };
        }
    }
    /**
     * Determine if there are still remaining resources to processs based on
     * the value of the continuation token or the elements remaining on the current batch in the execution context.
     *
     * @returns true if there is other elements to process in the DefaultQueryExecutionContext.
     */
    hasMoreResults() {
        // NOTE(review): continuationToken is initialized to null, so the
        // `!== undefined` check is true before any fetch — the `start` state
        // clause masks this, but confirm the null/undefined distinction is intended.
        return (this.state === DefaultQueryExecutionContext.STATES.start ||
            this.continuationToken !== undefined ||
            this.currentIndex < this.resources.length - 1 ||
            this.currentPartitionIndex < this.fetchFunctions.length);
    }
    /**
     * Fetches the next batch of the feed and pass them as an array to a callback
     */
    async fetchMore() {
        if (this.currentPartitionIndex >= this.fetchFunctions.length) {
            return { headers: getInitialHeader(), result: undefined };
        }
        // Keep to the original continuation and to restore the value after fetchFunction call
        const originalContinuation = this.options.continuationToken || this.options.continuation;
        this.options.continuationToken = this.continuationToken;
        // Return undefined if there is no more results
        // (NOTE: duplicate of the guard above — currentPartitionIndex has not changed since.)
        if (this.currentPartitionIndex >= this.fetchFunctions.length) {
            return { headers: getInitialHeader(), result: undefined };
        }
        let resources;
        let responseHeaders;
        try {
            let p;
            // Use the page prefetched on the previous call when bufferItems was enabled.
            if (this.nextFetchFunction !== undefined) {
                logger$3.verbose("using prefetch");
                p = this.nextFetchFunction;
                this.nextFetchFunction = undefined;
            }
            else {
                logger$3.verbose("using fresh fetch");
                p = this.fetchFunctions[this.currentPartitionIndex](this.options);
            }
            const response = await p;
            resources = response.result;
            responseHeaders = response.headers;
            this.continuationToken = responseHeaders[Constants.HttpHeaders.Continuation];
            // No continuation means this partition is drained; advance to the next one.
            if (!this.continuationToken) {
                ++this.currentPartitionIndex;
            }
            // Eagerly kick off the next page's fetch so it overlaps with consumption.
            if (this.options && this.options.bufferItems === true) {
                const fetchFunction = this.fetchFunctions[this.currentPartitionIndex];
                this.nextFetchFunction = fetchFunction
                    ? fetchFunction(Object.assign(Object.assign({}, this.options), { continuationToken: this.continuationToken }))
                    : undefined;
            }
        }
        catch (err) {
            this.state = DefaultQueryExecutionContext.STATES.ended;
            // return callback(err, undefined, responseHeaders);
            // TODO: Error and data being returned is an antipattern, this might broken
            throw err;
        }
        this.state = DefaultQueryExecutionContext.STATES.inProgress;
        this.currentIndex = 0;
        this.options.continuationToken = originalContinuation;
        this.options.continuation = originalContinuation;
        // deserializing query metrics so that we aren't working with delimited strings in the rest of the code base
        if (Constants.HttpHeaders.QueryMetrics in responseHeaders) {
            const delimitedString = responseHeaders[Constants.HttpHeaders.QueryMetrics];
            let queryMetrics = QueryMetrics.createFromDelimitedString(delimitedString);
            // Add the request charge to the query metrics so that we can have per partition request charge.
            if (Constants.HttpHeaders.RequestCharge in responseHeaders) {
                const requestCharge = Number(responseHeaders[Constants.HttpHeaders.RequestCharge]) || 0;
                queryMetrics = new QueryMetrics(queryMetrics.retrievedDocumentCount, queryMetrics.retrievedDocumentSize, queryMetrics.outputDocumentCount, queryMetrics.outputDocumentSize, queryMetrics.indexHitDocumentCount, queryMetrics.totalQueryExecutionTime, queryMetrics.queryPreparationTimes, queryMetrics.indexLookupTime, queryMetrics.documentLoadTime, queryMetrics.vmExecutionTime, queryMetrics.runtimeExecutionTimes, queryMetrics.documentWriteTime, new ClientSideMetrics(requestCharge));
            }
            // Wraping query metrics in a object where the key is '0' just so single partition
            // and partition queries have the same response schema
            responseHeaders[Constants.HttpHeaders.QueryMetrics] = {};
            responseHeaders[Constants.HttpHeaders.QueryMetrics]["0"] = queryMetrics;
        }
        return { result: resources, headers: responseHeaders };
    }
    // A fetch is allowed on the very first call, or while in progress when either a
    // continuation token or an unvisited fetch function remains.
    _canFetchMore() {
        const res = this.state === DefaultQueryExecutionContext.STATES.start ||
            (this.continuationToken && this.state === DefaultQueryExecutionContext.STATES.inProgress) ||
            (this.currentPartitionIndex < this.fetchFunctions.length &&
                this.state === DefaultQueryExecutionContext.STATES.inProgress);
        return res;
    }
}
DefaultQueryExecutionContext.STATES = STATES;
2494
/** @hidden */
class AverageAggregator {
    /**
     * Folds a partial { sum, count } result into this aggregator.
     * Payloads that are missing or have no sum (e.g. partitions that matched
     * no documents) are ignored.
     */
    aggregate(other) {
        if (other == null || other.sum == null) {
            return;
        }
        this.sum = (this.sum == null ? 0.0 : this.sum) + other.sum;
        this.count = (this.count == null ? 0 : this.count) + other.count;
    }
    /**
     * Returns sum / count, or undefined when nothing has been aggregated yet.
     */
    getResult() {
        const isEmpty = this.sum == null || this.count <= 0;
        return isEmpty ? undefined : this.sum / this.count;
    }
}
2521
/** @hidden */
class CountAggregator {
    /**
     * Aggregator for the COUNT operator: keeps a running total of partial counts.
     * @hidden
     */
    constructor() {
        this.value = 0;
    }
    /**
     * Adds a partial count to the running total.
     */
    aggregate(other) {
        this.value = this.value + other;
    }
    /**
     * Returns the accumulated count.
     */
    getResult() {
        return this.value;
    }
}
2544
// TODO: this smells funny
/** @hidden */
// Maps a JS type name — plus the sentinel "NoValue" for missing order-by items —
// to an ordinal used to order values of *different* types, and a comparison
// function for values of the *same* type. Types without a compFunc (NoValue,
// undefined) compare equal to themselves. Note the gap at ord 3 — presumably
// reserved for null — TODO confirm. Object types are intentionally absent;
// compareValue throws before reaching this table for them.
const TYPEORDCOMPARATOR = Object.freeze({
    NoValue: {
        ord: 0,
    },
    undefined: {
        ord: 1,
    },
    boolean: {
        ord: 2,
        compFunc: (a, b) => {
            return a === b ? 0 : a > b ? 1 : -1;
        },
    },
    number: {
        ord: 4,
        compFunc: (a, b) => {
            return a === b ? 0 : a > b ? 1 : -1;
        },
    },
    string: {
        ord: 5,
        compFunc: (a, b) => {
            return a === b ? 0 : a > b ? 1 : -1;
        },
    },
});
/** @hidden */
class OrderByDocumentProducerComparator {
    /**
     * Compares DocumentProducers when merging cross-partition ORDER BY results.
     * @param sortOrder - One "Ascending" | "Descending" entry per ORDER BY expression.
     */
    constructor(sortOrder) {
        this.sortOrder = sortOrder;
    } // TODO: This should be an enum
    // Tie-breaker: order producers by their target partition key range's
    // minInclusive boundary so the merge order is deterministic.
    targetPartitionKeyRangeDocProdComparator(docProd1, docProd2) {
        const a = docProd1.getTargetParitionKeyRange()["minInclusive"];
        const b = docProd2.getTargetParitionKeyRange()["minInclusive"];
        return a === b ? 0 : a > b ? 1 : -1;
    }
    /**
     * Orders two document producers by comparing their next buffered items'
     * order-by values; producers that hit a partition split sort first/last so
     * they are handled before their buffers are dereferenced.
     */
    compare(docProd1, docProd2) {
        // Need to check for split, since we don't want to dereference "item" of undefined / exception
        if (docProd1.gotSplit()) {
            return -1;
        }
        if (docProd2.gotSplit()) {
            return 1;
        }
        const orderByItemsRes1 = this.getOrderByItems(docProd1.peekBufferedItems()[0]);
        const orderByItemsRes2 = this.getOrderByItems(docProd2.peekBufferedItems()[0]);
        // validate order by items and types
        // TODO: once V1 order by on different types is fixed this need to change
        this.validateOrderByItems(orderByItemsRes1, orderByItemsRes2);
        // no async call in the for loop
        for (let i = 0; i < orderByItemsRes1.length; i++) {
            // compares the orderby items one by one
            const compRes = this.compareOrderByItem(orderByItemsRes1[i], orderByItemsRes2[i]);
            if (compRes !== 0) {
                if (this.sortOrder[i] === "Ascending") {
                    return compRes;
                }
                else if (this.sortOrder[i] === "Descending") {
                    return -compRes;
                }
            }
        }
        // All order-by keys equal: fall back to the partition-range tie-breaker.
        return this.targetPartitionKeyRangeDocProdComparator(docProd1, docProd2);
    }
    // TODO: This smells funny
    /**
     * Compares two primitive values given their (pre-computed) type names.
     * Different types are ordered by type ordinal; same types use the table's
     * comparison function. Throws for object types and unknown type names.
     */
    compareValue(item1, type1, item2, type2) {
        if (type1 === "object" || type2 === "object") {
            throw new Error("Tried to compare an object type");
        }
        const type1Ord = TYPEORDCOMPARATOR[type1].ord;
        const type2Ord = TYPEORDCOMPARATOR[type2].ord;
        const typeCmp = type1Ord - type2Ord;
        if (typeCmp !== 0) {
            // if the types are different, use type ordinal
            return typeCmp;
        }
        // both are of the same type
        if (type1Ord === TYPEORDCOMPARATOR["undefined"].ord ||
            type1Ord === TYPEORDCOMPARATOR["NoValue"].ord) {
            // if both types are undefined or Null they are equal
            return 0;
        }
        const compFunc = TYPEORDCOMPARATOR[type1].compFunc;
        if (typeof compFunc === "undefined") {
            throw new Error("Cannot find the comparison function");
        }
        // same type and type is defined compare the items
        return compFunc(item1, item2);
    }
    /** Compares two { item } order-by entries after resolving their type names. */
    compareOrderByItem(orderByItem1, orderByItem2) {
        const type1 = this.getType(orderByItem1);
        const type2 = this.getType(orderByItem2);
        return this.compareValue(orderByItem1["item"], type1, orderByItem2["item"], type2);
    }
    /**
     * Ensures both order-by item arrays match each other, match the sort-order
     * length, and agree pairwise on types (mixed-type order-by is unsupported).
     */
    validateOrderByItems(res1, res2) {
        if (res1.length !== res2.length) {
            throw new Error(`Expected ${res1.length}, but got ${res2.length}.`);
        }
        if (res1.length !== this.sortOrder.length) {
            throw new Error("orderByItems cannot have a different size than sort orders.");
        }
        for (let i = 0; i < this.sortOrder.length; i++) {
            const type1 = this.getType(res1[i]);
            const type2 = this.getType(res2[i]);
            if (type1 !== type2) {
                throw new Error(`Expected ${type1}, but got ${type2}. Cannot execute cross partition order-by queries on mixed types. Consider filtering your query using IS_STRING or IS_NUMBER to get around this exception.`);
            }
        }
    }
    /**
     * Resolves an order-by entry to a type name usable as a TYPEORDCOMPARATOR key;
     * missing entries/items map to the "NoValue" sentinel.
     */
    getType(orderByItem) {
        // TODO: any item?
        if (orderByItem === undefined || orderByItem.item === undefined) {
            return "NoValue";
        }
        const type = typeof orderByItem.item;
        if (TYPEORDCOMPARATOR[type] === undefined) {
            throw new Error(`unrecognizable type ${type}`);
        }
        return type;
    }
    /** Extracts the orderByItems array from a buffered query result. */
    getOrderByItems(res) {
        // TODO: any res?
        return res["orderByItems"];
    }
}
2672
// Copyright (c) Microsoft Corporation.
/** @hidden */
class MaxAggregator {
    /**
     * Aggregator for the MAX operator: tracks the largest `max` seen across
     * partial results, using the cross-partition order-by comparator.
     * @hidden
     */
    constructor() {
        this.value = undefined;
        this.comparer = new OrderByDocumentProducerComparator(["Ascending"]);
    }
    /**
     * Folds another partial result's `max` into the running maximum.
     */
    aggregate(other) {
        const candidate = other.max;
        if (this.value === undefined ||
            this.comparer.compareValue(candidate, typeof candidate, this.value, typeof this.value) > 0) {
            this.value = candidate;
        }
    }
    /**
     * Returns the largest value seen, or undefined if nothing was aggregated.
     */
    getResult() {
        return this.value;
    }
}
2702
// Copyright (c) Microsoft Corporation.
/** @hidden */
class MinAggregator {
    /**
     * Aggregator for the MIN operator: tracks the smallest `min` seen across
     * partial results, using the cross-partition order-by comparator.
     * @hidden
     */
    constructor() {
        this.value = undefined;
        this.comparer = new OrderByDocumentProducerComparator(["Ascending"]);
    }
    /**
     * Folds another partial result's `min` into the running minimum.
     * `null` values are mapped to the "NoValue" type sentinel for comparison.
     */
    aggregate(other) {
        if (this.value === undefined) {
            // || typeof this.value === "object"
            this.value = other.min;
            return;
        }
        const typeNameOf = (v) => (v === null ? "NoValue" : typeof v);
        const keepOther = this.comparer.compareValue(other.min, typeNameOf(other.min), this.value, typeNameOf(this.value)) < 0;
        if (keepOther) {
            this.value = other.min;
        }
    }
    /**
     * Returns the smallest value seen, or undefined if nothing was aggregated.
     */
    getResult() {
        return this.value;
    }
}
2737
/** @hidden */
class SumAggregator {
    /**
     * Folds a partial sum into the running total; undefined payloads
     * (partitions that matched no documents) are skipped.
     */
    aggregate(other) {
        if (other === undefined) {
            return;
        }
        this.sum = this.sum === undefined ? other : this.sum + other;
    }
    /**
     * Returns the running total, or undefined when nothing was aggregated.
     */
    getResult() {
        return this.sum;
    }
}
2761
/** @hidden */
class StaticValueAggregator {
    /** Captures the first defined value seen and ignores everything after it. */
    aggregate(other) {
        if (this.value !== undefined) {
            return;
        }
        this.value = other;
    }
    /** Returns the captured value (undefined if nothing was aggregated). */
    getResult() {
        return this.value;
    }
}
2773
// Copyright (c) Microsoft Corporation.
/**
 * Factory for the aggregator matching a query's aggregate operator.
 * Any unrecognized type falls back to StaticValueAggregator.
 */
function createAggregator(aggregateType) {
    const aggregatorsByType = {
        Average: AverageAggregator,
        Count: CountAggregator,
        Max: MaxAggregator,
        Min: MinAggregator,
        Sum: SumAggregator,
    };
    // hasOwnProperty guard keeps inherited keys (e.g. "toString") on the fallback path.
    const AggregatorCtor = Object.prototype.hasOwnProperty.call(aggregatorsByType, aggregateType)
        ? aggregatorsByType[aggregateType]
        : StaticValueAggregator;
    return new AggregatorCtor();
}
2791
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/** @hidden */
// Discriminates what a FetchResult wraps: a terminal marker, a buffered
// exception, or an actual feed result (TS numeric enum compiled to a
// two-way name<->value map).
var FetchResultType;
(function (FetchResultType) {
    FetchResultType[FetchResultType["Done"] = 0] = "Done";
    FetchResultType[FetchResultType["Exception"] = 1] = "Exception";
    FetchResultType[FetchResultType["Result"] = 2] = "Result";
})(FetchResultType || (FetchResultType = {}));
/** @hidden */
class FetchResult {
    /**
     * Wraps fetch results for the document producer.
     * This allows the document producer to buffer exceptions so that actual results don't get flushed during splits.
     *
     * @param feedResponse - The response the document producer got back on a successful fetch
     * @param error - The exception meant to be buffered on an unsuccessful fetch
     * @hidden
     */
    constructor(feedResponse, error) {
        // TODO: feedResponse/error
        if (feedResponse === undefined) {
            this.error = error;
            this.fetchResultType = FetchResultType.Exception;
        }
        else {
            this.feedResponse = feedResponse;
            this.fetchResultType = FetchResultType.Result;
        }
    }
}
2823
/** @hidden */
class DocumentProducer {
    /**
     * Provides the Target Partition Range Query Execution Context.
     * @param clientContext - The service endpoint to use to create the client.
     * @param collectionLink - Represents collection link
     * @param query - A SQL query.
     * @param targetPartitionKeyRange - Query Target Partition key Range
     * @hidden
     */
    constructor(clientContext, collectionLink, query, targetPartitionKeyRange, options) {
        this.clientContext = clientContext;
        // Bumped on every fetch; used by callers to detect that new data arrived.
        this.generation = 0;
        // Page-fetch function bound to this producer's single partition key range.
        this.fetchFunction = async (options) => {
            const path = getPathFromLink(this.collectionLink, exports.ResourceType.item);
            const id = getIdFromLink(this.collectionLink);
            return this.clientContext.queryFeed({
                path,
                resourceType: exports.ResourceType.item,
                resourceId: id,
                resultFn: (result) => result.Documents,
                query: this.query,
                options,
                partitionKeyRangeId: this.targetPartitionKeyRange["id"],
            });
        };
        // TODO: any options
        this.collectionLink = collectionLink;
        this.query = query;
        this.targetPartitionKeyRange = targetPartitionKeyRange;
        // FIFO buffer of FetchResult entries (results, plus buffered split errors).
        this.fetchResults = [];
        this.allFetched = false;
        this.err = undefined;
        this.previousContinuationToken = undefined;
        this.continuationToken = undefined;
        this.respHeaders = getInitialHeader();
        this.internalExecutionContext = new DefaultQueryExecutionContext(options, this.fetchFunction);
    }
    /**
     * Synchronously gives the contiguous buffered results (stops at the first non result) if any
     * @returns buffered current items if any
     * @hidden
     */
    peekBufferedItems() {
        const bufferedResults = [];
        for (let i = 0, done = false; i < this.fetchResults.length && !done; i++) {
            const fetchResult = this.fetchResults[i];
            switch (fetchResult.fetchResultType) {
                case FetchResultType.Done:
                    done = true;
                    break;
                case FetchResultType.Exception:
                    // Stop at a buffered exception without surfacing it here.
                    done = true;
                    break;
                case FetchResultType.Result:
                    bufferedResults.push(fetchResult.feedResponse);
                    break;
            }
        }
        return bufferedResults;
    }
    /** True while the underlying context can fetch more or items remain buffered. */
    hasMoreResults() {
        return this.internalExecutionContext.hasMoreResults() || this.fetchResults.length !== 0;
    }
    /**
     * True when the head of the buffer is a Gone/PartitionKeyRangeGone error,
     * i.e. this producer's partition was split and must be re-resolved.
     */
    gotSplit() {
        const fetchResult = this.fetchResults[0];
        if (fetchResult.fetchResultType === FetchResultType.Exception) {
            if (DocumentProducer._needPartitionKeyRangeCacheRefresh(fetchResult.error)) {
                return true;
            }
        }
        return false;
    }
    // Hands back the accumulated response headers and resets the accumulator.
    _getAndResetActiveResponseHeaders() {
        const ret = this.respHeaders;
        this.respHeaders = getInitialHeader();
        return ret;
    }
    // Records an error / completion and mirrors the inner context's continuation token.
    _updateStates(err, allFetched) {
        // TODO: any Error
        if (err) {
            this.err = err;
            return;
        }
        if (allFetched) {
            this.allFetched = true;
        }
        if (this.internalExecutionContext.continuationToken === this.continuationToken) {
            // nothing changed
            return;
        }
        this.previousContinuationToken = this.continuationToken;
        this.continuationToken = this.internalExecutionContext.continuationToken;
    }
    // Gone + PartitionKeyRangeGone substatus signals a partition split/merge.
    static _needPartitionKeyRangeCacheRefresh(error) {
        // TODO: error
        return (error.code === StatusCodes.Gone &&
            "substatus" in error &&
            error["substatus"] === SubStatusCodes.PartitionKeyRangeGone);
    }
    /**
     * Fetches and bufferes the next page of results and executes the given callback
     */
    async bufferMore() {
        if (this.err) {
            throw this.err;
        }
        try {
            const { result: resources, headers: headerResponse } = await this.internalExecutionContext.fetchMore();
            ++this.generation;
            this._updateStates(undefined, resources === undefined);
            if (resources !== undefined) {
                // some more results
                resources.forEach((element) => {
                    // TODO: resources any
                    this.fetchResults.push(new FetchResult(element, undefined));
                });
            }
            // need to modify the header response so that the query metrics are per partition
            if (headerResponse != null && Constants.HttpHeaders.QueryMetrics in headerResponse) {
                // "0" is the default partition before one is actually assigned.
                const queryMetrics = headerResponse[Constants.HttpHeaders.QueryMetrics]["0"];
                // Wraping query metrics in a object where the keys are the partition key range.
                headerResponse[Constants.HttpHeaders.QueryMetrics] = {};
                headerResponse[Constants.HttpHeaders.QueryMetrics][this.targetPartitionKeyRange.id] =
                    queryMetrics;
            }
            return { result: resources, headers: headerResponse };
        }
        catch (err) {
            // TODO: any error
            if (DocumentProducer._needPartitionKeyRangeCacheRefresh(err)) {
                // Split just happend
                // Buffer the error so the execution context can still get the feedResponses in the itemBuffer
                const bufferedError = new FetchResult(undefined, err);
                this.fetchResults.push(bufferedError);
                // Putting a dummy result so that the rest of code flows
                return { result: [bufferedError], headers: err.headers };
            }
            else {
                this._updateStates(err, err.resources === undefined);
                throw err;
            }
        }
    }
    /**
     * Synchronously gives the bufferend current item if any
     * @returns buffered current item if any
     * @hidden
     */
    getTargetParitionKeyRange() {
        return this.targetPartitionKeyRange;
    }
    /**
     * Fetches the next element in the DocumentProducer.
     */
    async nextItem() {
        if (this.err) {
            this._updateStates(this.err, undefined);
            throw this.err;
        }
        try {
            const { result, headers } = await this.current();
            // Consume the head of the buffer; current() guarantees it matches `result`.
            const fetchResult = this.fetchResults.shift();
            this._updateStates(undefined, result === undefined);
            if (fetchResult.feedResponse !== result) {
                throw new Error(`Expected ${fetchResult.feedResponse} to equal ${result}`);
            }
            switch (fetchResult.fetchResultType) {
                case FetchResultType.Done:
                    return { result: undefined, headers };
                case FetchResultType.Exception:
                    fetchResult.error.headers = headers;
                    throw fetchResult.error;
                case FetchResultType.Result:
                    return { result: fetchResult.feedResponse, headers };
            }
        }
        catch (err) {
            this._updateStates(err, err.item === undefined);
            throw err;
        }
    }
    /**
     * Retrieve the current element on the DocumentProducer.
     */
    async current() {
        // If something is buffered just give that
        if (this.fetchResults.length > 0) {
            const fetchResult = this.fetchResults[0];
            // Need to unwrap fetch results
            switch (fetchResult.fetchResultType) {
                case FetchResultType.Done:
                    return {
                        result: undefined,
                        headers: this._getAndResetActiveResponseHeaders(),
                    };
                case FetchResultType.Exception:
                    fetchResult.error.headers = this._getAndResetActiveResponseHeaders();
                    throw fetchResult.error;
                case FetchResultType.Result:
                    return {
                        result: fetchResult.feedResponse,
                        headers: this._getAndResetActiveResponseHeaders(),
                    };
            }
        }
        // If there isn't anymore items left to fetch then let the user know.
        if (this.allFetched) {
            return {
                result: undefined,
                headers: this._getAndResetActiveResponseHeaders(),
            };
        }
        // If there are no more bufferd items and there are still items to be fetched then buffer more
        const { result, headers } = await this.bufferMore();
        mergeHeaders(this.respHeaders, headers);
        if (result === undefined) {
            return { result: undefined, headers: this.respHeaders };
        }
        return this.current();
    }
}
3047
/** @hidden */
class QueryRange {
    /**
     * Represents a QueryRange.
     *
     * @param rangeMin - min
     * @param rangeMax - max
     * @param isMinInclusive - isMinInclusive
     * @param isMaxInclusive - isMaxInclusive
     * @hidden
     */
    constructor(rangeMin, rangeMax, isMinInclusive, isMaxInclusive) {
        this.min = rangeMin;
        this.max = rangeMax;
        this.isMinInclusive = isMinInclusive;
        this.isMaxInclusive = isMaxInclusive;
    }
    /**
     * True when this range and `other` share at least one point.
     * Touching boundaries only count when both touching ends are inclusive.
     */
    overlaps(other) {
        const range1 = this; // eslint-disable-line @typescript-eslint/no-this-alias
        const range2 = other;
        if (range1 === undefined || range2 === undefined) {
            return false;
        }
        if (range1.isEmpty() || range2.isEmpty()) {
            return false;
        }
        // NOTE(review): a standard interval-overlap test uses
        // `range1.min <= range2.max && range2.min <= range1.max`; with `||` this
        // outer check passes for almost any pair of non-empty ranges and only the
        // boundary-equality checks below can reject. Confirm this is intended.
        if (range1.min <= range2.max || range2.min <= range1.max) {
            if ((range1.min === range2.max && !(range1.isMinInclusive && range2.isMaxInclusive)) ||
                (range2.min === range1.max && !(range2.isMinInclusive && range1.isMaxInclusive))) {
                return false;
            }
            return true;
        }
        return false;
    }
    /** True when this range spans the entire effective partition key space. */
    isFullRange() {
        return (this.min === Constants.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey &&
            this.max === Constants.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey &&
            this.isMinInclusive === true &&
            this.isMaxInclusive === false);
    }
    /** True when the range contains no points (zero width and not closed on both ends). */
    isEmpty() {
        return !(this.isMinInclusive && this.isMaxInclusive) && this.min === this.max;
    }
    /**
     * Parse a QueryRange from a partitionKeyRange
     * (partition key ranges are always [minInclusive, maxExclusive)).
     * @returns QueryRange
     * @hidden
     */
    static parsePartitionKeyRange(partitionKeyRange) {
        return new QueryRange(partitionKeyRange[Constants.PartitionKeyRange.MinInclusive], partitionKeyRange[Constants.PartitionKeyRange.MaxExclusive], true, false);
    }
    /**
     * Parse a QueryRange from a dictionary
     * @returns QueryRange
     * @hidden
     */
    static parseFromDict(queryRangeDict) {
        return new QueryRange(queryRangeDict.min, queryRangeDict.max, queryRangeDict.isMinInclusive, queryRangeDict.isMaxInclusive);
    }
}
3109
/** @hidden */
class InMemoryCollectionRoutingMap {
    /**
     * Represents a InMemoryCollectionRoutingMap Object,
     * Stores partition key ranges in an efficient way with some additional information and provides
     * convenience methods for working with set of ranges.
     *
     * @param orderedPartitionKeyRanges - Partition key ranges pre-sorted by minInclusive.
     * @param orderedPartitionInfo - Parallel array of per-range info (same order).
     */
    constructor(orderedPartitionKeyRanges, orderedPartitionInfo) {
        this.orderedPartitionKeyRanges = orderedPartitionKeyRanges;
        // Mirror each partition key range as a [minInclusive, maxExclusive) QueryRange.
        this.orderedRanges = orderedPartitionKeyRanges.map((pkr) => {
            return new QueryRange(pkr[Constants.PartitionKeyRange.MinInclusive], pkr[Constants.PartitionKeyRange.MaxExclusive], true, false);
        });
        this.orderedPartitionInfo = orderedPartitionInfo;
    }
    /** Returns the partition key ranges in minInclusive order. */
    getOrderedParitionKeyRanges() {
        return this.orderedPartitionKeyRanges;
    }
    /**
     * Returns the partition key ranges that overlap any of the provided query
     * ranges, sorted by minInclusive and de-duplicated.
     * @param providedQueryRanges - A QueryRange or array of QueryRanges.
     */
    getOverlappingRanges(providedQueryRanges) {
        // TODO This code has all kinds of smells. Multiple iterations and sorts just to grab overlapping ranges
        // stfaul attempted to bring it down to one for-loop and failed
        const pqr = Array.isArray(providedQueryRanges)
            ? providedQueryRanges
            : [providedQueryRanges];
        // Keyed by minInclusive to de-duplicate ranges matched by several query ranges.
        const minToPartitionRange = {}; // TODO: any
        // this for loop doesn't invoke any async callback
        for (const queryRange of pqr) {
            if (queryRange.isEmpty()) {
                continue;
            }
            // A full-range query trivially overlaps every partition.
            if (queryRange.isFullRange()) {
                return this.orderedPartitionKeyRanges;
            }
            // First candidate: the range containing (or bounded by) queryRange.min.
            // NOTE(review): the callback returns undefined (falsy) for non-matches,
            // which findIndex treats as false — works, but an explicit `return false`
            // would be clearer.
            const minIndex = this.orderedRanges.findIndex((range) => {
                if (queryRange.min > range.min && queryRange.min < range.max) {
                    return true;
                }
                if (queryRange.min === range.min) {
                    return true;
                }
                if (queryRange.min === range.max) {
                    return true;
                }
            });
            if (minIndex < 0) {
                throw new Error("error in collection routing map, queried value is less than the start range.");
            }
            // Start at the end and work backwards
            // NOTE(review): if no range matches queryRange.max, maxIndex stays
            // undefined and the loop below runs zero times (j < NaN) — confirm
            // that silently matching nothing is the intended behavior here.
            let maxIndex;
            for (let i = this.orderedRanges.length - 1; i >= 0; i--) {
                const range = this.orderedRanges[i];
                if (queryRange.max > range.min && queryRange.max < range.max) {
                    maxIndex = i;
                    break;
                }
                if (queryRange.max === range.min) {
                    maxIndex = i;
                    break;
                }
                if (queryRange.max === range.max) {
                    maxIndex = i;
                    break;
                }
            }
            if (maxIndex > this.orderedRanges.length) {
                throw new Error("error in collection routing map, queried value is greater than the end range.");
            }
            // Collect every candidate between the bounds that truly overlaps.
            for (let j = minIndex; j < maxIndex + 1; j++) {
                if (queryRange.overlaps(this.orderedRanges[j])) {
                    minToPartitionRange[this.orderedPartitionKeyRanges[j][Constants.PartitionKeyRange.MinInclusive]] = this.orderedPartitionKeyRanges[j];
                }
            }
        }
        const overlappingPartitionKeyRanges = Object.keys(minToPartitionRange).map((k) => minToPartitionRange[k]);
        return overlappingPartitionKeyRanges.sort((a, b) => {
            return a[Constants.PartitionKeyRange.MinInclusive].localeCompare(b[Constants.PartitionKeyRange.MinInclusive]);
        });
    }
}
3188
// Copyright (c) Microsoft Corporation.
/**
 * Orders two [partitionKeyRange, info] tuples by their range's minInclusive key.
 * @hidden
 */
function compareRanges(a, b) {
    const left = a[0][Constants.PartitionKeyRange.MinInclusive];
    const right = b[0][Constants.PartitionKeyRange.MinInclusive];
    if (left === right) {
        return 0;
    }
    return left > right ? 1 : -1;
}
/**
 * Builds an InMemoryCollectionRoutingMap from (partitionKeyRange, info) tuples,
 * or returns undefined when the sorted ranges do not cover the full key space.
 * @hidden
 */
function createCompleteRoutingMap(partitionKeyRangeInfoTuppleList) {
    const rangeById = {}; // TODO: any
    const rangeByInfo = {}; // TODO: any
    const sortedRanges = [];
    // Plain loop: no async callbacks are invoked here.
    for (const tuple of partitionKeyRangeInfoTuppleList) {
        const [range, info] = tuple;
        rangeById[range[Constants.PartitionKeyRange.Id]] = tuple;
        rangeByInfo[info] = range;
        sortedRanges.push(tuple);
    }
    sortedRanges.sort(compareRanges);
    const partitionKeyOrderedRange = sortedRanges.map((tuple) => tuple[0]);
    const orderedPartitionInfo = sortedRanges.map((tuple) => tuple[1]);
    // The routing map is only valid when the ranges tile the whole key space.
    if (!isCompleteSetOfRange(partitionKeyOrderedRange)) {
        return undefined;
    }
    return new InMemoryCollectionRoutingMap(partitionKeyOrderedRange, orderedPartitionInfo);
}
3223/**
3224 * @hidden
3225 */
3226function isCompleteSetOfRange(partitionKeyOrderedRange) {
3227 // TODO: any
3228 let isComplete = false;
3229 if (partitionKeyOrderedRange.length > 0) {
3230 const firstRange = partitionKeyOrderedRange[0];
3231 const lastRange = partitionKeyOrderedRange[partitionKeyOrderedRange.length - 1];
3232 isComplete =
3233 firstRange[Constants.PartitionKeyRange.MinInclusive] ===
3234 Constants.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey;
3235 isComplete =
3236 isComplete &&
3237 lastRange[Constants.PartitionKeyRange.MaxExclusive] ===
3238 Constants.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey;
3239 for (let i = 1; i < partitionKeyOrderedRange.length; i++) {
3240 const previousRange = partitionKeyOrderedRange[i - 1];
3241 const currentRange = partitionKeyOrderedRange[i];
3242 isComplete =
3243 isComplete &&
3244 previousRange[Constants.PartitionKeyRange.MaxExclusive] ===
3245 currentRange[Constants.PartitionKeyRange.MinInclusive];
3246 if (!isComplete) {
3247 if (previousRange[Constants.PartitionKeyRange.MaxExclusive] >
3248 currentRange[Constants.PartitionKeyRange.MinInclusive]) {
3249 throw Error("Ranges overlap");
3250 }
3251 break;
3252 }
3253 }
3254 }
3255 return isComplete;
3256}
3257
/** @hidden */
class PartitionKeyRangeCache {
    /**
     * Caches the collection routing map, keyed by collection id. The cache stores
     * the in-flight promise so concurrent callers share a single fetch.
     */
    constructor(clientContext) {
        this.clientContext = clientContext;
        this.collectionRoutingMapByCollectionId = {};
    }
    /**
     * Finds or Instantiates the requested Collection Routing Map
     * @param collectionLink - Requested collectionLink
     * @hidden
     */
    async onCollectionRoutingMap(collectionLink) {
        const collectionId = getIdFromLink(collectionLink);
        const cached = this.collectionRoutingMapByCollectionId[collectionId];
        if (cached !== undefined) {
            return cached;
        }
        const routingMapPromise = this.requestCollectionRoutingMap(collectionLink);
        this.collectionRoutingMapByCollectionId[collectionId] = routingMapPromise;
        return routingMapPromise;
    }
    /**
     * Given the query ranges and a collection, returns the list of overlapping
     * partition key ranges.
     * @hidden
     */
    async getOverlappingRanges(collectionLink, queryRange) {
        const routingMap = await this.onCollectionRoutingMap(collectionLink);
        return routingMap.getOverlappingRanges(queryRange);
    }
    // Fetches all partition key ranges for the collection and assembles them
    // into a complete routing map.
    async requestCollectionRoutingMap(collectionLink) {
        const response = await this.clientContext
            .queryPartitionKeyRanges(collectionLink)
            .fetchAll();
        const tuples = response.resources.map((range) => [range, true]);
        return createCompleteRoutingMap(tuples);
    }
}
3292
/** @hidden */
// Property-name constants for partition key ranges. NOTE(review): the identifier
// spelling ("PARITION") is a long-standing typo; it is referenced throughout the
// file, so renaming it would be a wider change than this constant.
const PARITIONKEYRANGE = Constants.PartitionKeyRange;
/** @hidden */
// Resolves, for a sorted list of query ranges, the physical partition key ranges
// that overlap them, using the cached collection routing map.
class SmartRoutingMapProvider {
    constructor(clientContext) {
        this.partitionKeyRangeCache = new PartitionKeyRangeCache(clientContext);
    }
    // True when range2 begins at or after the end of range1 without sharing a point.
    static _secondRangeIsAfterFirstRange(range1, range2) {
        if (typeof range1.max === "undefined") {
            throw new Error("range1 must have max");
        }
        if (typeof range2.min === "undefined") {
            throw new Error("range2 must have min");
        }
        if (range1.max > range2.min) {
            // r.min < #previous_r.max
            return false;
        }
        else {
            if (range1.max === range2.min && range1.isMaxInclusive && range2.isMinInclusive) {
                // the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r
                // they share a point
                return false;
            }
            return true;
        }
    }
    // Validates that each adjacent pair of ranges is ordered and disjoint.
    static _isSortedAndNonOverlapping(ranges) {
        for (let idx = 1; idx < ranges.length; idx++) {
            const previousR = ranges[idx - 1];
            const r = ranges[idx];
            if (!this._secondRangeIsAfterFirstRange(previousR, r)) {
                return false;
            }
        }
        return true;
    }
    // Lexicographic maximum of two strings.
    static _stringMax(a, b) {
        return a >= b ? a : b;
    }
    // Three-way lexicographic string comparison (-1 / 0 / 1).
    static _stringCompare(a, b) {
        return a === b ? 0 : a > b ? 1 : -1;
    }
    // Clips r's minimum up to partitionKeyRange's exclusive max, i.e. removes the
    // portion of r already covered by that partition key range.
    static _subtractRange(r, partitionKeyRange) {
        const left = this._stringMax(partitionKeyRange[PARITIONKEYRANGE.MaxExclusive], r.min);
        const leftInclusive = this._stringCompare(left, r.min) === 0 ? r.isMinInclusive : false;
        return new QueryRange(left, r.max, leftInclusive, r.isMaxInclusive);
    }
    /**
     * Given the sorted query ranges and a collection, returns the list of
     * overlapping partition key ranges.
     * @param collectionLink - Link of the collection whose routing map is consulted.
     * @param sortedRanges - Sorted, non-overlapping list of QueryRanges.
     * @hidden
     */
    async getOverlappingRanges(collectionLink, sortedRanges) {
        // validate if the list is non- overlapping and sorted TODO: any PartitionKeyRanges
        if (!SmartRoutingMapProvider._isSortedAndNonOverlapping(sortedRanges)) {
            throw new Error("the list of ranges is not a non-overlapping sorted ranges");
        }
        let partitionKeyRanges = []; // TODO: any ParitionKeyRanges
        if (sortedRanges.length === 0) {
            return partitionKeyRanges;
        }
        const collectionRoutingMap = await this.partitionKeyRangeCache.onCollectionRoutingMap(collectionLink);
        let index = 0;
        let currentProvidedRange = sortedRanges[index];
        for (;;) {
            if (currentProvidedRange.isEmpty()) {
                // skip and go to the next item
                if (++index >= sortedRanges.length) {
                    return partitionKeyRanges;
                }
                currentProvidedRange = sortedRanges[index];
                continue;
            }
            let queryRange;
            if (partitionKeyRanges.length > 0) {
                // Avoid re-resolving the part of the range already covered by the
                // last collected partition key range.
                queryRange = SmartRoutingMapProvider._subtractRange(currentProvidedRange, partitionKeyRanges[partitionKeyRanges.length - 1]);
            }
            else {
                queryRange = currentProvidedRange;
            }
            const overlappingRanges = collectionRoutingMap.getOverlappingRanges(queryRange);
            if (overlappingRanges.length <= 0) {
                throw new Error(`error: returned overlapping ranges for queryRange ${queryRange} is empty`);
            }
            partitionKeyRanges = partitionKeyRanges.concat(overlappingRanges);
            const lastKnownTargetRange = QueryRange.parsePartitionKeyRange(partitionKeyRanges[partitionKeyRanges.length - 1]);
            if (!lastKnownTargetRange) {
                throw new Error("expected lastKnowTargetRange to be truthy");
            }
            // the overlapping ranges must contain the requested range
            if (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) >
                0) {
                throw new Error(`error: returned overlapping ranges ${overlappingRanges} \
        does not contain the requested range ${queryRange}`);
            }
            // the current range is contained in partitionKeyRanges just move forward
            if (++index >= sortedRanges.length) {
                return partitionKeyRanges;
            }
            currentProvidedRange = sortedRanges[index];
            // Skip any subsequent provided ranges already covered by the ranges
            // collected so far.
            while (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0) {
                // the current range is covered too.just move forward
                if (++index >= sortedRanges.length) {
                    return partitionKeyRanges;
                }
                currentProvidedRange = sortedRanges[index];
            }
        }
    }
}
3405
3406// Copyright (c) Microsoft Corporation.
/** @hidden */
// Client logger scoped to the parallel query execution machinery.
const logger$2 = logger$4.createClientLogger("parallelQueryExecutionContextBase");
/** @hidden */
// Lifecycle states for ParallelQueryExecutionContextBase (compiled TS string enum).
var ParallelQueryExecutionContextBaseStates = {
    started: "started",
    inProgress: "inProgress",
    ended: "ended",
};
/** @hidden */
class ParallelQueryExecutionContextBase {
    /**
     * Provides the ParallelQueryExecutionContextBase.
     * This is the base class that ParallelQueryExecutionContext and OrderByQueryExecutionContext will derive from.
     *
     * When handling a parallelized query, it instantiates one instance of
     * DocumentProducer per target partition key range and aggregates the result of each.
     *
     * @param clientContext - The service endpoint to use to create the client.
     * @param collectionLink - The Collection Link
     * @param options - Represents the feed options.
     * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
     * @hidden
     */
    constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) {
        this.clientContext = clientContext;
        this.collectionLink = collectionLink;
        this.query = query;
        this.options = options;
        this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo;
        // NOTE(review): the five assignments below duplicate the five above verbatim
        // (an artifact of the compiled output); redundant but harmless.
        this.clientContext = clientContext;
        this.collectionLink = collectionLink;
        this.query = query;
        this.options = options;
        this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo;
        this.err = undefined;
        this.state = ParallelQueryExecutionContextBase.STATES.started;
        this.routingProvider = new SmartRoutingMapProvider(this.clientContext);
        this.sortOrders = this.partitionedQueryExecutionInfo.queryInfo.orderBy;
        this.requestContinuation = options ? options.continuationToken || options.continuation : null;
        // response headers of undergoing operation
        this.respHeaders = getInitialHeader();
        // Make priority queue for documentProducers
        // The comparator is supplied by the derived class
        this.orderByPQ = new PriorityQueue__default["default"]((a, b) => this.documentProducerComparator(b, a));
        // Binary semaphore guarding this.err / this.orderByPQ / this.state.
        this.sem = semaphore__default["default"](1);
        // Creating callback for semaphore
        // TODO: Code smell
        const createDocumentProducersAndFillUpPriorityQueueFunc = async () => {
            // ensure the lock is released after finishing up
            try {
                const targetPartitionRanges = await this._onTargetPartitionRanges();
                this.waitingForInternalExecutionContexts = targetPartitionRanges.length;
                const maxDegreeOfParallelism = options.maxDegreeOfParallelism === undefined || options.maxDegreeOfParallelism < 1
                    ? targetPartitionRanges.length
                    : Math.min(options.maxDegreeOfParallelism, targetPartitionRanges.length);
                logger$2.info("Query starting against " +
                    targetPartitionRanges.length +
                    " ranges with parallelism of " +
                    maxDegreeOfParallelism);
                const parallelismSem = semaphore__default["default"](maxDegreeOfParallelism);
                let filteredPartitionKeyRanges = [];
                // The document producers generated from filteredPartitionKeyRanges
                const targetPartitionQueryExecutionContextList = [];
                if (this.requestContinuation) {
                    throw new Error("Continuation tokens are not yet supported for cross partition queries");
                }
                else {
                    filteredPartitionKeyRanges = targetPartitionRanges;
                }
                // Create one documentProducer for each partitionTargetRange
                filteredPartitionKeyRanges.forEach((partitionTargetRange) => {
                    // TODO: any partitionTargetRange
                    // no async callback
                    targetPartitionQueryExecutionContextList.push(this._createTargetPartitionQueryExecutionContext(partitionTargetRange));
                });
                // Fill up our priority queue with documentProducers
                targetPartitionQueryExecutionContextList.forEach((documentProducer) => {
                    // has async callback
                    const throttledFunc = async () => {
                        try {
                            const { result: document, headers } = await documentProducer.current();
                            this._mergeWithActiveResponseHeaders(headers);
                            if (document === undefined) {
                                // no results on this one
                                return;
                            }
                            // if there are matching results in the target ex range add it to the priority queue
                            try {
                                this.orderByPQ.enq(documentProducer);
                            }
                            catch (e) {
                                this.err = e;
                            }
                        }
                        catch (err) {
                            this._mergeWithActiveResponseHeaders(err.headers);
                            this.err = err;
                        }
                        finally {
                            // Release the per-producer slot; the main semaphore is released
                            // by _decrementInitiationLock once all producers have reported in.
                            parallelismSem.leave();
                            this._decrementInitiationLock();
                        }
                    };
                    parallelismSem.take(throttledFunc);
                });
            }
            catch (err) {
                this.err = err;
                // release the lock
                this.sem.leave();
                return;
            }
        };
        this.sem.take(createDocumentProducersAndFillUpPriorityQueueFunc);
    }
    _decrementInitiationLock() {
        // decrements waitingForInternalExecutionContexts
        // if waitingForInternalExecutionContexts reaches 0 releases the semaphore and changes the state
        this.waitingForInternalExecutionContexts = this.waitingForInternalExecutionContexts - 1;
        if (this.waitingForInternalExecutionContexts === 0) {
            this.sem.leave();
            if (this.orderByPQ.size() === 0) {
                this.state = ParallelQueryExecutionContextBase.STATES.inProgress;
            }
        }
    }
    // Folds the given response headers into the accumulated headers of the
    // in-flight operation.
    _mergeWithActiveResponseHeaders(headers) {
        mergeHeaders(this.respHeaders, headers);
    }
    // Returns the accumulated response headers and resets the accumulator.
    _getAndResetActiveResponseHeaders() {
        const ret = this.respHeaders;
        this.respHeaders = getInitialHeader();
        return ret;
    }
    // Resolves the partition key ranges overlapping the query's ranges.
    async _onTargetPartitionRanges() {
        const parsedRanges = this.partitionedQueryExecutionInfo.queryRanges;
        const queryRanges = parsedRanges.map((item) => QueryRange.parseFromDict(item));
        return this.routingProvider.getOverlappingRanges(this.collectionLink, queryRanges);
    }
    /**
     * Gets the replacement ranges for a partitionkeyrange that has been split
     */
    async _getReplacementPartitionKeyRanges(documentProducer) {
        const partitionKeyRange = documentProducer.targetPartitionKeyRange;
        // Download the new routing map
        this.routingProvider = new SmartRoutingMapProvider(this.clientContext);
        // Get the queryRange that relates to this partitionKeyRange
        const queryRange = QueryRange.parsePartitionKeyRange(partitionKeyRange);
        return this.routingProvider.getOverlappingRanges(this.collectionLink, [queryRange]);
    }
    // TODO: P0 Code smell - can barely tell what this is doing
    /**
     * Removes the current document producer from the priqueue,
     * replaces that document producer with child document producers,
     * then reexecutes the originFunction with the corrected executionContext
     */
    async _repairExecutionContext(originFunction) {
        // TODO: any
        // Get the replacement ranges
        // Removing the invalid documentProducer from the orderByPQ
        const parentDocumentProducer = this.orderByPQ.deq();
        try {
            const replacementPartitionKeyRanges = await this._getReplacementPartitionKeyRanges(parentDocumentProducer);
            const replacementDocumentProducers = [];
            // Create the replacement documentProducers
            replacementPartitionKeyRanges.forEach((partitionKeyRange) => {
                // Create replacement document producers with the parent's continuationToken
                const replacementDocumentProducer = this._createTargetPartitionQueryExecutionContext(partitionKeyRange, parentDocumentProducer.continuationToken);
                replacementDocumentProducers.push(replacementDocumentProducer);
            });
            // We need to check if the documentProducers even has anything left to fetch from before enqueing them
            const checkAndEnqueueDocumentProducer = async (documentProducerToCheck, checkNextDocumentProducerCallback) => {
                try {
                    const { result: afterItem } = await documentProducerToCheck.current();
                    if (afterItem === undefined) {
                        // no more results left in this document producer, so we don't enqueue it
                    }
                    else {
                        // Safe to put document producer back in the queue
                        this.orderByPQ.enq(documentProducerToCheck);
                    }
                    await checkNextDocumentProducerCallback();
                }
                catch (err) {
                    this.err = err;
                    return;
                }
            };
            // Recursively checks each replacement producer, then re-runs originFunction.
            const checkAndEnqueueDocumentProducers = async (rdp) => {
                if (rdp.length > 0) {
                    // We still have a replacementDocumentProducer to check
                    const replacementDocumentProducer = rdp.shift();
                    await checkAndEnqueueDocumentProducer(replacementDocumentProducer, async () => {
                        await checkAndEnqueueDocumentProducers(rdp);
                    });
                }
                else {
                    // reexecutes the originFunction with the corrected executionContext
                    return originFunction();
                }
            };
            // Invoke the recursive function to get the ball rolling
            await checkAndEnqueueDocumentProducers(replacementDocumentProducers);
        }
        catch (err) {
            this.err = err;
            throw err;
        }
    }
    // True when the error is a Gone response with the PartitionKeyRangeGone
    // substatus, i.e. the partition key range cache must be refreshed.
    static _needPartitionKeyRangeCacheRefresh(error) {
        // TODO: any error
        return (error.code === StatusCodes.Gone &&
            "substatus" in error &&
            error["substatus"] === SubStatusCodes.PartitionKeyRangeGone);
    }
    /**
     * Checks to see if the executionContext needs to be repaired.
     * if so it repairs the execution context and executes the ifCallback,
     * else it continues with the current execution context and executes the elseCallback
     */
    async _repairExecutionContextIfNeeded(ifCallback, elseCallback) {
        const documentProducer = this.orderByPQ.peek();
        // Check if split happened
        try {
            await documentProducer.current();
            elseCallback();
        }
        catch (err) {
            if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) {
                // Split has happened so we need to repair execution context before continuing
                return this._repairExecutionContext(ifCallback);
            }
            else {
                // Something actually bad happened ...
                this.err = err;
                throw err;
            }
        }
    }
    /**
     * Fetches the next element in the ParallelQueryExecutionContextBase.
     */
    async nextItem() {
        if (this.err) {
            // if there is a prior error return error
            throw this.err;
        }
        return new Promise((resolve, reject) => {
            this.sem.take(() => {
                // NOTE: lock must be released before invoking quitting
                if (this.err) {
                    // release the lock before invoking callback
                    this.sem.leave();
                    // if there is a prior error return error
                    this.err.headers = this._getAndResetActiveResponseHeaders();
                    reject(this.err);
                    return;
                }
                if (this.orderByPQ.size() === 0) {
                    // there is no more results
                    this.state = ParallelQueryExecutionContextBase.STATES.ended;
                    // release the lock before invoking callback
                    this.sem.leave();
                    return resolve({
                        result: undefined,
                        headers: this._getAndResetActiveResponseHeaders(),
                    });
                }
                const ifCallback = () => {
                    // Release the semaphore to avoid deadlock
                    this.sem.leave();
                    // Re-execute the function
                    return resolve(this.nextItem());
                };
                const elseCallback = async () => {
                    let documentProducer;
                    try {
                        documentProducer = this.orderByPQ.deq();
                    }
                    catch (e) {
                        // if comparing elements of the priority queue throws exception
                        // set that error and return error
                        this.err = e;
                        // release the lock before invoking callback
                        this.sem.leave();
                        this.err.headers = this._getAndResetActiveResponseHeaders();
                        reject(this.err);
                        return;
                    }
                    let item;
                    let headers;
                    try {
                        const response = await documentProducer.nextItem();
                        item = response.result;
                        headers = response.headers;
                        this._mergeWithActiveResponseHeaders(headers);
                        if (item === undefined) {
                            // this should never happen
                            // because the documentProducer already has buffered an item
                            // assert item !== undefined
                            this.err = new Error(`Extracted DocumentProducer from the priority queue \
                            doesn't have any buffered item!`);
                            // release the lock before invoking callback
                            this.sem.leave();
                            return resolve({
                                result: undefined,
                                headers: this._getAndResetActiveResponseHeaders(),
                            });
                        }
                    }
                    catch (err) {
                        this.err = new Error(`Extracted DocumentProducer from the priority queue fails to get the \
                    buffered item. Due to ${JSON.stringify(err)}`);
                        this.err.headers = this._getAndResetActiveResponseHeaders();
                        // release the lock before invoking callback
                        this.sem.leave();
                        reject(this.err);
                        return;
                    }
                    // we need to put back the document producer to the queue if it has more elements.
                    // the lock will be released after we know document producer must be put back in the queue or not
                    try {
                        const { result: afterItem, headers: otherHeaders } = await documentProducer.current();
                        this._mergeWithActiveResponseHeaders(otherHeaders);
                        if (afterItem === undefined) {
                            // no more results is left in this document producer
                        }
                        else {
                            try {
                                const headItem = documentProducer.fetchResults[0];
                                if (typeof headItem === "undefined") {
                                    throw new Error("Extracted DocumentProducer from PQ is invalid state with no result!");
                                }
                                this.orderByPQ.enq(documentProducer);
                            }
                            catch (e) {
                                // if comparing elements in priority queue throws exception
                                // set error
                                this.err = e;
                            }
                        }
                    }
                    catch (err) {
                        if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) {
                            // We want the document producer enqueued
                            // So that later parts of the code can repair the execution context
                            this.orderByPQ.enq(documentProducer);
                        }
                        else {
                            // Something actually bad happened
                            this.err = err;
                            reject(this.err);
                        }
                    }
                    finally {
                        // release the lock before returning
                        this.sem.leave();
                    }
                    // invoke the callback on the item
                    return resolve({
                        result: item,
                        headers: this._getAndResetActiveResponseHeaders(),
                    });
                };
                this._repairExecutionContextIfNeeded(ifCallback, elseCallback).catch(reject);
            });
        });
    }
    /**
     * Determine if there are still remaining resources to process based on the value of the continuation
     * token or the elements remaining on the current batch in the QueryIterator.
     * @returns true if there is other elements to process in the ParallelQueryExecutionContextBase.
     */
    hasMoreResults() {
        return !(this.state === ParallelQueryExecutionContextBase.STATES.ended || this.err !== undefined);
    }
    /**
     * Creates document producers
     */
    _createTargetPartitionQueryExecutionContext(partitionKeyTargetRange, continuationToken) {
        // TODO: any
        // creates target partition range Query Execution Context
        let rewrittenQuery = this.partitionedQueryExecutionInfo.queryInfo.rewrittenQuery;
        let sqlQuerySpec;
        const query = this.query;
        if (typeof query === "string") {
            sqlQuerySpec = { query };
        }
        else {
            sqlQuerySpec = query;
        }
        const formatPlaceHolder = "{documentdb-formattableorderbyquery-filter}";
        if (rewrittenQuery) {
            // Deep copy the spec before mutating it so the caller's object is untouched.
            sqlQuerySpec = JSON.parse(JSON.stringify(sqlQuerySpec));
            // We hardcode the formattable filter to true for now
            rewrittenQuery = rewrittenQuery.replace(formatPlaceHolder, "true");
            sqlQuerySpec["query"] = rewrittenQuery;
        }
        const options = Object.assign({}, this.options);
        options.continuationToken = continuationToken;
        return new DocumentProducer(this.clientContext, this.collectionLink, sqlQuerySpec, partitionKeyTargetRange, options);
    }
}
ParallelQueryExecutionContextBase.STATES = ParallelQueryExecutionContextBaseStates;
3814
3815// Copyright (c) Microsoft Corporation.
3816/**
3817 * Provides the ParallelQueryExecutionContext.
3818 * This class is capable of handling parallelized queries and derives from ParallelQueryExecutionContextBase.
3819 * @hidden
3820 */
3821class ParallelQueryExecutionContext extends ParallelQueryExecutionContextBase {
3822 // Instance members are inherited
3823 // Overriding documentProducerComparator for ParallelQueryExecutionContexts
3824 /**
3825 * Provides a Comparator for document producers using the min value of the corresponding target partition.
3826 * @returns Comparator Function
3827 * @hidden
3828 */
3829 documentProducerComparator(docProd1, docProd2) {
3830 return docProd1.generation - docProd2.generation;
3831 }
3832}
3833
/** @hidden */
class OrderByQueryExecutionContext extends ParallelQueryExecutionContextBase {
    /**
     * Provides the OrderByQueryExecutionContext, which handles ORDER BY queries
     * on top of ParallelQueryExecutionContextBase: one DocumentProducer per target
     * partition key range, with results merged via an order-by comparator.
     *
     * @param clientContext - The service endpoint to use to create the client.
     * @param collectionLink - The Collection Link
     * @param options - Represents the feed options.
     * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
     * @hidden
     */
    constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) {
        super(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo);
        // Comparator honoring the sort orders collected by the base constructor.
        this.orderByComparator = new OrderByDocumentProducerComparator(this.sortOrders);
    }
    /**
     * Comparator for document producers which respects the ORDER BY sort order.
     * @returns negative / zero / positive per the usual comparator contract
     * @hidden
     */
    documentProducerComparator(docProd1, docProd2) {
        return this.orderByComparator.compare(docProd1, docProd2);
    }
}
3865
/** @hidden */
class OffsetLimitEndpointComponent {
    /**
     * Applies OFFSET/LIMIT semantics on top of an inner execution context.
     * @param executionContext - Underlying execution context producing items.
     * @param offset - Number of leading items to skip.
     * @param limit - Maximum number of items to emit after the offset.
     */
    constructor(executionContext, offset, limit) {
        this.executionContext = executionContext;
        this.offset = offset;
        this.limit = limit;
    }
    /**
     * Returns the next item once the offset has been consumed, decrementing limit.
     * Headers from skipped items are folded into the returned headers.
     */
    async nextItem() {
        const aggregateHeaders = getInitialHeader();
        // Drain items covered by OFFSET; only their headers are retained.
        for (; this.offset > 0; this.offset--) {
            const { headers } = await this.executionContext.nextItem();
            mergeHeaders(aggregateHeaders, headers);
        }
        if (this.limit <= 0) {
            // Both offset and limit are exhausted: nothing more to emit.
            return { result: undefined, headers: getInitialHeader() };
        }
        const { result, headers } = await this.executionContext.nextItem();
        this.limit--;
        mergeHeaders(aggregateHeaders, headers);
        return { result, headers: aggregateHeaders };
    }
    hasMoreResults() {
        return (this.offset > 0 || this.limit > 0) && this.executionContext.hasMoreResults();
    }
}
3894
/** @hidden */
class OrderByEndpointComponent {
    /**
     * Represents an endpoint in handling an order by query. For each processed
     * order-by result it unwraps and returns the result's 'payload' item.
     *
     * @param executionContext - Underlying Execution Context
     * @hidden
     */
    constructor(executionContext) {
        this.executionContext = executionContext;
    }
    /**
     * Fetches the next element and projects it down to its `payload` field.
     */
    async nextItem() {
        const response = await this.executionContext.nextItem();
        const item = response.result;
        const payload = item === undefined ? undefined : item.payload;
        return { result: payload, headers: response.headers };
    }
    /**
     * Determine if there are still remaining resources to process.
     * @returns true if there is other elements to process in the OrderByEndpointComponent.
     */
    hasMoreResults() {
        return this.executionContext.hasMoreResults();
    }
}
3925
3926// Copyright (c) Microsoft Corporation.
// Computes the SHA-256 digest of the UTF-8 bytes of `str`, as lowercase hex.
async function digest(str) {
    return crypto.createHash("sha256").update(str, "utf8").digest("hex");
}
3932
3933// Copyright (c) Microsoft Corporation.
// Hashes an object deterministically: key-order-stable JSON, then SHA-256 hex.
async function hashObject(object) {
    return digest(stableStringify__default.default(object));
}
3938
/** @hidden */
class OrderedDistinctEndpointComponent {
    /**
     * Deduplicates an ordered result stream by comparing each item's hash with
     * the hash of the immediately preceding item.
     */
    constructor(executionContext) {
        this.executionContext = executionContext;
    }
    async nextItem() {
        const { headers, result } = await this.executionContext.nextItem();
        if (!result) {
            return { result, headers };
        }
        const hashedResult = await hashObject(result);
        if (hashedResult === this.hashedLastResult) {
            // Duplicate of the previous item: suppress it.
            return { result: undefined, headers };
        }
        this.hashedLastResult = hashedResult;
        return { result, headers };
    }
    hasMoreResults() {
        return this.executionContext.hasMoreResults();
    }
}
3959
/** @hidden */
class UnorderedDistinctEndpointComponent {
    /**
     * Deduplicates an unordered result stream by remembering the hash of every
     * item seen so far.
     */
    constructor(executionContext) {
        this.executionContext = executionContext;
        this.hashedResults = new Set();
    }
    async nextItem() {
        const { headers, result } = await this.executionContext.nextItem();
        if (!result) {
            return { result, headers };
        }
        const hashedResult = await hashObject(result);
        if (this.hashedResults.has(hashedResult)) {
            // Already emitted an identical item: suppress the duplicate.
            return { result: undefined, headers };
        }
        this.hashedResults.add(hashedResult);
        return { result, headers };
    }
    hasMoreResults() {
        return this.executionContext.hasMoreResults();
    }
}
3981
3982// Copyright (c) Microsoft Corporation.
3983// Licensed under the MIT license.
// All aggregates are effectively a group by operation
// The empty group is used for aggregates without a GROUP BY clause
const emptyGroup = "__empty__";
// Newer API versions rewrite the query to return `item2`. It fixes some legacy issues with the original `item` result
// Aggregator code should use item2 when available
const extractAggregateResult = (payload) => {
    if (Object.keys(payload).length === 0) {
        return null;
    }
    // Truthiness check is deliberate: a falsy item2 falls back to item.
    return payload.item2 ? payload.item2 : payload.item;
};
3990
/** @hidden */
// Implements GROUP BY by fully draining the inner execution context, aggregating
// per group, then emitting one result object per group.
class GroupByEndpointComponent {
    constructor(executionContext, queryInfo) {
        this.executionContext = executionContext;
        this.queryInfo = queryInfo;
        // Map: hashed group-by key -> (Map: field name -> aggregator).
        this.groupings = new Map();
        // Finalized per-group results, drained by subsequent nextItem() calls.
        this.aggregateResultArray = [];
        this.completed = false;
    }
    async nextItem() {
        // If we have a full result set, begin returning results
        if (this.aggregateResultArray.length > 0) {
            return { result: this.aggregateResultArray.pop(), headers: getInitialHeader() };
        }
        if (this.completed) {
            return { result: undefined, headers: getInitialHeader() };
        }
        const aggregateHeaders = getInitialHeader();
        while (this.executionContext.hasMoreResults()) {
            // Grab the next result
            const { result, headers } = (await this.executionContext.nextItem());
            mergeHeaders(aggregateHeaders, headers);
            // If it exists, process it via aggregators
            if (result) {
                // Rows without groupByItems all fall into the single empty group.
                const group = result.groupByItems ? await hashObject(result.groupByItems) : emptyGroup;
                const aggregators = this.groupings.get(group);
                const payload = result.payload;
                if (aggregators) {
                    // Iterator over all results in the payload
                    Object.keys(payload).map((key) => {
                        // in case the value of a group is null make sure we create a dummy payload with item2==null
                        // NOTE(review): a Map is passed where extractAggregateResult expects a
                        // plain object; Object.keys(Map) is [], so it yields null — works, but
                        // looks accidental. Confirm before relying on it.
                        const effectiveGroupByValue = payload[key]
                            ? payload[key]
                            : new Map().set("item2", null);
                        const aggregateResult = extractAggregateResult(effectiveGroupByValue);
                        aggregators.get(key).aggregate(aggregateResult);
                    });
                }
                else {
                    // This is the first time we have seen a grouping. Setup the initial result without aggregate values
                    const grouping = new Map();
                    this.groupings.set(group, grouping);
                    // Iterator over all results in the payload
                    Object.keys(payload).map((key) => {
                        const aggregateType = this.queryInfo.groupByAliasToAggregateType[key];
                        // Create a new aggregator for this specific aggregate field
                        const aggregator = createAggregator(aggregateType);
                        grouping.set(key, aggregator);
                        if (aggregateType) {
                            const aggregateResult = extractAggregateResult(payload[key]);
                            aggregator.aggregate(aggregateResult);
                        }
                        else {
                            // No aggregate function for this alias: feed the raw value.
                            aggregator.aggregate(payload[key]);
                        }
                    });
                }
            }
        }
        // Finalize: materialize one result object per group, then start draining.
        for (const grouping of this.groupings.values()) {
            const groupResult = {};
            for (const [aggregateKey, aggregator] of grouping.entries()) {
                groupResult[aggregateKey] = aggregator.getResult();
            }
            this.aggregateResultArray.push(groupResult);
        }
        this.completed = true;
        return { result: this.aggregateResultArray.pop(), headers: aggregateHeaders };
    }
    hasMoreResults() {
        return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0;
    }
}
4064
/** @hidden */
class GroupByValueEndpointComponent {
    /**
     * Pipeline component handling `SELECT VALUE` GROUP BY / aggregate queries.
     * `nextItem` drains the wrapped execution context, folding each row into a
     * single aggregator per grouping, then replays the aggregated values.
     * @param executionContext - Wrapped execution context producing raw rows.
     * @param queryInfo - Query plan info; `aggregates[0]` is the aggregate type.
     */
    constructor(executionContext, queryInfo) {
        this.executionContext = executionContext;
        this.queryInfo = queryInfo;
        // Map<groupHash, Aggregator>
        this.aggregators = new Map();
        this.aggregateResultArray = [];
        this.completed = false;
        // VALUE queries will only ever have a single grouping
        this.aggregateType = this.queryInfo.aggregates[0];
    }
    async nextItem() {
        // Start returning results if we have processed a full results set
        if (this.aggregateResultArray.length > 0) {
            return { result: this.aggregateResultArray.pop(), headers: getInitialHeader() };
        }
        if (this.completed) {
            return { result: undefined, headers: getInitialHeader() };
        }
        const aggregateHeaders = getInitialHeader();
        while (this.executionContext.hasMoreResults()) {
            // Grab the next result
            const { result, headers } = (await this.executionContext.nextItem());
            mergeHeaders(aggregateHeaders, headers);
            // If it exists, process it via aggregators
            if (result) {
                let grouping = emptyGroup;
                let payload = result;
                if (result.groupByItems) {
                    // If the query contains a GROUP BY clause, it will have a payload property and groupByItems
                    payload = result.payload;
                    grouping = await hashObject(result.groupByItems);
                }
                // Look the aggregator up once, creating it the first time we see a grouping
                let aggregator = this.aggregators.get(grouping);
                if (aggregator === undefined) {
                    aggregator = createAggregator(this.aggregateType);
                    this.aggregators.set(grouping, aggregator);
                }
                if (this.aggregateType) {
                    const aggregateResult = extractAggregateResult(payload[0]);
                    // if aggregate result is null, we need to short circuit aggregation and return undefined
                    if (aggregateResult === null) {
                        this.completed = true;
                    }
                    aggregator.aggregate(aggregateResult);
                }
                else {
                    // Queries with no aggregates pass the payload directly to the aggregator
                    // Example: SELECT VALUE c.team FROM c GROUP BY c.team
                    aggregator.aggregate(payload);
                }
            }
        }
        // We bail early since we got an undefined result back `[{}]`
        if (this.completed) {
            return { result: undefined, headers: aggregateHeaders };
        }
        // If no results are left in the underlying execution context, convert our aggregate results to an array
        for (const aggregator of this.aggregators.values()) {
            this.aggregateResultArray.push(aggregator.getResult());
        }
        this.completed = true;
        return { result: this.aggregateResultArray.pop(), headers: aggregateHeaders };
    }
    hasMoreResults() {
        return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0;
    }
}
4133
/** @hidden */
class PipelinedQueryExecutionContext {
    /**
     * Builds the execution pipeline for a cross-partition query from its query plan.
     * The innermost context (parallel or order-by) is progressively wrapped by
     * endpoint components in a fixed order: GROUP BY, then TOP, then OFFSET/LIMIT,
     * then DISTINCT. The wrapping order matters — each component consumes the one
     * it wraps.
     * @param clientContext - Client context used to issue backend requests.
     * @param collectionLink - Link of the container being queried.
     * @param query - SQL query (string or query spec).
     * @param options - Feed options; `maxItemCount` sets the page size (default 10).
     * @param partitionedQueryExecutionInfo - Query plan returned by the gateway.
     */
    constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) {
        this.clientContext = clientContext;
        this.collectionLink = collectionLink;
        this.query = query;
        this.options = options;
        this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo;
        this.endpoint = null;
        this.pageSize = options["maxItemCount"];
        if (this.pageSize === undefined) {
            this.pageSize = PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE;
        }
        // Pick between parallel vs order by execution context
        const sortOrders = partitionedQueryExecutionInfo.queryInfo.orderBy;
        if (Array.isArray(sortOrders) && sortOrders.length > 0) {
            // Need to wrap orderby execution context in endpoint component, since the data is nested as a \
            // "payload" property.
            this.endpoint = new OrderByEndpointComponent(new OrderByQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo));
        }
        else {
            this.endpoint = new ParallelQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo);
        }
        // Any GROUP BY aliases, aggregates, or group-by expressions mean the query
        // needs a grouping component; SELECT VALUE queries use the VALUE variant.
        if (Object.keys(partitionedQueryExecutionInfo.queryInfo.groupByAliasToAggregateType).length > 0 ||
            partitionedQueryExecutionInfo.queryInfo.aggregates.length > 0 ||
            partitionedQueryExecutionInfo.queryInfo.groupByExpressions.length > 0) {
            if (partitionedQueryExecutionInfo.queryInfo.hasSelectValue) {
                this.endpoint = new GroupByValueEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo);
            }
            else {
                this.endpoint = new GroupByEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo);
            }
        }
        // If top then add that to the pipeline. TOP N is effectively OFFSET 0 LIMIT N
        const top = partitionedQueryExecutionInfo.queryInfo.top;
        if (typeof top === "number") {
            this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, 0, top);
        }
        // If offset+limit then add that to the pipeline
        const limit = partitionedQueryExecutionInfo.queryInfo.limit;
        const offset = partitionedQueryExecutionInfo.queryInfo.offset;
        if (typeof limit === "number" && typeof offset === "number") {
            this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, offset, limit);
        }
        // If distinct then add that to the pipeline
        const distinctType = partitionedQueryExecutionInfo.queryInfo.distinctType;
        if (distinctType === "Ordered") {
            this.endpoint = new OrderedDistinctEndpointComponent(this.endpoint);
        }
        if (distinctType === "Unordered") {
            this.endpoint = new UnorderedDistinctEndpointComponent(this.endpoint);
        }
    }
    /** Returns the next single item from the outermost pipeline component. */
    async nextItem() {
        return this.endpoint.nextItem();
    }
    // Removed callback here beacuse it wouldn't have ever worked...
    hasMoreResults() {
        return this.endpoint.hasMoreResults();
    }
    /**
     * Fetches up to one page (`pageSize`) of results.
     * Delegates to the wrapped endpoint's own `fetchMore` when it has one;
     * otherwise buffers items one at a time via `_fetchMoreImplementation`.
     */
    async fetchMore() {
        // if the wrapped endpoint has different implementation for fetchMore use that
        // otherwise use the default implementation
        if (typeof this.endpoint.fetchMore === "function") {
            return this.endpoint.fetchMore();
        }
        else {
            this.fetchBuffer = [];
            this.fetchMoreRespHeaders = getInitialHeader();
            return this._fetchMoreImplementation();
        }
    }
    // Recursively pulls single items into fetchBuffer until a page is full or the
    // pipeline is drained, merging response headers along the way.
    async _fetchMoreImplementation() {
        try {
            const { result: item, headers } = await this.endpoint.nextItem();
            mergeHeaders(this.fetchMoreRespHeaders, headers);
            if (item === undefined) {
                // no more results
                if (this.fetchBuffer.length === 0) {
                    return {
                        result: undefined,
                        headers: this.fetchMoreRespHeaders,
                    };
                }
                else {
                    // Just give what we have
                    const temp = this.fetchBuffer;
                    this.fetchBuffer = [];
                    return { result: temp, headers: this.fetchMoreRespHeaders };
                }
            }
            else {
                // append the result
                this.fetchBuffer.push(item);
                if (this.fetchBuffer.length >= this.pageSize) {
                    // fetched enough results
                    const temp = this.fetchBuffer.slice(0, this.pageSize);
                    this.fetchBuffer = this.fetchBuffer.splice(this.pageSize);
                    return { result: temp, headers: this.fetchMoreRespHeaders };
                }
                else {
                    // recursively fetch more
                    // TODO: is recursion a good idea?
                    return this._fetchMoreImplementation();
                }
            }
        }
        catch (err) {
            // Attach the accumulated headers to the error before rethrowing.
            // NOTE(review): the `if (err)` guard is always truthy for thrown Errors;
            // a falsy throw would be swallowed here and resolve to undefined.
            mergeHeaders(this.fetchMoreRespHeaders, err.headers);
            err.headers = this.fetchMoreRespHeaders;
            if (err) {
                throw err;
            }
        }
    }
}
PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE = 10;
4251
4252// Copyright (c) Microsoft Corporation.
4253/**
4254 * Represents a QueryIterator Object, an implementation of feed or query response that enables
4255 * traversal and iterating over the response
4256 * in the Azure Cosmos DB database service.
4257 */
4258class QueryIterator {
4259 /**
4260 * @hidden
4261 */
4262 constructor(clientContext, query, options, fetchFunctions, resourceLink, resourceType) {
4263 this.clientContext = clientContext;
4264 this.query = query;
4265 this.options = options;
4266 this.fetchFunctions = fetchFunctions;
4267 this.resourceLink = resourceLink;
4268 this.resourceType = resourceType;
4269 this.query = query;
4270 this.fetchFunctions = fetchFunctions;
4271 this.options = options || {};
4272 this.resourceLink = resourceLink;
4273 this.fetchAllLastResHeaders = getInitialHeader();
4274 this.reset();
4275 this.isInitialized = false;
4276 }
4277 /**
4278 * Gets an async iterator that will yield results until completion.
4279 *
4280 * NOTE: AsyncIterators are a very new feature and you might need to
4281 * use polyfils/etc. in order to use them in your code.
4282 *
4283 * If you're using TypeScript, you can use the following polyfill as long
4284 * as you target ES6 or higher and are running on Node 6 or higher.
4285 *
4286 * ```typescript
4287 * if (!Symbol || !Symbol.asyncIterator) {
4288 * (Symbol as any).asyncIterator = Symbol.for("Symbol.asyncIterator");
4289 * }
4290 * ```
4291 *
4292 * @example Iterate over all databases
4293 * ```typescript
4294 * for await(const { resources: db } of client.databases.readAll().getAsyncIterator()) {
4295 * console.log(`Got ${db} from AsyncIterator`);
4296 * }
4297 * ```
4298 */
4299 getAsyncIterator() {
4300 return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() {
4301 this.reset();
4302 this.queryPlanPromise = this.fetchQueryPlan();
4303 while (this.queryExecutionContext.hasMoreResults()) {
4304 let response;
4305 try {
4306 response = yield tslib.__await(this.queryExecutionContext.fetchMore());
4307 }
4308 catch (error) {
4309 if (this.needsQueryPlan(error)) {
4310 yield tslib.__await(this.createPipelinedExecutionContext());
4311 try {
4312 response = yield tslib.__await(this.queryExecutionContext.fetchMore());
4313 }
4314 catch (queryError) {
4315 this.handleSplitError(queryError);
4316 }
4317 }
4318 else {
4319 throw error;
4320 }
4321 }
4322 const feedResponse = new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults());
4323 if (response.result !== undefined) {
4324 yield yield tslib.__await(feedResponse);
4325 }
4326 }
4327 });
4328 }
4329 /**
4330 * Determine if there are still remaining resources to processs based on the value of the continuation token or the
4331 * elements remaining on the current batch in the QueryIterator.
4332 * @returns true if there is other elements to process in the QueryIterator.
4333 */
4334 hasMoreResults() {
4335 return this.queryExecutionContext.hasMoreResults();
4336 }
4337 /**
4338 * Fetch all pages for the query and return a single FeedResponse.
4339 */
4340 async fetchAll() {
4341 this.reset();
4342 this.fetchAllTempResources = [];
4343 let response;
4344 try {
4345 response = await this.toArrayImplementation();
4346 }
4347 catch (error) {
4348 this.handleSplitError(error);
4349 }
4350 return response;
4351 }
4352 /**
4353 * Retrieve the next batch from the feed.
4354 *
4355 * This may or may not fetch more pages from the backend depending on your settings
4356 * and the type of query. Aggregate queries will generally fetch all backend pages
4357 * before returning the first batch of responses.
4358 */
4359 async fetchNext() {
4360 this.queryPlanPromise = this.fetchQueryPlan();
4361 if (!this.isInitialized) {
4362 await this.init();
4363 }
4364 let response;
4365 try {
4366 response = await this.queryExecutionContext.fetchMore();
4367 }
4368 catch (error) {
4369 if (this.needsQueryPlan(error)) {
4370 await this.createPipelinedExecutionContext();
4371 try {
4372 response = await this.queryExecutionContext.fetchMore();
4373 }
4374 catch (queryError) {
4375 this.handleSplitError(queryError);
4376 }
4377 }
4378 else {
4379 throw error;
4380 }
4381 }
4382 return new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults());
4383 }
4384 /**
4385 * Reset the QueryIterator to the beginning and clear all the resources inside it
4386 */
4387 reset() {
4388 this.queryPlanPromise = undefined;
4389 this.queryExecutionContext = new DefaultQueryExecutionContext(this.options, this.fetchFunctions);
4390 }
4391 async toArrayImplementation() {
4392 this.queryPlanPromise = this.fetchQueryPlan();
4393 if (!this.isInitialized) {
4394 await this.init();
4395 }
4396 while (this.queryExecutionContext.hasMoreResults()) {
4397 let response;
4398 try {
4399 response = await this.queryExecutionContext.nextItem();
4400 }
4401 catch (error) {
4402 if (this.needsQueryPlan(error)) {
4403 await this.createPipelinedExecutionContext();
4404 response = await this.queryExecutionContext.nextItem();
4405 }
4406 else {
4407 throw error;
4408 }
4409 }
4410 const { result, headers } = response;
4411 // concatenate the results and fetch more
4412 mergeHeaders(this.fetchAllLastResHeaders, headers);
4413 if (result !== undefined) {
4414 this.fetchAllTempResources.push(result);
4415 }
4416 }
4417 return new FeedResponse(this.fetchAllTempResources, this.fetchAllLastResHeaders, this.queryExecutionContext.hasMoreResults());
4418 }
4419 async createPipelinedExecutionContext() {
4420 const queryPlanResponse = await this.queryPlanPromise;
4421 // We always coerce queryPlanPromise to resolved. So if it errored, we need to manually inspect the resolved value
4422 if (queryPlanResponse instanceof Error) {
4423 throw queryPlanResponse;
4424 }
4425 const queryPlan = queryPlanResponse.result;
4426 const queryInfo = queryPlan.queryInfo;
4427 if (queryInfo.aggregates.length > 0 && queryInfo.hasSelectValue === false) {
4428 throw new Error("Aggregate queries must use the VALUE keyword");
4429 }
4430 this.queryExecutionContext = new PipelinedQueryExecutionContext(this.clientContext, this.resourceLink, this.query, this.options, queryPlan);
4431 }
4432 async fetchQueryPlan() {
4433 if (!this.queryPlanPromise && this.resourceType === exports.ResourceType.item) {
4434 return this.clientContext
4435 .getQueryPlan(getPathFromLink(this.resourceLink) + "/docs", exports.ResourceType.item, this.resourceLink, this.query, this.options)
4436 .catch((error) => error); // Without this catch, node reports an unhandled rejection. So we stash the promise as resolved even if it errored.
4437 }
4438 return this.queryPlanPromise;
4439 }
4440 needsQueryPlan(error) {
4441 var _a;
4442 if (((_a = error.body) === null || _a === void 0 ? void 0 : _a.additionalErrorInfo) ||
4443 error.message.includes("Cross partition query only supports")) {
4444 return error.code === StatusCodes.BadRequest && this.resourceType === exports.ResourceType.item;
4445 }
4446 else {
4447 throw error;
4448 }
4449 }
4450 async init() {
4451 if (this.isInitialized === true) {
4452 return;
4453 }
4454 if (this.initPromise === undefined) {
4455 this.initPromise = this._init();
4456 }
4457 return this.initPromise;
4458 }
4459 async _init() {
4460 if (this.options.forceQueryPlan === true && this.resourceType === exports.ResourceType.item) {
4461 await this.createPipelinedExecutionContext();
4462 }
4463 this.isInitialized = true;
4464 }
4465 handleSplitError(err) {
4466 if (err.code === 410) {
4467 const error = new Error("Encountered partition split and could not recover. This request is retryable");
4468 error.code = 503;
4469 error.originalError = err;
4470 throw error;
4471 }
4472 else {
4473 throw err;
4474 }
4475 }
4476}
4477
/**
 * Response object returned by {@link Conflict} operations (read/delete).
 * Extends ResourceResponse with a back-reference to the Conflict it came from.
 */
class ConflictResponse extends ResourceResponse {
    constructor(resource, headers, statusCode, conflict) {
        super(resource, headers, statusCode);
        // The Conflict instance the operation was performed on
        this.conflict = conflict;
    }
}
4484
4485/**
4486 * Use to read or delete a given {@link Conflict} by id.
4487 *
4488 * @see {@link Conflicts} to query or read all conflicts.
4489 */
4490class Conflict {
4491 /**
4492 * @hidden
4493 * @param container - The parent {@link Container}.
4494 * @param id - The id of the given {@link Conflict}.
4495 */
4496 constructor(container, id, clientContext, partitionKey) {
4497 this.container = container;
4498 this.id = id;
4499 this.clientContext = clientContext;
4500 this.partitionKey = partitionKey;
4501 this.partitionKey = partitionKey;
4502 }
4503 /**
4504 * Returns a reference URL to the resource. Used for linking in Permissions.
4505 */
4506 get url() {
4507 return `/${this.container.url}/${Constants.Path.ConflictsPathSegment}/${this.id}`;
4508 }
4509 /**
4510 * Read the {@link ConflictDefinition} for the given {@link Conflict}.
4511 */
4512 async read(options) {
4513 const path = getPathFromLink(this.url, exports.ResourceType.conflicts);
4514 const id = getIdFromLink(this.url);
4515 const response = await this.clientContext.read({
4516 path,
4517 resourceType: exports.ResourceType.user,
4518 resourceId: id,
4519 options,
4520 });
4521 return new ConflictResponse(response.result, response.headers, response.code, this);
4522 }
4523 /**
4524 * Delete the given {@link ConflictDefinition}.
4525 */
4526 async delete(options) {
4527 if (this.partitionKey === undefined) {
4528 const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
4529 this.partitionKey = undefinedPartitionKey(partitionKeyDefinition);
4530 }
4531 const path = getPathFromLink(this.url);
4532 const id = getIdFromLink(this.url);
4533 const response = await this.clientContext.delete({
4534 path,
4535 resourceType: exports.ResourceType.conflicts,
4536 resourceId: id,
4537 options,
4538 partitionKey: this.partitionKey,
4539 });
4540 return new ConflictResponse(response.result, response.headers, response.code, this);
4541 }
4542}
4543
4544/**
4545 * Use to query or read all conflicts.
4546 *
4547 * @see {@link Conflict} to read or delete a given {@link Conflict} by id.
4548 */
4549class Conflicts {
4550 constructor(container, clientContext) {
4551 this.container = container;
4552 this.clientContext = clientContext;
4553 }
4554 query(query, options) {
4555 const path = getPathFromLink(this.container.url, exports.ResourceType.conflicts);
4556 const id = getIdFromLink(this.container.url);
4557 return new QueryIterator(this.clientContext, query, options, (innerOptions) => {
4558 return this.clientContext.queryFeed({
4559 path,
4560 resourceType: exports.ResourceType.conflicts,
4561 resourceId: id,
4562 resultFn: (result) => result.Conflicts,
4563 query,
4564 options: innerOptions,
4565 });
4566 });
4567 }
4568 /**
4569 * Reads all conflicts
4570 * @param options - Use to set options like response page size, continuation tokens, etc.
4571 */
4572 readAll(options) {
4573 return this.query(undefined, options);
4574 }
4575}
4576
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// TypeScript-compiled string enum: attaches the Custom / LastWriterWins members
// to the exported ConflictResolutionMode object.
exports.ConflictResolutionMode = void 0;
(function (ConflictResolutionMode) {
    ConflictResolutionMode["Custom"] = "Custom";
    ConflictResolutionMode["LastWriterWins"] = "LastWriterWins";
})(exports.ConflictResolutionMode || (exports.ConflictResolutionMode = {}));
4584
/**
 * Response object returned by {@link Item} operations; carries a
 * back-reference to the Item the operation was performed on.
 */
class ItemResponse extends ResourceResponse {
    constructor(resource, headers, statusCode, substatusCode, item) {
        super(resource, headers, statusCode, substatusCode);
        this.item = item;
    }
}
4591
4592/**
4593 * Used to perform operations on a specific item.
4594 *
4595 * @see {@link Items} for operations on all items; see `container.items`.
4596 */
4597class Item {
4598 /**
4599 * @hidden
4600 * @param container - The parent {@link Container}.
4601 * @param id - The id of the given {@link Item}.
4602 * @param partitionKey - The primary key of the given {@link Item} (only for partitioned containers).
4603 */
4604 constructor(container, id, partitionKey, clientContext) {
4605 this.container = container;
4606 this.id = id;
4607 this.clientContext = clientContext;
4608 this.partitionKey = partitionKey;
4609 }
4610 /**
4611 * Returns a reference URL to the resource. Used for linking in Permissions.
4612 */
4613 get url() {
4614 return createDocumentUri(this.container.database.id, this.container.id, this.id);
4615 }
4616 /**
4617 * Read the item's definition.
4618 *
4619 * Any provided type, T, is not necessarily enforced by the SDK.
4620 * You may get more or less properties and it's up to your logic to enforce it.
4621 * If the type, T, is a class, it won't pass `typeof` comparisons, because it won't have a match prototype.
4622 * It's recommended to only use interfaces.
4623 *
4624 * There is no set schema for JSON items. They may contain any number of custom properties.
4625 *
4626 * @param options - Additional options for the request
4627 *
4628 * @example Using custom type for response
4629 * ```typescript
4630 * interface TodoItem {
4631 * title: string;
4632 * done: bool;
4633 * id: string;
4634 * }
4635 *
4636 * let item: TodoItem;
4637 * ({body: item} = await item.read<TodoItem>());
4638 * ```
4639 */
4640 async read(options = {}) {
4641 if (this.partitionKey === undefined) {
4642 const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
4643 this.partitionKey = undefinedPartitionKey(partitionKeyDefinition);
4644 }
4645 const path = getPathFromLink(this.url);
4646 const id = getIdFromLink(this.url);
4647 let response;
4648 try {
4649 response = await this.clientContext.read({
4650 path,
4651 resourceType: exports.ResourceType.item,
4652 resourceId: id,
4653 options,
4654 partitionKey: this.partitionKey,
4655 });
4656 }
4657 catch (error) {
4658 if (error.code !== StatusCodes.NotFound) {
4659 throw error;
4660 }
4661 response = error;
4662 }
4663 return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
4664 }
4665 async replace(body, options = {}) {
4666 if (this.partitionKey === undefined) {
4667 const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
4668 this.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
4669 }
4670 const err = {};
4671 if (!isItemResourceValid(body, err)) {
4672 throw err;
4673 }
4674 const path = getPathFromLink(this.url);
4675 const id = getIdFromLink(this.url);
4676 const response = await this.clientContext.replace({
4677 body,
4678 path,
4679 resourceType: exports.ResourceType.item,
4680 resourceId: id,
4681 options,
4682 partitionKey: this.partitionKey,
4683 });
4684 return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
4685 }
4686 /**
4687 * Delete the item.
4688 *
4689 * Any provided type, T, is not necessarily enforced by the SDK.
4690 * You may get more or less properties and it's up to your logic to enforce it.
4691 *
4692 * @param options - Additional options for the request
4693 */
4694 async delete(options = {}) {
4695 if (this.partitionKey === undefined) {
4696 const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
4697 this.partitionKey = undefinedPartitionKey(partitionKeyDefinition);
4698 }
4699 const path = getPathFromLink(this.url);
4700 const id = getIdFromLink(this.url);
4701 const response = await this.clientContext.delete({
4702 path,
4703 resourceType: exports.ResourceType.item,
4704 resourceId: id,
4705 options,
4706 partitionKey: this.partitionKey,
4707 });
4708 return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
4709 }
4710 /**
4711 * Perform a JSONPatch on the item.
4712 *
4713 * Any provided type, T, is not necessarily enforced by the SDK.
4714 * You may get more or less properties and it's up to your logic to enforce it.
4715 *
4716 * @param options - Additional options for the request
4717 */
4718 async patch(body, options = {}) {
4719 if (this.partitionKey === undefined) {
4720 const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
4721 this.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
4722 }
4723 const path = getPathFromLink(this.url);
4724 const id = getIdFromLink(this.url);
4725 const response = await this.clientContext.patch({
4726 body,
4727 path,
4728 resourceType: exports.ResourceType.item,
4729 resourceId: id,
4730 options,
4731 partitionKey: this.partitionKey,
4732 });
4733 return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
4734 }
4735}
4736
// Copyright (c) Microsoft Corporation.
/**
 * A single response page from the Azure Cosmos DB Change Feed
 */
class ChangeFeedResponse {
    /**
     * @internal
     */
    constructor(
    /**
     * Gets the items returned in the response from Azure Cosmos DB
     */
    result, 
    /**
     * Gets the number of items returned in the response from Azure Cosmos DB
     */
    count, 
    /**
     * Gets the status code of the response from Azure Cosmos DB
     */
    statusCode, headers) {
        this.result = result;
        this.count = count;
        this.statusCode = statusCode;
        // Frozen so callers cannot mutate the recorded response headers
        this.headers = Object.freeze(headers);
    }
    /**
     * Gets the request charge for this request from the Azure Cosmos DB service.
     */
    get requestCharge() {
        const rus = this.headers[Constants.HttpHeaders.RequestCharge];
        // Request charges are fractional RU values (e.g. "2.38"); parseInt truncated them.
        return rus ? Number.parseFloat(rus) : null;
    }
    /**
     * Gets the activity ID for the request from the Azure Cosmos DB service.
     */
    get activityId() {
        return this.headers[Constants.HttpHeaders.ActivityId];
    }
    /**
     * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.
     *
     * This is equivalent to the `etag` property.
     */
    get continuation() {
        return this.etag;
    }
    /**
     * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.
     */
    get sessionToken() {
        return this.headers[Constants.HttpHeaders.SessionToken];
    }
    /**
     * Gets the entity tag associated with last transaction in the Azure Cosmos DB service,
     * which can be used as If-Non-Match Access condition for ReadFeed REST request or
     * `continuation` property of `ChangeFeedOptions` parameter for
     * `Items.changeFeed()`
     * to get feed changes since the transaction specified by this entity tag.
     *
     * This is equivalent to the `continuation` property.
     */
    get etag() {
        return this.headers[Constants.HttpHeaders.ETag];
    }
}
4803
4804/**
4805 * Provides iterator for change feed.
4806 *
4807 * Use `Items.changeFeed()` to get an instance of the iterator.
4808 */
4809class ChangeFeedIterator {
4810 /**
4811 * @internal
4812 */
4813 constructor(clientContext, resourceId, resourceLink, partitionKey, changeFeedOptions) {
4814 this.clientContext = clientContext;
4815 this.resourceId = resourceId;
4816 this.resourceLink = resourceLink;
4817 this.partitionKey = partitionKey;
4818 this.changeFeedOptions = changeFeedOptions;
4819 // partition key XOR partition key range id
4820 const partitionKeyValid = partitionKey !== undefined;
4821 this.isPartitionSpecified = partitionKeyValid;
4822 let canUseStartFromBeginning = true;
4823 if (changeFeedOptions.continuation) {
4824 this.nextIfNoneMatch = changeFeedOptions.continuation;
4825 canUseStartFromBeginning = false;
4826 }
4827 if (changeFeedOptions.startTime) {
4828 // .toUTCString() is platform specific, but most platforms use RFC 1123.
4829 // In ECMAScript 2018, this was standardized to RFC 1123.
4830 // See for more info: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString
4831 this.ifModifiedSince = changeFeedOptions.startTime.toUTCString();
4832 canUseStartFromBeginning = false;
4833 }
4834 if (canUseStartFromBeginning && !changeFeedOptions.startFromBeginning) {
4835 this.nextIfNoneMatch = ChangeFeedIterator.IfNoneMatchAllHeaderValue;
4836 }
4837 }
4838 /**
4839 * Gets a value indicating whether there are potentially additional results that can be retrieved.
4840 *
4841 * Initially returns true. This value is set based on whether the last execution returned a continuation token.
4842 *
4843 * @returns Boolean value representing if whether there are potentially additional results that can be retrieved.
4844 */
4845 get hasMoreResults() {
4846 return this.lastStatusCode !== StatusCodes.NotModified;
4847 }
4848 /**
4849 * Gets an async iterator which will yield pages of results from Azure Cosmos DB.
4850 */
4851 getAsyncIterator() {
4852 return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() {
4853 do {
4854 const result = yield tslib.__await(this.fetchNext());
4855 if (result.count > 0) {
4856 yield yield tslib.__await(result);
4857 }
4858 } while (this.hasMoreResults);
4859 });
4860 }
4861 /**
4862 * Read feed and retrieves the next page of results in Azure Cosmos DB.
4863 */
4864 async fetchNext() {
4865 const response = await this.getFeedResponse();
4866 this.lastStatusCode = response.statusCode;
4867 this.nextIfNoneMatch = response.headers[Constants.HttpHeaders.ETag];
4868 return response;
4869 }
4870 async getFeedResponse() {
4871 if (!this.isPartitionSpecified) {
4872 throw new Error("Container is partitioned, but no partition key or partition key range id was specified.");
4873 }
4874 const feedOptions = { initialHeaders: {}, useIncrementalFeed: true };
4875 if (typeof this.changeFeedOptions.maxItemCount === "number") {
4876 feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;
4877 }
4878 if (this.changeFeedOptions.sessionToken) {
4879 feedOptions.sessionToken = this.changeFeedOptions.sessionToken;
4880 }
4881 if (this.nextIfNoneMatch) {
4882 feedOptions.accessCondition = {
4883 type: Constants.HttpHeaders.IfNoneMatch,
4884 condition: this.nextIfNoneMatch,
4885 };
4886 }
4887 if (this.ifModifiedSince) {
4888 feedOptions.initialHeaders[Constants.HttpHeaders.IfModifiedSince] = this.ifModifiedSince;
4889 }
4890 const response = await this.clientContext.queryFeed({
4891 path: this.resourceLink,
4892 resourceType: exports.ResourceType.item,
4893 resourceId: this.resourceId,
4894 resultFn: (result) => (result ? result.Documents : []),
4895 query: undefined,
4896 options: feedOptions,
4897 partitionKey: this.partitionKey,
4898 }); // TODO: some funky issues with query feed. Probably need to change it up.
4899 return new ChangeFeedResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers);
4900 }
4901}
4902ChangeFeedIterator.IfNoneMatchAllHeaderValue = "*";
4903
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Hex-encoded single-byte type markers prepended when binary-encoding values
// (used by the JSBI binary-encoding helpers below, e.g. Number -> "05").
// NOTE(review): values appear ordered by type precedence for byte-wise
// comparison of encoded values — confirm before relying on that ordering.
const BytePrefix = {
    Undefined: "00",
    Null: "01",
    False: "02",
    True: "03",
    MinNumber: "04",
    Number: "05",
    MaxNumber: "06",
    MinString: "07",
    String: "08",
    MaxString: "09",
    Int64: "0a",
    Int32: "0b",
    Int16: "0c",
    Int8: "0d",
    Uint64: "0e",
    Uint32: "0f",
    Uint16: "10",
    Uint8: "11",
    Binary: "12",
    Guid: "13",
    Float: "14",
    Infinity: "FF",
};
4930
4931// Copyright (c) Microsoft Corporation.
/**
 * Serialize a number's order-preserving 64-bit encoding into the variable-length
 * byte stream used by the v1 partition key hash, prefixed with the Number
 * type marker byte. Bytes equal to 0x00 are skipped; intermediate bytes carry
 * a 0x01 continuation bit and the final byte has its low bit cleared (& 0xfe).
 */
function writeNumberForBinaryEncodingJSBI(hash) {
    let payload = encodeNumberAsUInt64JSBI(hash);
    let outputStream = Buffer.from(BytePrefix.Number, "hex");
    // Emit the top byte (bits 63..56) first.
    const firstChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56)));
    outputStream = Buffer.concat([outputStream, Buffer.from(firstChunk.toString(16), "hex")]);
    // NOTE(review): the JSBI.BigInt(payload) wrap looks redundant — payload is
    // already a JSBI value; confirm it is a no-op for JSBI inputs.
    payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(JSBI__default["default"].BigInt(payload), JSBI__default["default"].BigInt(0x8)));
    let byteToWrite = JSBI__default["default"].BigInt(0);
    let shifted;
    let padded;
    do {
        {
            // we pad because after shifting because we will produce characters like "f" or similar,
            // which cannot be encoded as hex in a buffer because they are invalid hex
            // https://github.com/nodejs/node/issues/24491
            padded = byteToWrite.toString(16).padStart(2, "0");
            if (padded !== "00") {
                outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]);
            }
        }
        // Take the next top byte and set its continuation bit; then consume
        // 7 bits of payload per iteration.
        shifted = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56)));
        byteToWrite = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseOr(shifted, JSBI__default["default"].BigInt(0x01)));
        payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(payload, JSBI__default["default"].BigInt(7)));
    } while (JSBI__default["default"].notEqual(payload, JSBI__default["default"].BigInt(0)));
    // Final byte: clear the continuation bit.
    const lastChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseAnd(byteToWrite, JSBI__default["default"].BigInt(0xfe)));
    // we pad because after shifting because we will produce characters like "f" or similar,
    // which cannot be encoded as hex in a buffer because they are invalid hex
    // https://github.com/nodejs/node/issues/24491
    padded = lastChunk.toString(16).padStart(2, "0");
    if (padded !== "00") {
        outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]);
    }
    return outputStream;
}
/**
 * Map a double's IEEE-754 bit pattern onto an unsigned 64-bit integer whose
 * ordering follows the numeric ordering of the doubles: non-negative values
 * (sign bit clear) get the sign bit flipped; negative values (sign bit set)
 * are two's-complement negated.
 * @param value - the JS number to encode
 * @returns a JSBI 64-bit value
 */
function encodeNumberAsUInt64JSBI(value) {
    const rawValueBits = getRawBitsJSBI(value);
    const mask = JSBI__default["default"].BigInt(0x8000000000000000);
    // BUG FIX: JSBI values must be compared with JSBI.lessThan. The native `<`
    // operator coerces both JSBI objects to decimal strings and compares them
    // lexicographically, which selects the wrong branch once the bit pattern
    // reaches 20 decimal digits (>= 10^19, i.e. certain negative doubles).
    const returned = JSBI__default["default"].lessThan(rawValueBits, mask)
        ? JSBI__default["default"].bitwiseXor(rawValueBits, mask)
        : JSBI__default["default"].add(JSBI__default["default"].bitwiseNot(rawValueBits), JSBI__default["default"].BigInt(1));
    return returned;
}
/**
 * Expand a double into its 8 raw IEEE-754 bytes, least-significant byte first
 * (i.e. the reverse of the big-endian bit pattern).
 */
function doubleToByteArrayJSBI(double) {
    const BYTE_MASK = JSBI__default["default"].BigInt(0xff);
    const BITS_PER_BYTE = JSBI__default["default"].BigInt(8);
    const rawBits = getRawBitsJSBI(double);
    const result = Buffer.alloc(8);
    for (let byteIndex = 0; byteIndex < 8; byteIndex++) {
        const shiftAmount = JSBI__default["default"].multiply(JSBI__default["default"].BigInt(byteIndex), BITS_PER_BYTE);
        const shifted = JSBI__default["default"].signedRightShift(rawBits, shiftAmount);
        result[byteIndex] = JSBI__default["default"].toNumber(JSBI__default["default"].bitwiseAnd(shifted, BYTE_MASK));
    }
    return result;
}
/**
 * Reinterpret a double's IEEE-754 bit pattern (big-endian) as an unsigned
 * 64-bit JSBI integer.
 */
function getRawBitsJSBI(value) {
    const scratch = Buffer.alloc(8);
    scratch.writeDoubleBE(value, 0);
    return JSBI__default["default"].BigInt(`0x${scratch.toString("hex")}`);
}
/**
 * Hex-encode an ArrayBuffer: two lowercase hex digits per byte.
 */
function buf2hex(buffer) {
    let hex = "";
    for (const byte of new Uint8Array(buffer)) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
4991
4992// Copyright (c) Microsoft Corporation.
/**
 * Binary-encode a string partition key component for v1 hashing: the String
 * marker byte, then each UTF-8 byte incremented by one (0xff stays 0xff),
 * terminated by the Undefined marker for "short" strings. Long strings keep
 * one extra byte past the cap and no terminator.
 */
function writeStringForBinaryEncoding(payload) {
    let outputStream = Buffer.from(BytePrefix.String, "hex");
    const MAX_STRING_BYTES_TO_APPEND = 100;
    const byteArray = [...Buffer.from(payload)];
    // NOTE(review): isShortString counts UTF-16 code units (payload.length)
    // while the loop walks UTF-8 bytes — the two differ for non-ASCII input;
    // confirm this matches the service-side encoding.
    const isShortString = payload.length <= MAX_STRING_BYTES_TO_APPEND;
    for (let index = 0; index < (isShortString ? byteArray.length : MAX_STRING_BYTES_TO_APPEND + 1); index++) {
        let charByte = byteArray[index];
        // Shift each byte up by one so 0x00 never appears inside the encoding.
        if (charByte < 0xff) {
            charByte++;
        }
        outputStream = Buffer.concat([outputStream, Buffer.from(charByte.toString(16), "hex")]);
    }
    if (isShortString) {
        outputStream = Buffer.concat([outputStream, Buffer.from(BytePrefix.Undefined, "hex")]);
    }
    return outputStream;
}
5010
5011// +----------------------------------------------------------------------+
5012// | murmurHash3js.js v3.0.1 // https://github.com/pid/murmurHash3js
5013// | A javascript implementation of MurmurHash3's x86 hashing algorithms. |
5014// |----------------------------------------------------------------------|
5015// | Copyright (c) 2012-2015 Karan Lyons |
5016// | https://github.com/karanlyons/murmurHash3.js/blob/c1778f75792abef7bdd74bc85d2d4e1a3d25cfe9/murmurHash3.js |
5017// | Freely distributable under the MIT license. |
5018// +----------------------------------------------------------------------+
5019// PRIVATE FUNCTIONS
5020// -----------------
function _x86Multiply(m, n) {
    //
    // Overflow-safe 32-bit multiply: split m into 16-bit halves so each
    // partial product stays within double precision, then recombine. The
    // high half is truncated back into 32 bits by the `<< 16`.
    //
    const lowProduct = (m & 0xffff) * n;
    const highProduct = (((m >>> 16) * n) & 0xffff) << 16;
    return lowProduct + highProduct;
}
function _x86Rotl(m, n) {
    //
    // Rotate the 32-bit int m left by n bit positions (result is a signed
    // 32-bit int, as produced by `|`).
    //
    const shiftedLeft = m << n;
    const wrappedRight = m >>> (32 - n);
    return shiftedLeft | wrappedRight;
}
function _x86Fmix(h) {
    //
    // Given a block, returns murmurHash3's final x86 mix of that block.
    //
    // Finalization "avalanche": alternating xor-shift and multiply rounds
    // spread entropy across all 32 bits; the constants come from the
    // MurmurHash3 reference implementation.
    h ^= h >>> 16;
    h = _x86Multiply(h, 0x85ebca6b);
    h ^= h >>> 13;
    h = _x86Multiply(h, 0xc2b2ae35);
    h ^= h >>> 16;
    return h;
}
function _x64Add(m, n) {
    //
    // Add two 64-bit ints, each given as [hi32, lo32], modulo 2^64.
    // Works in 16-bit limbs (most significant first) so carries never
    // exceed double precision. Returns [hi32, lo32].
    //
    const a = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff];
    const b = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff];
    const sum = [0, 0, 0, 0];
    for (let limb = 3; limb >= 0; limb--) {
        sum[limb] += a[limb] + b[limb];
        if (limb > 0) {
            // Propagate the carry into the next more-significant limb.
            sum[limb - 1] += sum[limb] >>> 16;
        }
        sum[limb] &= 0xffff;
    }
    return [(sum[0] << 16) | sum[1], (sum[2] << 16) | sum[3]];
}
function _x64Multiply(m, n) {
    //
    // Multiply two 64-bit ints ([hi32, lo32] pairs) modulo 2^64 using 16-bit
    // limbs; cross products whose weight exceeds 2^64 are deliberately
    // dropped. Returns [hi32, lo32].
    //
    const a = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff];
    const b = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff];
    const acc = [0, 0, 0, 0];
    // Limb 3 (lowest 16 bits).
    acc[3] += a[3] * b[3];
    acc[2] += acc[3] >>> 16;
    acc[3] &= 0xffff;
    // Limb 2: two cross products, carrying after each.
    acc[2] += a[2] * b[3];
    acc[1] += acc[2] >>> 16;
    acc[2] &= 0xffff;
    acc[2] += a[3] * b[2];
    acc[1] += acc[2] >>> 16;
    acc[2] &= 0xffff;
    // Limb 1: three cross products.
    acc[1] += a[1] * b[3];
    acc[0] += acc[1] >>> 16;
    acc[1] &= 0xffff;
    acc[1] += a[2] * b[2];
    acc[0] += acc[1] >>> 16;
    acc[1] &= 0xffff;
    acc[1] += a[3] * b[1];
    acc[0] += acc[1] >>> 16;
    acc[1] &= 0xffff;
    // Limb 0 (highest 16 bits): no carry out — overflow past 2^64 is discarded.
    acc[0] += a[0] * b[3] + a[1] * b[2] + a[2] * b[1] + a[3] * b[0];
    acc[0] &= 0xffff;
    return [(acc[0] << 16) | acc[1], (acc[2] << 16) | acc[3]];
}
function _x64Rotl(m, n) {
    //
    // Rotate a 64-bit int ([hi32, lo32]) left by n bit positions and return
    // the rotated [hi32, lo32]. (Callers pass 1..63; n === 0 would hit the
    // JavaScript `>>> 32` shift-count quirk, exactly as in the original.)
    //
    n %= 64;
    if (n === 32) {
        // A 32-bit rotation is simply a word swap.
        return [m[1], m[0]];
    }
    if (n < 32) {
        const hi = (m[0] << n) | (m[1] >>> (32 - n));
        const lo = (m[1] << n) | (m[0] >>> (32 - n));
        return [hi, lo];
    }
    const shift = n - 32;
    const hi = (m[1] << shift) | (m[0] >>> (32 - shift));
    const lo = (m[0] << shift) | (m[1] >>> (32 - shift));
    return [hi, lo];
}
function _x64LeftShift(m, n) {
    //
    // Shift a 64-bit int ([hi32, lo32]) left by n bit positions, dropping
    // bits shifted past bit 63. For n === 0 the input array is returned as-is.
    //
    n %= 64;
    if (n === 0) {
        return m;
    }
    if (n >= 32) {
        // The low word provides all surviving bits; the low half becomes zero.
        return [m[1] << (n - 32), 0];
    }
    const hi = (m[0] << n) | (m[1] >>> (32 - n));
    return [hi, m[1] << n];
}
function _x64Xor(m, n) {
    //
    // XOR two 64-bit ints given as [hi32, lo32] pairs, word by word.
    //
    const hi = m[0] ^ n[0];
    const lo = m[1] ^ n[1];
    return [hi, lo];
}
function _x64Fmix(h) {
    //
    // Given a block, returns murmurHash3's final x64 mix of that block.
    // (`[0, h[0] >>> 1]` is a 33 bit unsigned right shift. This is the
    // only place where we need to right shift 64bit ints.)
    //
    // Avalanche finalizer: the multiply constants are the ones from the
    // MurmurHash3 x64 reference implementation.
    h = _x64Xor(h, [0, h[0] >>> 1]);
    h = _x64Multiply(h, [0xff51afd7, 0xed558ccd]);
    h = _x64Xor(h, [0, h[0] >>> 1]);
    h = _x64Multiply(h, [0xc4ceb9fe, 0x1a85ec53]);
    h = _x64Xor(h, [0, h[0] >>> 1]);
    return h;
}
5152// PUBLIC FUNCTIONS
5153// ----------------
function x86Hash32(bytes, seed) {
    //
    // Given a string and an optional seed as an int, returns a 32 bit hash
    // using the x86 flavor of MurmurHash3, as an unsigned int.
    //
    // `bytes` is an array-like of byte values; blocks are read little-endian.
    seed = seed || 0;
    const remainder = bytes.length % 4;
    const blocks = bytes.length - remainder;
    let h1 = seed;
    let k1 = 0;
    const c1 = 0xcc9e2d51;
    const c2 = 0x1b873593;
    let j = 0;
    // Body: mix one 4-byte block at a time.
    for (let i = 0; i < blocks; i = i + 4) {
        k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24);
        k1 = _x86Multiply(k1, c1);
        k1 = _x86Rotl(k1, 15);
        k1 = _x86Multiply(k1, c2);
        h1 ^= k1;
        h1 = _x86Rotl(h1, 13);
        h1 = _x86Multiply(h1, 5) + 0xe6546b64;
        j = i + 4;
    }
    k1 = 0;
    // Tail: the switch intentionally falls through so each case folds in one
    // remaining byte before the final mix under `case 1`.
    switch (remainder) {
        case 3:
            k1 ^= bytes[j + 2] << 16;
        // falls through
        case 2:
            k1 ^= bytes[j + 1] << 8;
        // falls through
        case 1:
            k1 ^= bytes[j];
            k1 = _x86Multiply(k1, c1);
            k1 = _x86Rotl(k1, 15);
            k1 = _x86Multiply(k1, c2);
            h1 ^= k1;
    }
    // Finalization: fold in the length, then avalanche.
    h1 ^= bytes.length;
    h1 = _x86Fmix(h1);
    return h1 >>> 0;
}
function x86Hash128(bytes, seed) {
    //
    // Given a string and an optional seed as an int, returns a 128 bit
    // hash using the x86 flavor of MurmurHash3, as an unsigned hex.
    //
    // `bytes` is an array-like of byte values; four 32-bit lanes (h1..h4) are
    // mixed from 16-byte little-endian blocks.
    seed = seed || 0;
    const remainder = bytes.length % 16;
    const blocks = bytes.length - remainder;
    let h1 = seed;
    let h2 = seed;
    let h3 = seed;
    let h4 = seed;
    let k1 = 0;
    let k2 = 0;
    let k3 = 0;
    let k4 = 0;
    const c1 = 0x239b961b;
    const c2 = 0xab0e9789;
    const c3 = 0x38b34ae5;
    const c4 = 0xa1e38b93;
    let j = 0;
    for (let i = 0; i < blocks; i = i + 16) {
        k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24);
        k2 = bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24);
        k3 = bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24);
        k4 = bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24);
        k1 = _x86Multiply(k1, c1);
        k1 = _x86Rotl(k1, 15);
        k1 = _x86Multiply(k1, c2);
        h1 ^= k1;
        h1 = _x86Rotl(h1, 19);
        h1 += h2;
        h1 = _x86Multiply(h1, 5) + 0x561ccd1b;
        k2 = _x86Multiply(k2, c2);
        k2 = _x86Rotl(k2, 16);
        k2 = _x86Multiply(k2, c3);
        h2 ^= k2;
        h2 = _x86Rotl(h2, 17);
        h2 += h3;
        h2 = _x86Multiply(h2, 5) + 0x0bcaa747;
        k3 = _x86Multiply(k3, c3);
        k3 = _x86Rotl(k3, 17);
        k3 = _x86Multiply(k3, c4);
        h3 ^= k3;
        h3 = _x86Rotl(h3, 15);
        h3 += h4;
        h3 = _x86Multiply(h3, 5) + 0x96cd1c35;
        k4 = _x86Multiply(k4, c4);
        k4 = _x86Rotl(k4, 18);
        k4 = _x86Multiply(k4, c1);
        h4 ^= k4;
        h4 = _x86Rotl(h4, 13);
        h4 += h1;
        h4 = _x86Multiply(h4, 5) + 0x32ac3b17;
        j = i + 16;
    }
    k1 = 0;
    k2 = 0;
    k3 = 0;
    k4 = 0;
    // Tail: intentional fall-through — each case folds one trailing byte into
    // its lane; the mixing steps run under cases 13, 9, 5 and 1.
    switch (remainder) {
        case 15:
            k4 ^= bytes[j + 14] << 16;
        case 14:
            k4 ^= bytes[j + 13] << 8;
        case 13:
            k4 ^= bytes[j + 12];
            k4 = _x86Multiply(k4, c4);
            k4 = _x86Rotl(k4, 18);
            k4 = _x86Multiply(k4, c1);
            h4 ^= k4;
        case 12:
            k3 ^= bytes[j + 11] << 24;
        case 11:
            k3 ^= bytes[j + 10] << 16;
        case 10:
            k3 ^= bytes[j + 9] << 8;
        case 9:
            k3 ^= bytes[j + 8];
            k3 = _x86Multiply(k3, c3);
            k3 = _x86Rotl(k3, 17);
            k3 = _x86Multiply(k3, c4);
            h3 ^= k3;
        case 8:
            k2 ^= bytes[j + 7] << 24;
        case 7:
            k2 ^= bytes[j + 6] << 16;
        case 6:
            k2 ^= bytes[j + 5] << 8;
        case 5:
            k2 ^= bytes[j + 4];
            k2 = _x86Multiply(k2, c2);
            k2 = _x86Rotl(k2, 16);
            k2 = _x86Multiply(k2, c3);
            h2 ^= k2;
        case 4:
            k1 ^= bytes[j + 3] << 24;
        case 3:
            k1 ^= bytes[j + 2] << 16;
        case 2:
            k1 ^= bytes[j + 1] << 8;
        case 1:
            k1 ^= bytes[j];
            k1 = _x86Multiply(k1, c1);
            k1 = _x86Rotl(k1, 15);
            k1 = _x86Multiply(k1, c2);
            h1 ^= k1;
    }
    // Finalization: fold in the length, cross-mix the lanes, avalanche each,
    // then cross-mix again.
    h1 ^= bytes.length;
    h2 ^= bytes.length;
    h3 ^= bytes.length;
    h4 ^= bytes.length;
    h1 += h2;
    h1 += h3;
    h1 += h4;
    h2 += h1;
    h3 += h1;
    h4 += h1;
    h1 = _x86Fmix(h1);
    h2 = _x86Fmix(h2);
    h3 = _x86Fmix(h3);
    h4 = _x86Fmix(h4);
    h1 += h2;
    h1 += h3;
    h1 += h4;
    h2 += h1;
    h3 += h1;
    h4 += h1;
    // Concatenate the four lanes as zero-padded 8-digit hex words.
    return (("00000000" + (h1 >>> 0).toString(16)).slice(-8) +
        ("00000000" + (h2 >>> 0).toString(16)).slice(-8) +
        ("00000000" + (h3 >>> 0).toString(16)).slice(-8) +
        ("00000000" + (h4 >>> 0).toString(16)).slice(-8));
}
function x64Hash128(bytes, seed) {
    //
    // Given a string and an optional seed as an int, returns a 128 bit
    // hash using the x64 flavor of MurmurHash3, as an unsigned hex.
    //
    // 64-bit lanes are represented as [hi32, lo32] pairs; blocks are read
    // little-endian (note the low word of each 64-bit half is read first).
    seed = seed || 0;
    const remainder = bytes.length % 16;
    const blocks = bytes.length - remainder;
    let h1 = [0, seed];
    let h2 = [0, seed];
    let k1 = [0, 0];
    let k2 = [0, 0];
    const c1 = [0x87c37b91, 0x114253d5];
    const c2 = [0x4cf5ad43, 0x2745937f];
    let j = 0;
    for (let i = 0; i < blocks; i = i + 16) {
        k1 = [
            bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24),
            bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24),
        ];
        k2 = [
            bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24),
            bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24),
        ];
        k1 = _x64Multiply(k1, c1);
        k1 = _x64Rotl(k1, 31);
        k1 = _x64Multiply(k1, c2);
        h1 = _x64Xor(h1, k1);
        h1 = _x64Rotl(h1, 27);
        h1 = _x64Add(h1, h2);
        h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 0x52dce729]);
        k2 = _x64Multiply(k2, c2);
        k2 = _x64Rotl(k2, 33);
        k2 = _x64Multiply(k2, c1);
        h2 = _x64Xor(h2, k2);
        h2 = _x64Rotl(h2, 31);
        h2 = _x64Add(h2, h1);
        h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 0x38495ab5]);
        j = i + 16;
    }
    k1 = [0, 0];
    k2 = [0, 0];
    // Tail: intentional fall-through — each case folds one trailing byte;
    // the mixing steps run under case 9 (for k2) and case 1 (for k1).
    switch (remainder) {
        case 15:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 14]], 48));
        case 14:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 13]], 40));
        case 13:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 12]], 32));
        case 12:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 11]], 24));
        case 11:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 10]], 16));
        case 10:
            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 9]], 8));
        case 9:
            k2 = _x64Xor(k2, [0, bytes[j + 8]]);
            k2 = _x64Multiply(k2, c2);
            k2 = _x64Rotl(k2, 33);
            k2 = _x64Multiply(k2, c1);
            h2 = _x64Xor(h2, k2);
        case 8:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 7]], 56));
        case 7:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 6]], 48));
        case 6:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 5]], 40));
        case 5:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 4]], 32));
        case 4:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 3]], 24));
        case 3:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 2]], 16));
        case 2:
            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 1]], 8));
        case 1:
            k1 = _x64Xor(k1, [0, bytes[j]]);
            k1 = _x64Multiply(k1, c1);
            k1 = _x64Rotl(k1, 31);
            k1 = _x64Multiply(k1, c2);
            h1 = _x64Xor(h1, k1);
    }
    // Finalization: fold in the length, cross-mix, avalanche, cross-mix again.
    h1 = _x64Xor(h1, [0, bytes.length]);
    h2 = _x64Xor(h2, [0, bytes.length]);
    h1 = _x64Add(h1, h2);
    h2 = _x64Add(h2, h1);
    h1 = _x64Fmix(h1);
    h2 = _x64Fmix(h2);
    h1 = _x64Add(h1, h2);
    h2 = _x64Add(h2, h1);
    // Here we reverse h1 and h2 in Cosmos
    // This is an implementation detail and not part of the public spec
    const h1Buff = Buffer.from(("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) +
        ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8), "hex");
    const h1Reversed = reverse$1(h1Buff).toString("hex");
    const h2Buff = Buffer.from(("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) +
        ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8), "hex");
    const h2Reversed = reverse$1(h2Buff).toString("hex");
    return h1Reversed + h2Reversed;
}
/**
 * Return a byte-reversed copy of a Buffer without mutating the input.
 */
function reverse$1(buff) {
    // Buffer.from copies, then reverse() flips the copy in place.
    return Buffer.from(buff).reverse();
}
/**
 * Minimal MurmurHash3 facade (x86 32-bit / 128-bit and x64 128-bit flavors)
 * used by the partition key hashing functions below.
 */
var MurmurHash = {
    // NOTE(review): the banner comment above says v3.0.1 while this field
    // reports "3.0.0" — left untouched because it is runtime data.
    version: "3.0.0",
    x86: {
        hash32: x86Hash32,
        hash128: x86Hash128,
    },
    x64: {
        hash128: x64Hash128,
    },
    inputValidation: true,
};
5446
5447// Copyright (c) Microsoft Corporation.
// v1 hashing truncates string partition keys to this many characters.
const MAX_STRING_CHARS = 100;
/**
 * Compute the v1 effective partition key: binary-encoded murmur32 of the
 * type-prefixed key, followed by the key's own binary encoding, rendered as
 * uppercase hex.
 */
function hashV1PartitionKey(partitionKey) {
    const hash32 = MurmurHash.x86.hash32(prefixKeyByType$1(partitionKey));
    const hashedPrefix = writeNumberForBinaryEncodingJSBI(hash32);
    const encodedKey = encodeByType(partitionKey);
    return Buffer.concat([hashedPrefix, encodedKey]).toString("hex").toUpperCase();
}
/**
 * Prefix a partition key component with its type marker byte for v1 hashing.
 * Strings are truncated to MAX_STRING_CHARS characters and terminated with
 * the Undefined marker; numbers hash over their IEEE-754 bytes.
 * @param key - string | number | boolean | null | undefined | object
 * @returns Buffer of type-prefixed bytes to feed the hash
 * @throws Error for unsupported typeof results (symbol, function, bigint)
 */
function prefixKeyByType$1(key) {
    switch (typeof key) {
        case "string": {
            // String.prototype.substr is deprecated; slice(0, n) is
            // equivalent for a non-negative start index.
            const truncated = key.slice(0, MAX_STRING_CHARS);
            const bytes = Buffer.concat([
                Buffer.from(BytePrefix.String, "hex"),
                Buffer.from(truncated),
                Buffer.from(BytePrefix.Undefined, "hex"),
            ]);
            return bytes;
        }
        case "number": {
            const numberBytes = doubleToByteArrayJSBI(key);
            const bytes = Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), numberBytes]);
            return bytes;
        }
        case "boolean": {
            const prefix = key ? BytePrefix.True : BytePrefix.False;
            return Buffer.from(prefix, "hex");
        }
        case "object": {
            if (key === null) {
                return Buffer.from(BytePrefix.Null, "hex");
            }
            // Non-null objects are not hashable keys; treat as Undefined.
            return Buffer.from(BytePrefix.Undefined, "hex");
        }
        case "undefined": {
            return Buffer.from(BytePrefix.Undefined, "hex");
        }
        default:
            throw new Error(`Unexpected type: ${typeof key}`);
    }
}
/**
 * Binary-encode a partition key component for the suffix portion of the v1
 * effective partition key (same type dispatch as prefixKeyByType$1, but
 * strings use the incremented-byte encoding from writeStringForBinaryEncoding).
 * @param key - string | number | boolean | null | undefined | object
 * @returns Buffer with the encoded component
 * @throws Error for unsupported typeof results (symbol, function, bigint)
 */
function encodeByType(key) {
    switch (typeof key) {
        case "string": {
            // String.prototype.substr is deprecated; slice(0, n) is
            // equivalent for a non-negative start index.
            const truncated = key.slice(0, MAX_STRING_CHARS);
            return writeStringForBinaryEncoding(truncated);
        }
        case "number": {
            const encodedJSBI = writeNumberForBinaryEncodingJSBI(key);
            return encodedJSBI;
        }
        case "boolean": {
            const prefix = key ? BytePrefix.True : BytePrefix.False;
            return Buffer.from(prefix, "hex");
        }
        case "object":
            if (key === null) {
                return Buffer.from(BytePrefix.Null, "hex");
            }
            return Buffer.from(BytePrefix.Undefined, "hex");
        case "undefined":
            return Buffer.from(BytePrefix.Undefined, "hex");
        default:
            throw new Error(`Unexpected type: ${typeof key}`);
    }
}
5515
5516// Copyright (c) Microsoft Corporation.
/**
 * Compute the v2 effective partition key: murmur3 x64 128-bit digest of the
 * type-prefixed key, byte-reversed, with the top two bits cleared, rendered
 * as uppercase hex.
 */
function hashV2PartitionKey(partitionKey) {
    const digestHex = MurmurHash.x64.hash128(prefixKeyByType(partitionKey));
    const reversed = reverse(Buffer.from(digestHex, "hex"));
    reversed[0] &= 0x3f;
    return reversed.toString("hex").toUpperCase();
}
/**
 * Prefix a partition key component with the type marker byte expected by the
 * v2 hashing scheme. Unlike v1, strings are not truncated and are terminated
 * with the Infinity marker.
 */
function prefixKeyByType(key) {
    const keyType = typeof key;
    if (keyType === "string") {
        return Buffer.concat([
            Buffer.from(BytePrefix.String, "hex"),
            Buffer.from(key),
            Buffer.from(BytePrefix.Infinity, "hex"),
        ]);
    }
    if (keyType === "number") {
        // Numbers hash over their IEEE-754 byte representation.
        return Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), doubleToByteArrayJSBI(key)]);
    }
    if (keyType === "boolean") {
        return Buffer.from(key ? BytePrefix.True : BytePrefix.False, "hex");
    }
    if (keyType === "object") {
        return Buffer.from(key === null ? BytePrefix.Null : BytePrefix.Undefined, "hex");
    }
    if (keyType === "undefined") {
        return Buffer.from(BytePrefix.Undefined, "hex");
    }
    throw new Error(`Unexpected type: ${typeof key}`);
}
/**
 * Return a byte-reversed copy of a Buffer; the input is left unmodified.
 */
function reverse(buff) {
    // Buffer.from copies, then reverse() flips the copy in place.
    return Buffer.from(buff).reverse();
}
5565
5566// Copyright (c) Microsoft Corporation.
// v4 UUID generator; used below to auto-assign ids to items created without one.
const uuid$1 = uuid$3.v4;
5568/**
5569 * @hidden
5570 */
function isChangeFeedOptions(options) {
    // Truthy non-primitive values are treated as a ChangeFeedOptions bag;
    // strings/booleans/numbers are partition keys. Falsy inputs short-circuit
    // and are returned as-is (undefined/null/0/""), matching the original.
    return options && !["string", "boolean", "number"].includes(typeof options);
}
5575/**
5576 * Operations for creating new items, and reading/querying all items
5577 *
5578 * @see {@link Item} for reading, replacing, or deleting an existing container; use `.item(id)`.
5579 */
class Items {
    /**
     * Create an instance of {@link Items} linked to the parent {@link Container}.
     * @param container - The parent container.
     * @hidden
     */
    constructor(container, clientContext) {
        this.container = container;
        this.clientContext = clientContext;
    }
    /**
     * Query items in this container.
     * @param query - SQL query string or parameterized query specification.
     * @param options - Feed options; `options.partitionKey` scopes the query
     *        to a single partition.
     * @returns a QueryIterator yielding pages of matching documents.
     */
    query(query, options = {}) {
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        const id = getIdFromLink(this.container.url);
        // Each page fetch re-issues the feed query with the iterator's per-page options.
        const fetchFunction = (innerOptions) => {
            return this.clientContext.queryFeed({
                path,
                resourceType: exports.ResourceType.item,
                resourceId: id,
                // The feed payload nests the items under "Documents".
                resultFn: (result) => (result ? result.Documents : []),
                query,
                options: innerOptions,
                partitionKey: options.partitionKey,
            });
        };
        return new QueryIterator(this.clientContext, query, options, fetchFunction, this.container.url, exports.ResourceType.item);
    }
    /**
     * Overloaded alias for {@link Items.changeFeed}: the first argument may be
     * either a partition key or a ChangeFeedOptions bag.
     */
    readChangeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) {
        if (isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) {
            return this.changeFeed(partitionKeyOrChangeFeedOptions);
        }
        else {
            return this.changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions);
        }
    }
    /**
     * Create a {@link ChangeFeedIterator} for this container.
     * Disambiguates the (options) vs (partitionKey, options) overloads.
     */
    changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) {
        let partitionKey;
        if (!changeFeedOptions && isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) {
            // Single-argument form: the first argument is the options bag.
            partitionKey = undefined;
            changeFeedOptions = partitionKeyOrChangeFeedOptions;
        }
        else if (partitionKeyOrChangeFeedOptions !== undefined &&
            !isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) {
            partitionKey = partitionKeyOrChangeFeedOptions;
        }
        if (!changeFeedOptions) {
            changeFeedOptions = {};
        }
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        const id = getIdFromLink(this.container.url);
        return new ChangeFeedIterator(this.clientContext, id, path, partitionKey, changeFeedOptions);
    }
    /**
     * Read all items (equivalent to querying `SELECT * from c`).
     * @param options - Feed options forwarded to {@link Items.query}.
     */
    readAll(options) {
        return this.query("SELECT * from c", options);
    }
    /**
     * Create an item.
     *
     * Any provided type, T, is not necessarily enforced by the SDK.
     * You may get more or less properties and it's up to your logic to enforce it.
     *
     * There is no set schema for JSON items. They may contain any number of custom properties.
     *
     * @param body - Represents the body of the item. Can contain any number of user defined properties.
     * @param options - Used for modifying the request (for instance, specifying the partition key).
     */
    async create(body, options = {}) {
        // Generate random document id if the id is missing in the payload and
        // options.disableAutomaticIdGeneration != true
        if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) {
            body.id = uuid$1();
        }
        // The partition key value is extracted from the body using the
        // container's partition key definition.
        const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
        const partitionKey = extractPartitionKey(body, partitionKeyDefinition);
        const err = {};
        if (!isItemResourceValid(body, err)) {
            throw err;
        }
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        const id = getIdFromLink(this.container.url);
        const response = await this.clientContext.create({
            body,
            path,
            resourceType: exports.ResourceType.item,
            resourceId: id,
            options,
            partitionKey,
        });
        const ref = new Item(this.container, response.result.id, partitionKey, this.clientContext);
        return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref);
    }
    /**
     * Upsert (create or replace) an item; mirrors {@link Items.create},
     * including automatic id generation.
     */
    async upsert(body, options = {}) {
        const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
        const partitionKey = extractPartitionKey(body, partitionKeyDefinition);
        // Generate random document id if the id is missing in the payload and
        // options.disableAutomaticIdGeneration != true
        if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) {
            body.id = uuid$1();
        }
        const err = {};
        if (!isItemResourceValid(body, err)) {
            throw err;
        }
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        const id = getIdFromLink(this.container.url);
        const response = await this.clientContext.upsert({
            body,
            path,
            resourceType: exports.ResourceType.item,
            resourceId: id,
            options,
            partitionKey,
        });
        const ref = new Item(this.container, response.result.id, partitionKey, this.clientContext);
        return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref);
    }
    /**
     * Execute bulk operations on items.
     *
     * Bulk takes an array of Operations which are typed based on what the operation does.
     * The choices are: Create, Upsert, Read, Replace, and Delete
     *
     * Usage example:
     * ```typescript
     * // partitionKey is optional at the top level if present in the resourceBody
     * const operations: OperationInput[] = [
     *    {
     *       operationType: "Create",
     *       resourceBody: { id: "doc1", name: "sample", key: "A" }
     *    },
     *    {
     *       operationType: "Upsert",
     *       partitionKey: 'A',
     *       resourceBody: { id: "doc2", name: "other", key: "A" }
     *    }
     * ]
     *
     * await database.container.items.bulk(operations)
     * ```
     *
     * @param operations - List of operations. Limit 100
     * @param bulkOptions - Optional options object to modify bulk behavior. Pass \{ continueOnError: true \} to continue executing operations when one fails. (Defaults to false) ** NOTE: THIS WILL DEFAULT TO TRUE IN THE 4.0 RELEASE
     * @param options - Used for modifying the request.
     */
    async bulk(operations, bulkOptions, options) {
        const { resources: partitionKeyRanges } = await this.container
            .readPartitionKeyRanges()
            .fetchAll();
        const { resource: definition } = await this.container.getPartitionKeyDefinition();
        // One batch bucket per physical partition key range.
        const batches = partitionKeyRanges.map((keyRange) => {
            return {
                min: keyRange.minInclusive,
                max: keyRange.maxExclusive,
                rangeId: keyRange.id,
                indexes: [],
                operations: [],
            };
        });
        // Hash each operation's partition key (v1 or v2 scheme per the container
        // definition) and route it to the range whose [min, max) covers the hash.
        operations
            .map((operation) => decorateOperation(operation, definition, options))
            .forEach((operation, index) => {
            const partitionProp = definition.paths[0].replace("/", "");
            const isV2 = definition.version && definition.version === 2;
            const toHashKey = getPartitionKeyToHash(operation, partitionProp);
            const hashed = isV2 ? hashV2PartitionKey(toHashKey) : hashV1PartitionKey(toHashKey);
            const batchForKey = batches.find((batch) => {
                return isKeyInRange(batch.min, batch.max, hashed);
            });
            batchForKey.operations.push(operation);
            // Track the caller's original index so responses can be re-ordered.
            batchForKey.indexes.push(index);
        });
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        const orderedResponses = [];
        await Promise.all(batches
            .filter((batch) => batch.operations.length)
            .flatMap((batch) => splitBatchBasedOnBodySize(batch))
            .map(async (batch) => {
            if (batch.operations.length > 100) {
                throw new Error("Cannot run bulk request with more than 100 operations per partition");
            }
            try {
                const response = await this.clientContext.bulk({
                    body: batch.operations,
                    partitionKeyRangeId: batch.rangeId,
                    path,
                    resourceId: this.container.url,
                    bulkOptions,
                    options,
                });
                // Map per-batch responses back into the caller's original order.
                response.result.forEach((operationResponse, index) => {
                    orderedResponses[batch.indexes[index]] = operationResponse;
                });
            }
            catch (err) {
                // In the case of 410 errors, we need to recompute the partition key ranges
                // and redo the batch request, however, 410 errors occur for unsupported
                // partition key types as well since we don't support them, so for now we throw
                if (err.code === 410) {
                    throw new Error("Partition key error. Either the partitions have split or an operation has an unsupported partitionKey type");
                }
                throw new Error(`Bulk request errored with: ${err.message}`);
            }
        }));
        return orderedResponses;
    }
    /**
     * Execute transactional batch operations on items.
     *
     * Batch takes an array of Operations which are typed based on what the operation does. Batch is transactional and will rollback all operations if one fails.
     * The choices are: Create, Upsert, Read, Replace, and Delete
     *
     * Usage example:
     * ```typescript
     * // partitionKey is required as a second argument to batch, but defaults to the default partition key
     * const operations: OperationInput[] = [
     *    {
     *       operationType: "Create",
     *       resourceBody: { id: "doc1", name: "sample", key: "A" }
     *    },
     *    {
     *       operationType: "Upsert",
     *       partitionKey: 'A',
     *       resourceBody: { id: "doc2", name: "other", key: "A" }
     *    }
     * ]
     *
     * await database.container.items.batch(operations)
     * ```
     *
     * @param operations - List of operations. Limit 100
     * @param options - Used for modifying the request
     */
    async batch(operations, partitionKey = "[{}]", options) {
        // NOTE(review): map() is used purely for its decoration side effect here.
        operations.map((operation) => decorateBatchOperation(operation, options));
        const path = getPathFromLink(this.container.url, exports.ResourceType.item);
        if (operations.length > 100) {
            throw new Error("Cannot run batch request with more than 100 operations per partition");
        }
        try {
            const response = await this.clientContext.batch({
                body: operations,
                partitionKey,
                path,
                resourceId: this.container.url,
                options,
            });
            return response;
        }
        catch (err) {
            throw new Error(`Batch request error: ${err.message}`);
        }
    }
}
5832
class StoredProcedureResponse extends ResourceResponse {
    /**
     * @hidden
     * @param storedProcedure - Reference to the {@link StoredProcedure} this response corresponds to.
     */
    constructor(resource, headers, statusCode, storedProcedure) {
        super(resource, headers, statusCode);
        this.storedProcedure = storedProcedure;
    }
    /**
     * Alias for storedProcedure.
     *
     * A reference to the {@link StoredProcedure} which the {@link StoredProcedureDefinition} corresponds to.
     */
    get sproc() {
        return this.storedProcedure;
    }
}
5847
/**
 * Operations for reading, replacing, deleting, or executing a specific, existing stored procedure by id.
 *
 * For operations to create, read all, or query Stored Procedures, use `container.scripts.storedProcedures`.
 */
class StoredProcedure {
    /**
     * Creates a new instance of {@link StoredProcedure} linked to the parent {@link Container}.
     * @param container - The parent {@link Container}.
     * @param id - The id of the given {@link StoredProcedure}.
     * @hidden
     */
    constructor(container, id, clientContext) {
        this.container = container;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        const { database } = this.container;
        return createStoredProcedureUri(database.id, this.container.id, this.id);
    }
    /**
     * Read the {@link StoredProcedureDefinition} for the given {@link StoredProcedure}.
     */
    async read(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.sproc,
            resourceId: getIdFromLink(link),
            options,
        });
        return new StoredProcedureResponse(result, headers, code, this);
    }
    /**
     * Replace the given {@link StoredProcedure} with the specified {@link StoredProcedureDefinition}.
     * @param body - The specified {@link StoredProcedureDefinition} to replace the existing definition.
     */
    async replace(body, options) {
        // Stored procedure bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const link = this.url;
        const { result, headers, code } = await this.clientContext.replace({
            body,
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.sproc,
            resourceId: getIdFromLink(link),
            options,
        });
        return new StoredProcedureResponse(result, headers, code, this);
    }
    /**
     * Delete the given {@link StoredProcedure}.
     */
    async delete(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.sproc,
            resourceId: getIdFromLink(link),
            options,
        });
        return new StoredProcedureResponse(result, headers, code, this);
    }
    /**
     * Execute the given {@link StoredProcedure}.
     *
     * The specified type, T, is not enforced by the client.
     * Be sure to validate the response from the stored procedure matches the type, T, you provide.
     *
     * @param partitionKey - The partition key to use when executing the stored procedure
     * @param params - Array of parameters to pass as arguments to the given {@link StoredProcedure}.
     * @param options - Additional options, such as the partition key to invoke the {@link StoredProcedure} on.
     */
    async execute(partitionKey, params, options) {
        let effectivePartitionKey = partitionKey;
        if (effectivePartitionKey === undefined) {
            // No partition key supplied: derive the "undefined" key from the container's definition.
            const { resource: partitionKeyDefinition } = await this.container.readPartitionKeyDefinition();
            effectivePartitionKey = undefinedPartitionKey(partitionKeyDefinition);
        }
        const { result, headers, code } = await this.clientContext.execute({
            sprocLink: this.url,
            params,
            options,
            partitionKey: effectivePartitionKey,
        });
        return new ResourceResponse(result, headers, code);
    }
}
5946
/**
 * Operations for creating, upserting, or reading/querying all Stored Procedures.
 *
 * For operations to read, replace, delete, or execute a specific, existing stored procedure by id, see `container.storedProcedure()`.
 */
class StoredProcedures {
    /**
     * @param container - The parent {@link Container}.
     * @hidden
     */
    constructor(container, clientContext) {
        this.container = container;
        this.clientContext = clientContext;
    }
    /** Query stored procedures; returns a {@link QueryIterator} over the matching definitions. */
    query(query, options) {
        const containerLink = this.container.url;
        const path = getPathFromLink(containerLink, exports.ResourceType.sproc);
        const id = getIdFromLink(containerLink);
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path,
            resourceType: exports.ResourceType.sproc,
            resourceId: id,
            resultFn: (result) => result.StoredProcedures,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all stored procedures.
     * @example Read all stored procedures to array.
     * ```typescript
     * const {body: sprocList} = await containers.storedProcedures.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
    /**
     * Create a StoredProcedure.
     *
     * Azure Cosmos DB allows stored procedures to be executed in the storage tier,
     * directly against an item container. The script
     * gets executed under ACID transactions on the primary storage partition of the
     * specified container. For additional details,
     * refer to the server-side JavaScript API documentation.
     */
    async create(body, options) {
        // Stored procedure bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const containerLink = this.container.url;
        const { result, headers, code } = await this.clientContext.create({
            body,
            path: getPathFromLink(containerLink, exports.ResourceType.sproc),
            resourceType: exports.ResourceType.sproc,
            resourceId: getIdFromLink(containerLink),
            options,
        });
        const ref = new StoredProcedure(this.container, result.id, this.clientContext);
        return new StoredProcedureResponse(result, headers, code, ref);
    }
}
6015
/** Response object for {@link Trigger} operations; carries a reference back to the Trigger. */
class TriggerResponse extends ResourceResponse {
    /**
     * @hidden
     * @param resource - The resource body returned by the operation.
     * @param trigger - A reference to the {@link Trigger} the response corresponds to.
     */
    constructor(resource, headers, statusCode, trigger) {
        super(resource, headers, statusCode);
        this.trigger = trigger;
    }
}
6022
/**
 * Operations to read, replace, or delete a {@link Trigger}.
 *
 * Use `container.triggers` to create, upsert, query, or read all.
 */
class Trigger {
    /**
     * @hidden
     * @param container - The parent {@link Container}.
     * @param id - The id of the given {@link Trigger}.
     */
    constructor(container, id, clientContext) {
        this.container = container;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        const { database } = this.container;
        return createTriggerUri(database.id, this.container.id, this.id);
    }
    /**
     * Read the {@link TriggerDefinition} for the given {@link Trigger}.
     */
    async read(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.trigger,
            resourceId: getIdFromLink(link),
            options,
        });
        return new TriggerResponse(result, headers, code, this);
    }
    /**
     * Replace the given {@link Trigger} with the specified {@link TriggerDefinition}.
     * @param body - The specified {@link TriggerDefinition} to replace the existing definition with.
     */
    async replace(body, options) {
        // Trigger bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const link = this.url;
        const { result, headers, code } = await this.clientContext.replace({
            body,
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.trigger,
            resourceId: getIdFromLink(link),
            options,
        });
        return new TriggerResponse(result, headers, code, this);
    }
    /**
     * Delete the given {@link Trigger}.
     */
    async delete(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.trigger,
            resourceId: getIdFromLink(link),
            options,
        });
        return new TriggerResponse(result, headers, code, this);
    }
}
6097
/**
 * Operations to create, upsert, query, and read all triggers.
 *
 * Use `container.triggers` to read, replace, or delete a {@link Trigger}.
 */
class Triggers {
    /**
     * @hidden
     * @param container - The parent {@link Container}.
     */
    constructor(container, clientContext) {
        this.container = container;
        this.clientContext = clientContext;
    }
    /** Query triggers; returns a {@link QueryIterator} over the matching definitions. */
    query(query, options) {
        const containerLink = this.container.url;
        const path = getPathFromLink(containerLink, exports.ResourceType.trigger);
        const id = getIdFromLink(containerLink);
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path,
            resourceType: exports.ResourceType.trigger,
            resourceId: id,
            resultFn: (result) => result.Triggers,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all Triggers.
     * @example Read all trigger to array.
     * ```typescript
     * const {body: triggerList} = await container.triggers.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
    /**
     * Create a trigger.
     *
     * Azure Cosmos DB supports pre and post triggers defined in JavaScript to be executed
     * on creates, updates and deletes.
     *
     * For additional details, refer to the server-side JavaScript API documentation.
     */
    async create(body, options) {
        // Trigger bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const containerLink = this.container.url;
        const { result, headers, code } = await this.clientContext.create({
            body,
            path: getPathFromLink(containerLink, exports.ResourceType.trigger),
            resourceType: exports.ResourceType.trigger,
            resourceId: getIdFromLink(containerLink),
            options,
        });
        const ref = new Trigger(this.container, result.id, this.clientContext);
        return new TriggerResponse(result, headers, code, ref);
    }
}
6165
class UserDefinedFunctionResponse extends ResourceResponse {
    /**
     * @hidden
     * @param udf - Reference to the {@link UserDefinedFunction} this response corresponds to.
     */
    constructor(resource, headers, statusCode, udf) {
        super(resource, headers, statusCode);
        this.userDefinedFunction = udf;
    }
    /**
     * Alias for `userDefinedFunction(id)`.
     *
     * A reference to the {@link UserDefinedFunction} corresponding to the returned {@link UserDefinedFunctionDefinition}.
     */
    get udf() {
        return this.userDefinedFunction;
    }
}
6180
/**
 * Used to read, replace, or delete a specified User Defined Function by id.
 *
 * @see {@link UserDefinedFunctions} to create, upsert, query, or read all User Defined Functions.
 */
class UserDefinedFunction {
    /**
     * @hidden
     * @param container - The parent {@link Container}.
     * @param id - The id of the given {@link UserDefinedFunction}.
     */
    constructor(container, id, clientContext) {
        this.container = container;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        const { database } = this.container;
        return createUserDefinedFunctionUri(database.id, this.container.id, this.id);
    }
    /**
     * Read the {@link UserDefinedFunctionDefinition} for the given {@link UserDefinedFunction}.
     */
    async read(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.udf,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserDefinedFunctionResponse(result, headers, code, this);
    }
    /**
     * Replace the given {@link UserDefinedFunction} with the specified {@link UserDefinedFunctionDefinition}.
     * @param body - The specified {@link UserDefinedFunctionDefinition} to replace the existing definition with.
     */
    async replace(body, options) {
        // UDF bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const link = this.url;
        const { result, headers, code } = await this.clientContext.replace({
            body,
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.udf,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserDefinedFunctionResponse(result, headers, code, this);
    }
    /**
     * Delete the given {@link UserDefinedFunction}.
     */
    async delete(options) {
        const link = this.url;
        const { result, headers, code } = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.udf,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserDefinedFunctionResponse(result, headers, code, this);
    }
}
6255
/**
 * Used to create, upsert, query, or read all User Defined Functions.
 *
 * @see {@link UserDefinedFunction} to read, replace, or delete a given User Defined Function by id.
 */
class UserDefinedFunctions {
    /**
     * @hidden
     * @param container - The parent {@link Container}.
     */
    constructor(container, clientContext) {
        this.container = container;
        this.clientContext = clientContext;
    }
    /** Query User Defined Functions; returns a {@link QueryIterator} over the matching definitions. */
    query(query, options) {
        const containerLink = this.container.url;
        const path = getPathFromLink(containerLink, exports.ResourceType.udf);
        const id = getIdFromLink(containerLink);
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path,
            resourceType: exports.ResourceType.udf,
            resourceId: id,
            resultFn: (result) => result.UserDefinedFunctions,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all User Defined Functions.
     * @example Read all User Defined Functions to array.
     * ```typescript
     * const {body: udfList} = await container.userDefinedFunctions.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
    /**
     * Create a UserDefinedFunction.
     *
     * Azure Cosmos DB supports JavaScript UDFs which can be used inside queries, stored procedures and triggers.
     *
     * For additional details, refer to the server-side JavaScript API documentation.
     *
     */
    async create(body, options) {
        // UDF bodies may be provided as functions; serialize for transport.
        if (body.body) {
            body.body = body.body.toString();
        }
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const containerLink = this.container.url;
        const { result, headers, code } = await this.clientContext.create({
            body,
            path: getPathFromLink(containerLink, exports.ResourceType.udf),
            resourceType: exports.ResourceType.udf,
            resourceId: getIdFromLink(containerLink),
            options,
        });
        const ref = new UserDefinedFunction(this.container, result.id, this.clientContext);
        return new UserDefinedFunctionResponse(result, headers, code, ref);
    }
}
6323
6324// Copyright (c) Microsoft Corporation.
class Scripts {
    /**
     * @param container - The parent {@link Container}.
     * @hidden
     */
    constructor(container, clientContext) {
        this.container = container;
        this.clientContext = clientContext;
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link StoredProcedure} by id.
     *
     * Use `.storedProcedures` for creating new stored procedures, or querying/reading all stored procedures.
     * @param id - The id of the {@link StoredProcedure}.
     */
    storedProcedure(id) {
        return new StoredProcedure(this.container, id, this.clientContext);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link Trigger} by id.
     *
     * Use `.triggers` for creating new triggers, or querying/reading all triggers.
     * @param id - The id of the {@link Trigger}.
     */
    trigger(id) {
        return new Trigger(this.container, id, this.clientContext);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link UserDefinedFunction} by id.
     *
     * Use `.userDefinedFunctions` for creating new user defined functions, or querying/reading all user defined functions.
     * @param id - The id of the {@link UserDefinedFunction}.
     */
    userDefinedFunction(id) {
        return new UserDefinedFunction(this.container, id, this.clientContext);
    }
    /**
     * Operations for creating new stored procedures, and reading/querying all stored procedures.
     *
     * For reading, replacing, or deleting an existing stored procedure, use `.storedProcedure(id)`.
     */
    get storedProcedures() {
        // Lazily build and cache the collection-level operations object.
        if (this.$sprocs === undefined) {
            this.$sprocs = new StoredProcedures(this.container, this.clientContext);
        }
        return this.$sprocs;
    }
    /**
     * Operations for creating new triggers, and reading/querying all triggers.
     *
     * For reading, replacing, or deleting an existing trigger, use `.trigger(id)`.
     */
    get triggers() {
        // Lazily build and cache the collection-level operations object.
        if (this.$triggers === undefined) {
            this.$triggers = new Triggers(this.container, this.clientContext);
        }
        return this.$triggers;
    }
    /**
     * Operations for creating new user defined functions, and reading/querying all user defined functions.
     *
     * For reading, replacing, or deleting an existing user defined function, use `.userDefinedFunction(id)`.
     */
    get userDefinedFunctions() {
        // Lazily build and cache the collection-level operations object.
        if (this.$udfs === undefined) {
            this.$udfs = new UserDefinedFunctions(this.container, this.clientContext);
        }
        return this.$udfs;
    }
}
6395
/** Response object for Container operations */
class ContainerResponse extends ResourceResponse {
    /**
     * @hidden
     * @param resource - The resource body returned by the operation.
     * @param container - A reference to the {@link Container} the response corresponds to.
     */
    constructor(resource, headers, statusCode, container) {
        super(resource, headers, statusCode);
        this.container = container;
    }
}
6403
/** Response object for {@link Offer} operations; carries a reference back to the Offer. */
class OfferResponse extends ResourceResponse {
    /**
     * @hidden
     * @param resource - The resource body returned by the operation.
     * @param offer - A reference to the {@link Offer} the response corresponds to (may be undefined when no offer exists).
     */
    constructor(resource, headers, statusCode, offer) {
        super(resource, headers, statusCode);
        this.offer = offer;
    }
}
6410
/**
 * Use to read or replace an existing {@link Offer} by id.
 *
 * @see {@link Offers} to query or read all offers.
 */
class Offer {
    /**
     * @hidden
     * @param client - The parent {@link CosmosClient} for the Database Account.
     * @param id - The id of the given {@link Offer}.
     */
    constructor(client, id, clientContext) {
        this.client = client;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        return `/${Constants.Path.OffersPathSegment}/${this.id}`;
    }
    /**
     * Read the {@link OfferDefinition} for the given {@link Offer}.
     */
    async read(options) {
        const { result, headers, code } = await this.clientContext.read({
            path: this.url,
            resourceType: exports.ResourceType.offer,
            resourceId: this.id,
            options,
        });
        return new OfferResponse(result, headers, code, this);
    }
    /**
     * Replace the given {@link Offer} with the specified {@link OfferDefinition}.
     * @param body - The specified {@link OfferDefinition}
     */
    async replace(body, options) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const { result, headers, code } = await this.clientContext.replace({
            body,
            path: this.url,
            resourceType: exports.ResourceType.offer,
            resourceId: this.id,
            options,
        });
        return new OfferResponse(result, headers, code, this);
    }
}
6464
/**
 * Use to query or read all Offers.
 *
 * @see {@link Offer} to read or replace an existing {@link Offer} by id.
 */
class Offers {
    /**
     * @hidden
     * @param client - The parent {@link CosmosClient} for the offers.
     */
    constructor(client, clientContext) {
        this.client = client;
        this.clientContext = clientContext;
    }
    /** Query offers; returns a {@link QueryIterator} over the matching offer definitions. */
    query(query, options) {
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path: "/offers",
            resourceType: exports.ResourceType.offer,
            resourceId: "",
            resultFn: (result) => result.Offers,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all offers.
     * @example Read all offers to array.
     * ```typescript
     * const {body: offerList} = await client.offers.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
}
6502
/**
 * Operations for reading, replacing, or deleting a specific, existing container by id.
 *
 * @see {@link Containers} for creating new containers, and reading/querying all containers; use `.containers`.
 *
 * Note: all these operations make calls against a fixed budget.
 * You should design your system such that these calls scale sublinearly with your application.
 * For instance, do not call `container(id).read()` before every single `item.read()` call, to ensure the container exists;
 * do this once on application start up.
 */
class Container {
    /**
     * Returns a container instance. Note: You should get this from `database.container(id)`, rather than creating your own object.
     * @param database - The parent {@link Database}.
     * @param id - The id of the given container.
     * @hidden
     */
    constructor(database, id, clientContext) {
        this.database = database;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Operations for creating new items, and reading/querying all items
     *
     * For reading, replacing, or deleting an existing item, use `.item(id)`.
     *
     * @example Create a new item
     * ```typescript
     * const {body: createdItem} = await container.items.create({id: "<item id>", properties: {}});
     * ```
     */
    get items() {
        // Lazily created on first access and cached for subsequent calls.
        if (!this.$items) {
            this.$items = new Items(this, this.clientContext);
        }
        return this.$items;
    }
    /**
     * All operations for Stored Procedures, Triggers, and User Defined Functions
     */
    get scripts() {
        // Lazily created on first access and cached for subsequent calls.
        if (!this.$scripts) {
            this.$scripts = new Scripts(this, this.clientContext);
        }
        return this.$scripts;
    }
    /**
     * Operations for reading and querying conflicts for the given container.
     *
     * For reading or deleting a specific conflict, use `.conflict(id)`.
     */
    get conflicts() {
        // Lazily created on first access and cached for subsequent calls.
        if (!this.$conflicts) {
            this.$conflicts = new Conflicts(this, this.clientContext);
        }
        return this.$conflicts;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        return createDocumentCollectionUri(this.database.id, this.id);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link Item} by id.
     *
     * Use `.items` for creating new items, or querying/reading all items.
     *
     * @param id - The id of the {@link Item}.
     * @param partitionKeyValue - The value of the {@link Item} partition key
     * @example Replace an item
     * `const {body: replacedItem} = await container.item("<item id>", "<partition key value>").replace({id: "<item id>", title: "Updated post", authorID: 5});`
     */
    item(id, partitionKeyValue) {
        return new Item(this, id, partitionKeyValue, this.clientContext);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link Conflict} by id.
     *
     * Use `.conflicts` for creating new conflicts, or querying/reading all conflicts.
     * @param id - The id of the {@link Conflict}.
     */
    conflict(id, partitionKey) {
        return new Conflict(this, id, this.clientContext, partitionKey);
    }
    /**
     * Read the container's definition.
     *
     * Side effect: refreshes the client-wide partition key definition cache entry
     * for this container, which `readPartitionKeyDefinition` relies on.
     */
    async read(options) {
        const path = getPathFromLink(this.url);
        const id = getIdFromLink(this.url);
        const response = await this.clientContext.read({
            path,
            resourceType: exports.ResourceType.container,
            resourceId: id,
            options,
        });
        // Populate the cache so later partition-key lookups can avoid a round trip.
        this.clientContext.partitionKeyDefinitionCache[this.url] = response.result.partitionKey;
        return new ContainerResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Replace the container's definition.
     * @param body - The new container definition.
     * @throws The validation error object when `body` fails resource validation.
     */
    async replace(body, options) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const path = getPathFromLink(this.url);
        const id = getIdFromLink(this.url);
        const response = await this.clientContext.replace({
            body,
            path,
            resourceType: exports.ResourceType.container,
            resourceId: id,
            options,
        });
        return new ContainerResponse(response.result, response.headers, response.code, this);
    }
    /** Delete the container */
    async delete(options) {
        const path = getPathFromLink(this.url);
        const id = getIdFromLink(this.url);
        const response = await this.clientContext.delete({
            path,
            resourceType: exports.ResourceType.container,
            resourceId: id,
            options,
        });
        return new ContainerResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Gets the partition key definition first by looking into the cache otherwise by reading the collection.
     * @deprecated This method has been renamed to readPartitionKeyDefinition.
     */
    async getPartitionKeyDefinition() {
        return this.readPartitionKeyDefinition();
    }
    /**
     * Gets the partition key definition first by looking into the cache otherwise by reading the collection.
     *
     * On a cache hit, returns a synthetic response with empty headers and status code 0;
     * on a miss, performs a `read()` (which fills the cache) and reuses its headers/status.
     * @hidden
     */
    async readPartitionKeyDefinition() {
        // $ISSUE-felixfan-2016-03-17: Make name based path and link based path use the same key
        // $ISSUE-felixfan-2016-03-17: Refresh partitionKeyDefinitionCache when necessary
        if (this.url in this.clientContext.partitionKeyDefinitionCache) {
            return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], {}, 0);
        }
        // read() populates partitionKeyDefinitionCache[this.url] as a side effect.
        const { headers, statusCode } = await this.read();
        return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], headers, statusCode);
    }
    /**
     * Gets offer on container. If none exists, returns an OfferResponse with undefined.
     */
    async readOffer(options = {}) {
        // The offer is looked up by the container's system-generated _self link.
        const { resource: container } = await this.read();
        const path = "/offers";
        const url = container._self;
        const response = await this.clientContext.queryFeed({
            path,
            resourceId: "",
            resourceType: exports.ResourceType.offer,
            query: `SELECT * from root where root.resource = "${url}"`,
            resultFn: (result) => result.Offers,
            options,
        });
        // At most one offer is expected; an empty result yields an undefined offer reference.
        const offer = response.result[0]
            ? new Offer(this.database.client, response.result[0].id, this.clientContext)
            : undefined;
        return new OfferResponse(response.result[0], response.headers, response.code, offer);
    }
    /**
     * Fetches the query execution plan for the given query against this container's items.
     * @hidden
     */
    async getQueryPlan(query) {
        const path = getPathFromLink(this.url);
        return this.clientContext.getQueryPlan(path + "/docs", exports.ResourceType.item, getIdFromLink(this.url), query);
    }
    /**
     * Enumerates the partition key ranges of this container.
     * @param feedOptions - Optional feed options; defaults to an empty options object.
     */
    readPartitionKeyRanges(feedOptions) {
        feedOptions = feedOptions || {};
        return this.clientContext.queryPartitionKeyRanges(this.url, undefined, feedOptions);
    }
    /**
     * Delete all documents belong to the container for the provided partition key value
     * @param partitionKey - The partition key value of the items to be deleted
     */
    async deleteAllItemsForPartitionKey(partitionKey, options) {
        let path = getPathFromLink(this.url);
        const id = getIdFromLink(this.url);
        // Partition-key delete is a dedicated operations endpoint, invoked via POST
        // (see `method` below) even though it is modeled as a delete.
        path = path + "/operations/partitionkeydelete";
        const response = await this.clientContext.delete({
            path,
            resourceType: exports.ResourceType.container,
            resourceId: id,
            options,
            partitionKey: partitionKey,
            method: exports.HTTPMethod.post,
        });
        return new ContainerResponse(response.result, response.headers, response.code, this);
    }
}
6698
6699// Copyright (c) Microsoft Corporation.
6700// Licensed under the MIT license.
/**
 * Validates mutually-exclusive throughput settings on a create request body.
 *
 * Manual `throughput` cannot be combined with autoscale settings
 * (`maxThroughput` or `autoUpgradePolicy`).
 *
 * @param body - The request body whose offer-related fields are validated.
 * @throws Error when `throughput` is combined with `maxThroughput` or `autoUpgradePolicy`.
 */
function validateOffer(body) {
    if (body.throughput) {
        // Removed stray debug `console.log("should be erroring")` left in the error path.
        if (body.maxThroughput) {
            throw new Error("Cannot specify `throughput` with `maxThroughput`");
        }
        if (body.autoUpgradePolicy) {
            throw new Error("Cannot specify autoUpgradePolicy with throughput. Use `maxThroughput` instead");
        }
    }
}
6712
/**
 * Operations for creating new containers, and reading/querying all containers
 *
 * @see {@link Container} for reading, replacing, or deleting an existing container; use `.container(id)`.
 *
 * Note: all these operations make calls against a fixed budget.
 * You should design your system such that these calls scale sublinearly with your application.
 * For instance, do not call `containers.readAll()` before every single `item.read()` call, to ensure the container exists;
 * do this once on application start up.
 */
class Containers {
    /**
     * @hidden
     * @param database - The parent {@link Database}.
     */
    constructor(database, clientContext) {
        this.database = database;
        this.clientContext = clientContext;
    }
    /**
     * Queries all containers in the parent database.
     * @param query - Query configuration for the operation.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     * @returns A {@link QueryIterator} over the matching container definitions.
     */
    query(query, options) {
        const path = getPathFromLink(this.database.url, exports.ResourceType.container);
        const id = getIdFromLink(this.database.url);
        return new QueryIterator(this.clientContext, query, options, (innerOptions) => {
            return this.clientContext.queryFeed({
                path,
                resourceType: exports.ResourceType.container,
                resourceId: id,
                resultFn: (result) => result.DocumentCollections,
                query,
                options: innerOptions,
            });
        });
    }
    /**
     * Creates a container.
     *
     * A container is a named logical container for items.
     *
     * A database may contain zero or more named containers and each container consists of
     * zero or more JSON items.
     *
     * Being schema-free, the items in a container do not need to share the same structure or fields.
     *
     *
     * Since containers are application resources, they can be authorized using either the
     * master key or resource keys.
     *
     * @param body - Represents the body of the container.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     */
    async create(body, options = {}) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const path = getPathFromLink(this.database.url, exports.ResourceType.container);
        const id = getIdFromLink(this.database.url);
        // Reject bodies that mix manual `throughput` with autoscale settings.
        validateOffer(body);
        // Autoscale settings are sent via a request header, not in the resource
        // body, so serialize them into the header and strip them from `body`.
        // NOTE: this intentionally mutates the caller's `body` and `options`.
        if (body.maxThroughput) {
            const autoscaleParams = {
                maxThroughput: body.maxThroughput,
            };
            if (body.autoUpgradePolicy) {
                autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy;
            }
            const autoscaleHeader = JSON.stringify(autoscaleParams);
            options.initialHeaders = Object.assign({}, options.initialHeaders, {
                [Constants.HttpHeaders.AutoscaleSettings]: autoscaleHeader,
            });
            delete body.maxThroughput;
            delete body.autoUpgradePolicy;
        }
        // Manual throughput likewise travels on its own request header.
        if (body.throughput) {
            options.initialHeaders = Object.assign({}, options.initialHeaders, {
                [Constants.HttpHeaders.OfferThroughput]: body.throughput,
            });
            delete body.throughput;
        }
        // Accept a bare string partition key and normalize it to the object form.
        if (typeof body.partitionKey === "string") {
            if (!body.partitionKey.startsWith("/")) {
                throw new Error("Partition key must start with '/'");
            }
            body.partitionKey = {
                paths: [body.partitionKey],
            };
        }
        // If they don't specify a partition key, use the default path
        if (!body.partitionKey || !body.partitionKey.paths) {
            body.partitionKey = {
                paths: [DEFAULT_PARTITION_KEY_PATH],
            };
        }
        const response = await this.clientContext.create({
            body,
            path,
            resourceType: exports.ResourceType.container,
            resourceId: id,
            options,
        });
        const ref = new Container(this.database, response.result.id, this.clientContext);
        return new ContainerResponse(response.result, response.headers, response.code, ref);
    }
    /**
     * Checks if a Container exists, and, if it doesn't, creates it.
     * This will make a read operation based on the id in the `body`, then if it is not found, a create operation.
     * You should confirm that the output matches the body you passed in for non-default properties (i.e. indexing policy/etc.)
     *
     * A container is a named logical container for items.
     *
     * A database may contain zero or more named containers and each container consists of
     * zero or more JSON items.
     *
     * Being schema-free, the items in a container do not need to share the same structure or fields.
     *
     *
     * Since containers are application resources, they can be authorized using either the
     * master key or resource keys.
     *
     * @param body - Represents the body of the container.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     */
    async createIfNotExists(body, options) {
        if (!body || body.id === null || body.id === undefined) {
            throw new Error("body parameter must be an object with an id property");
        }
        /*
         1. Attempt to read the Container (based on an assumption that most containers will already exist, so its faster)
         2. If it fails with NotFound error, attempt to create the container. Else, return the read results.
        */
        try {
            const readResponse = await this.database.container(body.id).read(options);
            return readResponse;
        }
        catch (err) {
            if (err.code === StatusCodes.NotFound) {
                const createResponse = await this.create(body, options);
                // Must merge the headers to capture RU costs of the failed read attempt.
                mergeHeaders(createResponse.headers, err.headers);
                return createResponse;
            }
            else {
                throw err;
            }
        }
    }
    /**
     * Read all containers.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     * @returns {@link QueryIterator} Allows you to return all containers in an array or iterate over them one at a time.
     * @example Read all containers to array.
     * ```typescript
     * const {body: containerList} = await client.database("<db id>").containers.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
}
6867
/** Response object returned by {@link Permission} and {@link Permissions} operations. */
class PermissionResponse extends ResourceResponse {
    constructor(resource, headers, statusCode, permission) {
        super(resource, headers, statusCode);
        /** A reference to the {@link Permission} corresponding to the returned resource. */
        this.permission = permission;
    }
}
6874
/**
 * Use to read, replace, or delete a given {@link Permission} by id.
 *
 * @see {@link Permissions} to create, upsert, query, or read all Permissions.
 */
class Permission {
    /**
     * @hidden
     * @param user - The parent {@link User}.
     * @param id - The id of the given {@link Permission}.
     */
    constructor(user, id, clientContext) {
        this.user = user;
        this.id = id;
        this.clientContext = clientContext;
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        return createPermissionUri(this.user.database.id, this.user.id, this.id);
    }
    /**
     * Read the {@link PermissionDefinition} of the given {@link Permission}.
     * @param options - Additional request options.
     */
    async read(options) {
        const link = this.url;
        const response = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.permission,
            resourceId: getIdFromLink(link),
            options,
        });
        return new PermissionResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Replace the given {@link Permission} with the specified {@link PermissionDefinition}.
     * @param body - The specified {@link PermissionDefinition}.
     */
    async replace(body, options) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const link = this.url;
        const response = await this.clientContext.replace({
            body,
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.permission,
            resourceId: getIdFromLink(link),
            options,
        });
        return new PermissionResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Delete the given {@link Permission}.
     */
    async delete(options) {
        const link = this.url;
        const response = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.permission,
            resourceId: getIdFromLink(link),
            options,
        });
        return new PermissionResponse(response.result, response.headers, response.code, this);
    }
}
6946
/**
 * Use to create, replace, query, and read all Permissions.
 *
 * @see {@link Permission} to read, replace, or delete a specific permission by id.
 */
class Permissions {
    /**
     * @hidden
     * @param user - The parent {@link User}.
     */
    constructor(user, clientContext) {
        this.user = user;
        this.clientContext = clientContext;
    }
    /**
     * Query all permissions belonging to the parent user.
     * @param query - Query configuration for the operation.
     * @param options - Feed options (page size, continuation tokens, etc.).
     */
    query(query, options) {
        const path = getPathFromLink(this.user.url, exports.ResourceType.permission);
        const id = getIdFromLink(this.user.url);
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path,
            resourceType: exports.ResourceType.permission,
            resourceId: id,
            resultFn: (result) => result.Permissions,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all permissions.
     * @example Read all permissions to array.
     * ```typescript
     * const {body: permissionList} = await user.permissions.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
    /**
     * Create a permission.
     *
     * A permission represents a per-User Permission to access a specific resource
     * e.g. Item or Container.
     * @param body - Represents the body of the permission.
     */
    async create(body, options) {
        return this.createOrUpsert(body, options, false);
    }
    /**
     * Upsert a permission.
     *
     * A permission represents a per-User Permission to access a
     * specific resource e.g. Item or Container.
     */
    async upsert(body, options) {
        return this.createOrUpsert(body, options, true);
    }
    /**
     * @hidden
     * Shared implementation backing {@link create} and {@link upsert}.
     */
    async createOrUpsert(body, options, isUpsert) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const request = {
            body,
            path: getPathFromLink(this.user.url, exports.ResourceType.permission),
            resourceType: exports.ResourceType.permission,
            resourceId: getIdFromLink(this.user.url),
            options,
        };
        const response = isUpsert
            ? await this.clientContext.upsert(request)
            : await this.clientContext.create(request);
        const ref = new Permission(this.user, response.result.id, this.clientContext);
        return new PermissionResponse(response.result, response.headers, response.code, ref);
    }
}
7033
/** Response object returned by {@link User} and {@link Users} operations. */
class UserResponse extends ResourceResponse {
    constructor(resource, headers, statusCode, user) {
        super(resource, headers, statusCode);
        /** A reference to the {@link User} corresponding to the returned resource. */
        this.user = user;
    }
}
7040
/**
 * Used to read, replace, and delete Users.
 *
 * Additionally, you can access the permissions for a given user via `user.permission` and `user.permissions`.
 *
 * @see {@link Users} to create, upsert, query, or read all.
 */
class User {
    /**
     * @hidden
     * @param database - The parent {@link Database}.
     */
    constructor(database, id, clientContext) {
        this.database = database;
        this.id = id;
        this.clientContext = clientContext;
        this.permissions = new Permissions(this, this.clientContext);
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        return createUserUri(this.database.id, this.id);
    }
    /**
     * Operations to read, replace, or delete a specific Permission by id.
     *
     * See `client.permissions` for creating, upserting, querying, or reading all operations.
     */
    permission(id) {
        return new Permission(this, id, this.clientContext);
    }
    /**
     * Read the {@link UserDefinition} for the given {@link User}.
     */
    async read(options) {
        const link = this.url;
        const response = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.user,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Replace the given {@link User}'s definition with the specified {@link UserDefinition}.
     * @param body - The specified {@link UserDefinition} to replace the definition.
     */
    async replace(body, options) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const link = this.url;
        const response = await this.clientContext.replace({
            body,
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.user,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Delete the given {@link User}.
     */
    async delete(options) {
        const link = this.url;
        const response = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.user,
            resourceId: getIdFromLink(link),
            options,
        });
        return new UserResponse(response.result, response.headers, response.code, this);
    }
}
7122
/**
 * Used to create, upsert, query, and read all users.
 *
 * @see {@link User} to read, replace, or delete a specific User by id.
 */
class Users {
    /**
     * @hidden
     * @param database - The parent {@link Database}.
     */
    constructor(database, clientContext) {
        this.database = database;
        this.clientContext = clientContext;
    }
    /**
     * Query all users in the parent database.
     * @param query - Query configuration for the operation.
     * @param options - Feed options (page size, continuation tokens, etc.).
     */
    query(query, options) {
        const path = getPathFromLink(this.database.url, exports.ResourceType.user);
        const id = getIdFromLink(this.database.url);
        const fetchFunction = (innerOptions) => this.clientContext.queryFeed({
            path,
            resourceType: exports.ResourceType.user,
            resourceId: id,
            resultFn: (result) => result.Users,
            query,
            options: innerOptions,
        });
        return new QueryIterator(this.clientContext, query, options, fetchFunction);
    }
    /**
     * Read all users.
     * @example Read all users to array.
     * ```typescript
     * const {body: usersList} = await database.users.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
    /**
     * Create a database user with the specified {@link UserDefinition}.
     * @param body - The specified {@link UserDefinition}.
     */
    async create(body, options) {
        return this.createOrUpsert(body, options, false);
    }
    /**
     * Upsert a database user with a specified {@link UserDefinition}.
     * @param body - The specified {@link UserDefinition}.
     */
    async upsert(body, options) {
        return this.createOrUpsert(body, options, true);
    }
    /**
     * @hidden
     * Shared implementation backing {@link create} and {@link upsert}.
     */
    async createOrUpsert(body, options, isUpsert) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        const request = {
            body,
            path: getPathFromLink(this.database.url, exports.ResourceType.user),
            resourceType: exports.ResourceType.user,
            resourceId: getIdFromLink(this.database.url),
            options,
        };
        const response = isUpsert
            ? await this.clientContext.upsert(request)
            : await this.clientContext.create(request);
        const ref = new User(this.database, response.result.id, this.clientContext);
        return new UserResponse(response.result, response.headers, response.code, ref);
    }
}
7204
/** Response object for Database operations */
class DatabaseResponse extends ResourceResponse {
    constructor(resource, headers, statusCode, database) {
        super(resource, headers, statusCode);
        /** A reference to the {@link Database} corresponding to the returned resource. */
        this.database = database;
    }
}
7212
/**
 * Operations for reading or deleting an existing database.
 *
 * @see {@link Databases} for creating new databases, and reading/querying all databases; use `client.databases`.
 *
 * Note: all these operations make calls against a fixed budget.
 * You should design your system such that these calls scale sublinearly with your application.
 * For instance, do not call `database.read()` before every single `item.read()` call, to ensure the database exists;
 * do this once on application start up.
 */
class Database {
    /** Returns a new {@link Database} instance.
     *
     * Note: the intention is to get this object from {@link CosmosClient} via `client.database(id)`, not to instantiate it yourself.
     */
    constructor(client, id, clientContext) {
        this.client = client;
        this.id = id;
        this.clientContext = clientContext;
        this.containers = new Containers(this, this.clientContext);
        this.users = new Users(this, this.clientContext);
    }
    /**
     * Returns a reference URL to the resource. Used for linking in Permissions.
     */
    get url() {
        return createDatabaseUri(this.id);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link Container} by id.
     *
     * Use `.containers` for creating new containers, or querying/reading all containers.
     *
     * @example Delete a container
     * ```typescript
     * await client.database("<db id>").container("<container id>").delete();
     * ```
     */
    container(id) {
        return new Container(this, id, this.clientContext);
    }
    /**
     * Used to read, replace, or delete a specific, existing {@link User} by id.
     *
     * Use `.users` for creating new users, or querying/reading all users.
     */
    user(id) {
        return new User(this, id, this.clientContext);
    }
    /** Read the definition of the given Database. */
    async read(options) {
        const link = this.url;
        const response = await this.clientContext.read({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.database,
            resourceId: getIdFromLink(link),
            options,
        });
        return new DatabaseResponse(response.result, response.headers, response.code, this);
    }
    /** Delete the given Database. */
    async delete(options) {
        const link = this.url;
        const response = await this.clientContext.delete({
            path: getPathFromLink(link),
            resourceType: exports.ResourceType.database,
            resourceId: getIdFromLink(link),
            options,
        });
        return new DatabaseResponse(response.result, response.headers, response.code, this);
    }
    /**
     * Gets offer on database. If none exists, returns an OfferResponse with undefined.
     */
    async readOffer(options = {}) {
        // The offer is keyed by the database's system `_self` link, so read it first.
        const { resource: record } = await this.read();
        const response = await this.clientContext.queryFeed({
            path: "/offers",
            resourceId: "",
            resourceType: exports.ResourceType.offer,
            query: `SELECT * from root where root.resource = "${record._self}"`,
            resultFn: (result) => result.Offers,
            options,
        });
        const firstOffer = response.result[0];
        const offer = firstOffer
            ? new Offer(this.client, firstOffer.id, this.clientContext)
            : undefined;
        return new OfferResponse(firstOffer, response.headers, response.code, offer);
    }
}
7307
/**
 * Operations for creating new databases, and reading/querying all databases
 *
 * @see {@link Database} for reading or deleting an existing database; use `client.database(id)`.
 *
 * Note: all these operations make calls against a fixed budget.
 * You should design your system such that these calls scale sublinearly with your application.
 * For instance, do not call `databases.readAll()` before every single `item.read()` call, to ensure the database exists;
 * do this once on application start up.
 */
class Databases {
    /**
     * @hidden
     * @param client - The parent {@link CosmosClient} for the Database.
     */
    constructor(client, clientContext) {
        this.client = client;
        this.clientContext = clientContext;
    }
    /**
     * Queries all databases in the account.
     * @param query - Query configuration for the operation.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     */
    query(query, options) {
        const cb = (innerOptions) => {
            return this.clientContext.queryFeed({
                path: "/dbs",
                resourceType: exports.ResourceType.database,
                resourceId: "",
                resultFn: (result) => result.Databases,
                query,
                options: innerOptions,
            });
        };
        return new QueryIterator(this.clientContext, query, options, cb);
    }
    /**
     * Send a request for creating a database.
     *
     * A database manages users, permissions and a set of containers.
     * Each Azure Cosmos DB Database Account is able to support multiple independent named databases,
     * with the database being the logical container for data.
     *
     * Each Database consists of one or more containers, each of which in turn contain one or more
     * documents. Since databases are an administrative resource, the Service Master Key will be
     * required in order to access and successfully complete any action using the User APIs.
     *
     * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     */
    async create(body, options = {}) {
        const err = {};
        if (!isResourceValid(body, err)) {
            throw err;
        }
        // Reject bodies that mix manual `throughput` with autoscale settings.
        validateOffer(body);
        // Autoscale settings are sent via a request header, not in the resource
        // body, so serialize them into the header and strip them from `body`.
        // NOTE: this intentionally mutates the caller's `body` and `options`.
        if (body.maxThroughput) {
            const autoscaleParams = {
                maxThroughput: body.maxThroughput,
            };
            if (body.autoUpgradePolicy) {
                autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy;
            }
            const autoscaleHeaders = JSON.stringify(autoscaleParams);
            options.initialHeaders = Object.assign({}, options.initialHeaders, {
                [Constants.HttpHeaders.AutoscaleSettings]: autoscaleHeaders,
            });
            delete body.maxThroughput;
            delete body.autoUpgradePolicy;
        }
        // Manual throughput likewise travels on its own request header.
        if (body.throughput) {
            options.initialHeaders = Object.assign({}, options.initialHeaders, {
                [Constants.HttpHeaders.OfferThroughput]: body.throughput,
            });
            delete body.throughput;
        }
        const path = "/dbs"; // TODO: constant
        const response = await this.clientContext.create({
            body,
            path,
            resourceType: exports.ResourceType.database,
            resourceId: undefined,
            options,
        });
        const ref = new Database(this.client, body.id, this.clientContext);
        return new DatabaseResponse(response.result, response.headers, response.code, ref);
    }
    /**
     * Check if a database exists, and if it doesn't, create it.
     * This will make a read operation based on the id in the `body`, then if it is not found, a create operation.
     *
     * A database manages users, permissions and a set of containers.
     * Each Azure Cosmos DB Database Account is able to support multiple independent named databases,
     * with the database being the logical container for data.
     *
     * Each Database consists of one or more containers, each of which in turn contain one or more
     * documents. Since databases are an administrative resource, the Service Master Key will be
     * required in order to access and successfully complete any action using the User APIs.
     *
     * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created.
     * @param options - Additional options for the request
     */
    async createIfNotExists(body, options) {
        if (!body || body.id === null || body.id === undefined) {
            throw new Error("body parameter must be an object with an id property");
        }
        /*
         1. Attempt to read the Database (based on an assumption that most databases will already exist, so its faster)
         2. If it fails with NotFound error, attempt to create the db. Else, return the read results.
        */
        try {
            const readResponse = await this.client.database(body.id).read(options);
            return readResponse;
        }
        catch (err) {
            if (err.code === StatusCodes.NotFound) {
                const createResponse = await this.create(body, options);
                // Must merge the headers to capture RU costs of the failed read attempt.
                mergeHeaders(createResponse.headers, err.headers);
                return createResponse;
            }
            else {
                throw err;
            }
        }
    }
    // TODO: DatabaseResponse for QueryIterator?
    /**
     * Reads all databases.
     * @param options - Use to set options like response page size, continuation tokens, etc.
     * @returns {@link QueryIterator} Allows you to return all databases in an array or iterate over them one at a time.
     * @example Read all databases to array.
     * ```typescript
     * const {body: databaseList} = await client.databases.readAll().fetchAll();
     * ```
     */
    readAll(options) {
        return this.query(undefined, options);
    }
}
7444
/**
 * Used to specify which type of events to execute this plug in on.
 *
 * @hidden
 */
exports.PluginOn = void 0;
// Enum-style namespace populated in place (TypeScript enum compilation pattern).
(function (PluginOn) {
    /**
     * Will be executed per network request
     */
    PluginOn["request"] = "request";
    /**
     * Will be executed per API operation
     */
    PluginOn["operation"] = "operation";
})(exports.PluginOn || (exports.PluginOn = {}));
/**
 * @internal
 * Executes, in order, every configured plugin whose `on` kind matches the
 * given event, threading a continuation through the chain; once the chain is
 * exhausted the real request is dispatched via `next`.
 *
 * @param requestContext - The request being processed; `requestContext.plugins`
 * holds the plugin list (when absent, `next` is invoked directly).
 * @param next - Dispatcher invoked to perform the actual request.
 * @param on - Which plugin kind to execute for this pass.
 */
async function executePlugins(requestContext, next, on) {
    if (!requestContext.plugins) {
        return next(requestContext, undefined);
    }
    // Shared cursor into the plugin list, advanced by the continuation below.
    let level = 0;
    const _ = (inner) => {
        // Pre-increment: the continuation always moves past the plugin that invoked it.
        if (++level >= inner.plugins.length) {
            return next(requestContext, undefined);
        }
        else if (inner.plugins[level].on !== on) {
            // Skip plugins registered for a different event kind.
            return _(requestContext);
        }
        else {
            return inner.plugins[level].plugin(inner, _);
        }
    };
    // NOTE(review): plugins[0] is read unconditionally here, so an empty
    // `plugins` array would throw — confirm callers guarantee at least one
    // entry whenever `plugins` is set.
    if (requestContext.plugins[level].on !== on) {
        return _(requestContext);
    }
    else {
        return requestContext.plugins[level].plugin(requestContext, _);
    }
}
7487
// Copyright (c) Microsoft Corporation.
/**
 * @hidden
 */
// Windows Socket Error Codes
const WindowsInterruptedFunctionCall = 10004;
/**
 * @hidden
 */
const WindowsFileHandleNotValid = 10009;
/**
 * @hidden
 */
const WindowsPermissionDenied = 10013;
/**
 * @hidden
 */
const WindowsBadAddress = 10014;
/**
 * @hidden
 */
const WindowsInvalidArgumnet = 10022; // (sic) misspelling of "Argument" kept; the name is referenced below
/**
 * @hidden
 */
const WindowsResourceTemporarilyUnavailable = 10035;
/**
 * @hidden
 */
const WindowsOperationNowInProgress = 10036;
/**
 * @hidden
 */
const WindowsAddressAlreadyInUse = 10048;
/**
 * @hidden
 */
const WindowsConnectionResetByPeer = 10054;
/**
 * @hidden
 */
const WindowsCannotSendAfterSocketShutdown = 10058;
/**
 * @hidden
 */
const WindowsConnectionTimedOut = 10060;
/**
 * @hidden
 */
const WindowsConnectionRefused = 10061;
/**
 * @hidden
 */
const WindowsNameTooLong = 10063;
/**
 * @hidden
 */
const WindowsHostIsDown = 10064;
/**
 * @hidden
 */
const WindowsNoRouteTohost = 10065; // (sic) casing of "Tohost" kept; the name is referenced below
/**
 * @hidden
 */
// Linux Error Codes
/**
 * @hidden
 */
const LinuxConnectionReset = "ECONNRESET";
// Node Error Codes
/**
 * @hidden
 */
const BrokenPipe = "EPIPE";
/**
 * @hidden
 * Error codes treated as transient connection failures; read/query requests
 * failing with one of these are eligible for retry (see `needsRetry` below).
 * `TimeoutErrorCode` is defined elsewhere in this bundle.
 */
const CONNECTION_ERROR_CODES = [
    WindowsInterruptedFunctionCall,
    WindowsFileHandleNotValid,
    WindowsPermissionDenied,
    WindowsBadAddress,
    WindowsInvalidArgumnet,
    WindowsResourceTemporarilyUnavailable,
    WindowsOperationNowInProgress,
    WindowsAddressAlreadyInUse,
    WindowsConnectionResetByPeer,
    WindowsCannotSendAfterSocketShutdown,
    WindowsConnectionTimedOut,
    WindowsConnectionRefused,
    WindowsNameTooLong,
    WindowsHostIsDown,
    WindowsNoRouteTohost,
    LinuxConnectionReset,
    TimeoutErrorCode,
    BrokenPipe,
];
/**
 * @hidden
 * Returns true when a read or query operation failed with a known transient
 * connection error code and therefore warrants a retry.
 */
function needsRetry(operationType, code) {
    const isReadOrQuery = operationType === exports.OperationType.Read ||
        operationType === exports.OperationType.Query;
    return isReadOrQuery && CONNECTION_ERROR_CODES.includes(code);
}
/**
 * This class implements the default connection retry policy for requests.
 * @hidden
 */
class DefaultRetryPolicy {
    constructor(operationType) {
        this.operationType = operationType;
        /** Maximum number of retry attempts. */
        this.maxTries = 10;
        /** Current retry attempt count. */
        this.currentRetryAttemptCount = 0;
        /** Fixed delay in milliseconds before the next attempt. */
        this.retryAfterInMs = 1000;
    }
    /**
     * Determines whether the request should be retried or not.
     * @param err - Error returned by the request.
     */
    async shouldRetry(err) {
        if (!err) {
            return false;
        }
        const retryable = this.currentRetryAttemptCount < this.maxTries &&
            needsRetry(this.operationType, err.code);
        if (!retryable) {
            return false;
        }
        this.currentRetryAttemptCount++;
        return true;
    }
}
7624
/**
 * This class implements the retry policy for endpoint discovery.
 * @hidden
 */
class EndpointDiscoveryRetryPolicy {
    /**
     * @param globalEndpointManager - The GlobalEndpointManager instance.
     */
    constructor(globalEndpointManager, operationType) {
        this.globalEndpointManager = globalEndpointManager;
        this.operationType = operationType;
        this.maxTries = EndpointDiscoveryRetryPolicy.maxTries;
        this.currentRetryAttemptCount = 0;
        this.retryAfterInMs = EndpointDiscoveryRetryPolicy.retryAfterInMs;
    }
    /**
     * Determines whether the request should be retried or not.
     * @param err - Error returned by the request.
     * @param retryContext - Mutable per-request retry bookkeeping.
     * @param locationEndpoint - Endpoint that served the failed attempt.
     */
    async shouldRetry(err, retryContext, locationEndpoint) {
        // All three inputs are required, endpoint discovery must be enabled,
        // and the retry budget must not be exhausted.
        const retryable = Boolean(err) &&
            Boolean(retryContext) &&
            Boolean(locationEndpoint) &&
            Boolean(this.globalEndpointManager.enableEndpointDiscovery) &&
            this.currentRetryAttemptCount < this.maxTries;
        if (!retryable) {
            return false;
        }
        this.currentRetryAttemptCount++;
        // Mark the failing endpoint unavailable for the relevant operation kind.
        if (isReadRequest(this.operationType)) {
            await this.globalEndpointManager.markCurrentLocationUnavailableForRead(locationEndpoint);
        }
        else {
            await this.globalEndpointManager.markCurrentLocationUnavailableForWrite(locationEndpoint);
        }
        retryContext.retryCount = this.currentRetryAttemptCount;
        retryContext.clearSessionTokenNotAvailable = false;
        retryContext.retryRequestOnPreferredLocations = false;
        return true;
    }
}
EndpointDiscoveryRetryPolicy.maxTries = 120; // TODO: Constant?
EndpointDiscoveryRetryPolicy.retryAfterInMs = 1000;
7672
/**
 * This class implements the resource throttle retry policy for requests.
 * @hidden
 */
class ResourceThrottleRetryPolicy {
    /**
     * @param maxTries - Max number of retries to be performed for a request.
     * @param fixedRetryIntervalInMs - Fixed retry interval in milliseconds to wait between each
     * retry ignoring the retryAfter returned as part of the response.
     * @param timeoutInSeconds - Max wait time in seconds to wait for a request while the
     * retries are happening.
     */
    constructor(maxTries = 9, fixedRetryIntervalInMs = 0, timeoutInSeconds = 30) {
        this.maxTries = maxTries;
        this.fixedRetryIntervalInMs = fixedRetryIntervalInMs;
        /** Current retry attempt count. */
        this.currentRetryAttemptCount = 0;
        /** Cumulative wait time in milliseconds for a request while the retries are happening. */
        this.cummulativeWaitTimeinMs = 0;
        /** Retry interval in milliseconds to wait before the next request will be sent. */
        this.retryAfterInMs = 0;
        this.timeoutInMs = timeoutInSeconds * 1000;
    }
    /**
     * Determines whether the request should be retried or not.
     * @param err - Error returned by the request.
     */
    async shouldRetry(err) {
        // TODO: any custom error object
        if (!err || this.currentRetryAttemptCount >= this.maxTries) {
            return false;
        }
        this.currentRetryAttemptCount++;
        // A configured fixed interval wins over the server-provided retryAfter.
        this.retryAfterInMs = this.fixedRetryIntervalInMs || err.retryAfterInMs || 0;
        if (this.cummulativeWaitTimeinMs >= this.timeoutInMs) {
            return false;
        }
        this.cummulativeWaitTimeinMs += this.retryAfterInMs;
        return true;
    }
}
7723
7724// Copyright (c) Microsoft Corporation.
/**
 * This class implements the retry policy for session consistent reads.
 * @hidden
 */
class SessionRetryPolicy {
    /**
     * @param globalEndpointManager - The GlobalEndpointManager instance.
     * @param resourceType - Resource type the failed request targeted.
     * @param operationType - Operation (read/write) being retried.
     * @param connectionPolicy - Connection policy; endpoint discovery must be enabled for retries.
     */
    constructor(globalEndpointManager, resourceType, operationType, connectionPolicy) {
        this.globalEndpointManager = globalEndpointManager;
        this.resourceType = resourceType;
        this.operationType = operationType;
        this.connectionPolicy = connectionPolicy;
        /** Current retry attempt count. */
        this.currentRetryAttemptCount = 0;
        /** Retry interval in milliseconds. */
        this.retryAfterInMs = 0;
    }
    /**
     * Determines whether the request should be retried or not.
     * @param err - Error returned by the request.
     * @param retryContext - Mutable retry bookkeeping shared with the retry executor.
     */
    async shouldRetry(err, retryContext) {
        // Nothing to retry without an error, a retry context, or endpoint discovery.
        if (!err || !retryContext || !this.connectionPolicy.enableEndpointDiscovery) {
            return false;
        }
        const multiWrite = this.globalEndpointManager.canUseMultipleWriteLocations(this.resourceType, this.operationType);
        if (!multiWrite) {
            // Single write region: permit exactly one extra attempt, forced to the
            // primary write endpoint with the stale session token cleared.
            if (this.currentRetryAttemptCount > 1) {
                return false;
            }
            this.currentRetryAttemptCount++;
            retryContext.retryCount++;
            retryContext.retryRequestOnPreferredLocations = false; // Forces all operations to primary write endpoint
            retryContext.clearSessionTokenNotAvailable = true;
            return true;
        }
        // Multiple writable locations: try each candidate endpoint until exhausted.
        const endpoints = isReadRequest(this.operationType)
            ? await this.globalEndpointManager.getReadEndpoints()
            : await this.globalEndpointManager.getWriteEndpoints();
        if (this.currentRetryAttemptCount > endpoints.length) {
            return false;
        }
        this.currentRetryAttemptCount++;
        retryContext.retryCount++;
        retryContext.retryRequestOnPreferredLocations = this.currentRetryAttemptCount > 1;
        retryContext.clearSessionTokenNotAvailable =
            this.currentRetryAttemptCount === endpoints.length;
        return true;
    }
}
7790
7791// Copyright (c) Microsoft Corporation.
7792/**
7793 * @hidden
7794 */
// Core retry executor: sends the request via executeRequest and, on failure,
// picks the retry policy matching the error class (endpoint discovery, throttle,
// session read, or default), then recurses until a policy declines to retry.
async function execute({ retryContext = { retryCount: 0 }, retryPolicies, requestContext, executeRequest, }) {
    // TODO: any response
    // Lazily build one policy instance of each kind on the first attempt; the
    // same instances are threaded through recursive calls to accumulate state.
    if (!retryPolicies) {
        retryPolicies = {
            endpointDiscoveryRetryPolicy: new EndpointDiscoveryRetryPolicy(requestContext.globalEndpointManager, requestContext.operationType),
            resourceThrottleRetryPolicy: new ResourceThrottleRetryPolicy(requestContext.connectionPolicy.retryOptions.maxRetryAttemptCount, requestContext.connectionPolicy.retryOptions.fixedRetryIntervalInMilliseconds, requestContext.connectionPolicy.retryOptions.maxWaitTimeInSeconds),
            sessionReadRetryPolicy: new SessionRetryPolicy(requestContext.globalEndpointManager, requestContext.resourceType, requestContext.operationType, requestContext.connectionPolicy),
            defaultRetryPolicy: new DefaultRetryPolicy(requestContext.operationType),
        };
    }
    // A prior session retry may have flagged the cached session token as stale:
    // drop it from the client cache and from this request's headers.
    if (retryContext && retryContext.clearSessionTokenNotAvailable) {
        requestContext.client.clearSessionToken(requestContext.path);
        delete requestContext.headers["x-ms-session-token"];
    }
    // Re-resolve the endpoint on every attempt so location failover takes effect.
    requestContext.endpoint = await requestContext.globalEndpointManager.resolveServiceEndpoint(requestContext.resourceType, requestContext.operationType);
    try {
        const response = await executeRequest(requestContext);
        // Stamp throttle-retry diagnostics onto successful responses too.
        response.headers[Constants.ThrottleRetryCount] =
            retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount;
        response.headers[Constants.ThrottleRetryWaitTimeInMs] =
            retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs;
        return response;
    }
    catch (err) {
        // TODO: any error
        let retryPolicy = null;
        const headers = err.headers || {};
        // Endpoint problems (DNS failure, send error, write-forbidden/account-moved
        // 403s) go to endpoint discovery; 429 to throttling; 404/ReadSessionNotAvailable
        // to session-read retry; everything else to the default policy.
        if (err.code === StatusCodes.ENOTFOUND ||
            err.code === "REQUEST_SEND_ERROR" ||
            (err.code === StatusCodes.Forbidden &&
                (err.substatus === SubStatusCodes.DatabaseAccountNotFound ||
                    err.substatus === SubStatusCodes.WriteForbidden))) {
            retryPolicy = retryPolicies.endpointDiscoveryRetryPolicy;
        }
        else if (err.code === StatusCodes.TooManyRequests) {
            retryPolicy = retryPolicies.resourceThrottleRetryPolicy;
        }
        else if (err.code === StatusCodes.NotFound &&
            err.substatus === SubStatusCodes.ReadSessionNotAvailable) {
            retryPolicy = retryPolicies.sessionReadRetryPolicy;
        }
        else {
            retryPolicy = retryPolicies.defaultRetryPolicy;
        }
        const results = await retryPolicy.shouldRetry(err, retryContext, requestContext.endpoint);
        if (!results) {
            // Out of retries: attach throttle diagnostics to the error and rethrow.
            headers[Constants.ThrottleRetryCount] =
                retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount;
            headers[Constants.ThrottleRetryWaitTimeInMs] =
                retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs;
            err.headers = Object.assign(Object.assign({}, err.headers), headers);
            throw err;
        }
        else {
            requestContext.retryCount++;
            // Some policies return [shouldRetry, newEndpoint] rather than a boolean;
            // honor a redirected endpoint when one is supplied.
            const newUrl = results[1]; // TODO: any hack
            if (newUrl !== undefined) {
                requestContext.endpoint = newUrl;
            }
            await sleep(retryPolicy.retryAfterInMs);
            return execute({
                executeRequest,
                requestContext,
                retryContext,
                retryPolicies,
            });
        }
    }
}
7864
7865/**
7866 * @hidden
7867 */
7868let defaultHttpsAgent;
7869const https = require("https"); // eslint-disable-line @typescript-eslint/no-require-imports
7870const tls = require("tls"); // eslint-disable-line @typescript-eslint/no-require-imports
7871// minVersion only available in Node 10+
7872if (tls.DEFAULT_MIN_VERSION) {
7873 defaultHttpsAgent = new https.Agent({
7874 keepAlive: true,
7875 minVersion: "TLSv1.2",
7876 });
7877}
7878else {
7879 // Remove when Node 8 support has been dropped
7880 defaultHttpsAgent = new https.Agent({
7881 keepAlive: true,
7882 secureProtocol: "TLSv1_2_method",
7883 });
7884}
7885const http = require("http"); // eslint-disable-line @typescript-eslint/no-require-imports
7886/**
7887 * @internal
7888 */
7889const defaultHttpAgent = new http.Agent({
7890 keepAlive: true,
7891});
7892
7893// Copyright (c) Microsoft Corporation.
// Lazily-created, process-wide HTTP client shared by all Cosmos clients.
let cachedHttpClient;
/**
 * Returns the shared default pipeline HTTP client, creating it on first use.
 */
function getCachedDefaultHttpClient() {
    if (cachedHttpClient === undefined) {
        cachedHttpClient = coreRestPipeline.createDefaultHttpClient();
    }
    return cachedHttpClient;
}
7901
7902// Copyright (c) Microsoft Corporation.
// Client logger used by the request handler for query/diagnostic output.
const logger$1 = logger$4.createClientLogger("RequestHandler");
// Entry point used by the retry executor: runs any "request"-scoped plugins
// around the raw httpRequest implementation.
async function executeRequest(requestContext) {
    return executePlugins(requestContext, httpRequest, exports.PluginOn.request);
}
7907/**
7908 * @hidden
7909 */
7910async function httpRequest(requestContext) {
7911 const controller = new nodeAbortController.AbortController();
7912 const signal = controller.signal;
7913 // Wrap users passed abort events and call our own internal abort()
7914 const userSignal = requestContext.options && requestContext.options.abortSignal;
7915 if (userSignal) {
7916 if (userSignal.aborted) {
7917 controller.abort();
7918 }
7919 else {
7920 userSignal.addEventListener("abort", () => {
7921 controller.abort();
7922 });
7923 }
7924 }
7925 const timeout = setTimeout(() => {
7926 controller.abort();
7927 }, requestContext.connectionPolicy.requestTimeout);
7928 let response;
7929 if (requestContext.body) {
7930 requestContext.body = bodyFromData(requestContext.body);
7931 }
7932 const httpsClient = getCachedDefaultHttpClient();
7933 const url = trimSlashes(requestContext.endpoint) + requestContext.path;
7934 const reqHeaders = coreRestPipeline.createHttpHeaders(requestContext.headers);
7935 const pipelineRequest = coreRestPipeline.createPipelineRequest({
7936 url,
7937 headers: reqHeaders,
7938 method: requestContext.method,
7939 abortSignal: signal,
7940 body: requestContext.body,
7941 });
7942 if (requestContext.requestAgent) {
7943 pipelineRequest.agent = requestContext.requestAgent;
7944 }
7945 else {
7946 const parsedUrl = new URL(url);
7947 pipelineRequest.agent = parsedUrl.protocol === "http" ? defaultHttpAgent : defaultHttpsAgent;
7948 }
7949 try {
7950 if (requestContext.pipeline) {
7951 response = await requestContext.pipeline.sendRequest(httpsClient, pipelineRequest);
7952 }
7953 else {
7954 response = await httpsClient.sendRequest(pipelineRequest);
7955 }
7956 }
7957 catch (error) {
7958 if (error.name === "AbortError") {
7959 // If the user passed signal caused the abort, cancel the timeout and rethrow the error
7960 if (userSignal && userSignal.aborted === true) {
7961 clearTimeout(timeout);
7962 throw error;
7963 }
7964 // If the user didn't cancel, it must be an abort we called due to timeout
7965 throw new TimeoutError(`Timeout Error! Request took more than ${requestContext.connectionPolicy.requestTimeout} ms`);
7966 }
7967 throw error;
7968 }
7969 clearTimeout(timeout);
7970 const result = response.status === 204 || response.status === 304 || response.bodyAsText === ""
7971 ? null
7972 : JSON.parse(response.bodyAsText);
7973 const headers = response.headers.toJSON();
7974 const substatus = headers[Constants.HttpHeaders.SubStatus]
7975 ? parseInt(headers[Constants.HttpHeaders.SubStatus], 10)
7976 : undefined;
7977 if (response.status >= 400) {
7978 const errorResponse = new ErrorResponse(result.message);
7979 logger$1.warning(response.status +
7980 " " +
7981 requestContext.endpoint +
7982 " " +
7983 requestContext.path +
7984 " " +
7985 result.message);
7986 errorResponse.code = response.status;
7987 errorResponse.body = result;
7988 errorResponse.headers = headers;
7989 if (Constants.HttpHeaders.ActivityId in headers) {
7990 errorResponse.activityId = headers[Constants.HttpHeaders.ActivityId];
7991 }
7992 if (Constants.HttpHeaders.SubStatus in headers) {
7993 errorResponse.substatus = substatus;
7994 }
7995 if (Constants.HttpHeaders.RetryAfterInMs in headers) {
7996 errorResponse.retryAfterInMs = parseInt(headers[Constants.HttpHeaders.RetryAfterInMs], 10);
7997 Object.defineProperty(errorResponse, "retryAfterInMilliseconds", {
7998 get: () => {
7999 return errorResponse.retryAfterInMs;
8000 },
8001 });
8002 }
8003 throw errorResponse;
8004 }
8005 return {
8006 headers,
8007 result,
8008 code: response.status,
8009 substatus,
8010 };
8011}
8012/**
8013 * @hidden
8014 */
8015async function request(requestContext) {
8016 if (requestContext.body) {
8017 requestContext.body = bodyFromData(requestContext.body);
8018 if (!requestContext.body) {
8019 throw new Error("parameter data must be a javascript object, string, or Buffer");
8020 }
8021 }
8022 return execute({
8023 requestContext,
8024 executeRequest,
8025 });
8026}
8027const RequestHandler = {
8028 request,
8029};
8030
8031// Copyright (c) Microsoft Corporation.
8032// Licensed under the MIT license.
// Decodes a base64 string to a "binary" (latin1) string, mirroring the
// browser's atob for use in Node.
function atob(str) {
    const decoded = Buffer.from(str, "base64");
    return decoded.toString("binary");
}
8036
8037// Copyright (c) Microsoft Corporation.
8038// Licensed under the MIT license.
8039/**
8040 * Models vector clock bases session token. Session token has the following format:
8041 * `{Version}#{GlobalLSN}#{RegionId1}={LocalLsn1}#{RegionId2}={LocalLsn2}....#{RegionIdN}={LocalLsnN}`
8042 * 'Version' captures the configuration number of the partition which returned this session token.
8043 * 'Version' is incremented everytime topology of the partition is updated (say due to Add/Remove/Failover).
8044 *
8045 * The choice of separators '#' and '=' is important. Separators ';' and ',' are used to delimit
8046 * per-partitionKeyRange session token
8047 * @hidden
8048 *
8049 */
class VectorSessionToken {
    /**
     * @param version - Partition configuration number.
     * @param globalLsn - Global LSN of the partition.
     * @param localLsnByregion - Map of region id -> local LSN (kept as decimal strings).
     * @param sessionToken - Pre-rendered token string; rebuilt from the parts when omitted.
     */
    constructor(version, globalLsn, localLsnByregion, sessionToken) {
        this.version = version;
        this.globalLsn = globalLsn;
        this.localLsnByregion = localLsnByregion;
        this.sessionToken = sessionToken;
        if (!this.sessionToken) {
            const regionAndLocalLsn = [];
            for (const [key, value] of this.localLsnByregion.entries()) {
                regionAndLocalLsn.push(`${key}${VectorSessionToken.REGION_PROGRESS_SEPARATOR}${value}`);
            }
            const regionProgress = regionAndLocalLsn.join(VectorSessionToken.SEGMENT_SEPARATOR);
            if (regionProgress === "") {
                this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}`;
            }
            else {
                this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}${VectorSessionToken.SEGMENT_SEPARATOR}${regionProgress}`;
            }
        }
    }
    /**
     * Parses a `{version}#{globalLsn}#{region}={lsn}...` token string.
     * Returns null for malformed input.
     */
    static create(sessionToken) {
        const [versionStr, globalLsnStr, ...regionSegments] = sessionToken.split(VectorSessionToken.SEGMENT_SEPARATOR);
        const version = parseInt(versionStr, 10);
        const globalLsn = parseFloat(globalLsnStr);
        // BUG FIX: parseInt/parseFloat always return values of type "number"
        // (NaN included), so the previous `typeof x !== "number"` guards could
        // never reject malformed tokens. Reject NaN explicitly instead.
        if (Number.isNaN(version) || Number.isNaN(globalLsn)) {
            return null;
        }
        const lsnByRegion = new Map();
        for (const regionSegment of regionSegments) {
            const [regionIdStr, localLsnStr] = regionSegment.split(VectorSessionToken.REGION_PROGRESS_SEPARATOR);
            if (!regionIdStr || !localLsnStr) {
                return null;
            }
            const regionId = parseInt(regionIdStr, 10);
            if (Number.isNaN(regionId)) {
                return null;
            }
            // Local LSNs are kept as strings; `max` compares them numerically via
            // length + lexicographic order (positive integers only).
            lsnByRegion.set(regionId, localLsnStr);
        }
        return new VectorSessionToken(version, globalLsn, lsnByRegion, sessionToken);
    }
    /** Structural equality on version, global LSN, and per-region progress. */
    equals(other) {
        return !other
            ? false
            : this.version === other.version &&
                this.globalLsn === other.globalLsn &&
                this.areRegionProgressEqual(other.localLsnByregion);
    }
    /**
     * Merges two tokens into one that is at least as recent as both.
     * @throws when the tokens have the same version but inconsistent region sets.
     */
    merge(other) {
        if (other == null) {
            throw new Error("other (Vector Session Token) must not be null");
        }
        if (this.version === other.version &&
            this.localLsnByregion.size !== other.localLsnByregion.size) {
            throw new Error(`Compared session tokens ${this.sessionToken} and ${other.sessionToken} have unexpected regions`);
        }
        const [higherVersionSessionToken, lowerVersionSessionToken] = this.version < other.version ? [other, this] : [this, other];
        const highestLocalLsnByRegion = new Map();
        for (const [regionId, highLocalLsn] of higherVersionSessionToken.localLsnByregion.entries()) {
            const lowLocalLsn = lowerVersionSessionToken.localLsnByregion.get(regionId);
            if (lowLocalLsn) {
                highestLocalLsnByRegion.set(regionId, max(highLocalLsn, lowLocalLsn));
            }
            else if (this.version === other.version) {
                throw new Error(`Compared session tokens have unexpected regions. Session 1: ${this.sessionToken} - Session 2: ${this.sessionToken}`);
            }
            else {
                highestLocalLsnByRegion.set(regionId, highLocalLsn);
            }
        }
        return new VectorSessionToken(Math.max(this.version, other.version), Math.max(this.globalLsn, other.globalLsn), highestLocalLsnByRegion);
    }
    toString() {
        return this.sessionToken;
    }
    // True when both maps hold identical (region id -> local LSN) entries.
    areRegionProgressEqual(other) {
        if (this.localLsnByregion.size !== other.size) {
            return false;
        }
        for (const [regionId, localLsn] of this.localLsnByregion.entries()) {
            const otherLocalLsn = other.get(regionId);
            if (localLsn !== otherLocalLsn) {
                return false;
            }
        }
        return true;
    }
}
VectorSessionToken.SEGMENT_SEPARATOR = "#";
VectorSessionToken.REGION_PROGRESS_SEPARATOR = "=";
8148/**
8149 * @hidden
8150 */
8151function max(int1, int2) {
8152 // NOTE: This only works for positive numbers
8153 if (int1.length === int2.length) {
8154 return int1 > int2 ? int1 : int2;
8155 }
8156 else if (int1.length > int2.length) {
8157 return int1;
8158 }
8159 else {
8160 return int2;
8161 }
8162}
8163
8164// Copyright (c) Microsoft Corporation.
8165/** @hidden */
class SessionContainer {
    constructor(collectionNameToCollectionResourceId = new Map(), collectionResourceIdToSessionTokens = new Map()) {
        // Two-level cache: container name -> container rid, and
        // container rid -> (partition key range id -> VectorSessionToken).
        this.collectionNameToCollectionResourceId = collectionNameToCollectionResourceId;
        this.collectionResourceIdToSessionTokens = collectionResourceIdToSessionTokens;
    }
    // Returns the combined session token string for the container addressed by
    // the request, or the empty token when none is cached.
    get(request) {
        if (!request) {
            throw new Error("request cannot be null");
        }
        const collectionName = getContainerLink(trimSlashes(request.resourceAddress));
        const rangeIdToTokenMap = this.getPartitionKeyRangeIdToTokenMap(collectionName);
        return SessionContainer.getCombinedSessionTokenString(rangeIdToTokenMap);
    }
    // Drops all cached session tokens for the container addressed by the request.
    remove(request) {
        let collectionResourceId;
        const resourceAddress = trimSlashes(request.resourceAddress);
        const collectionName = getContainerLink(resourceAddress);
        if (collectionName) {
            collectionResourceId = this.collectionNameToCollectionResourceId.get(collectionName);
            this.collectionNameToCollectionResourceId.delete(collectionName);
        }
        if (collectionResourceId !== undefined) {
            this.collectionResourceIdToSessionTokens.delete(collectionResourceId);
        }
    }
    // Records the session token from a response's headers, merging it with any
    // token already cached for the same partition key range.
    set(request, resHeaders) {
        // TODO: we check the master logic a few different places. Might not need it.
        if (!resHeaders ||
            SessionContainer.isReadingFromMaster(request.resourceType, request.operationType)) {
            return;
        }
        const sessionTokenString = resHeaders[Constants.HttpHeaders.SessionToken];
        if (!sessionTokenString) {
            return;
        }
        const containerName = this.getContainerName(request, resHeaders);
        // Name-based requests prefer the owner-id header; rid-based requests use
        // the resource id directly.
        const ownerId = !request.isNameBased
            ? request.resourceId
            : resHeaders[Constants.HttpHeaders.OwnerId] || request.resourceId;
        if (!ownerId) {
            return;
        }
        if (containerName && this.validateOwnerID(ownerId)) {
            if (!this.collectionResourceIdToSessionTokens.has(ownerId)) {
                this.collectionResourceIdToSessionTokens.set(ownerId, new Map());
            }
            if (!this.collectionNameToCollectionResourceId.has(containerName)) {
                this.collectionNameToCollectionResourceId.set(containerName, ownerId);
            }
            const containerSessionContainer = this.collectionResourceIdToSessionTokens.get(ownerId);
            SessionContainer.compareAndSetToken(sessionTokenString, containerSessionContainer);
        }
    }
    validateOwnerID(ownerId) {
        // If ownerId contains exactly 8 bytes it represents a unique database+collection identifier. Otherwise it represents another resource
        // The first 4 bytes are the database. The last 4 bytes are the collection.
        // Cosmos rids potentially contain "-" which is an invalid character in the browser atob implementation
        // See https://en.wikipedia.org/wiki/Base64#Filenames
        return atob(ownerId.replace(/-/g, "/")).length === 8;
    }
    // Looks up the (range id -> token) map for a container name, or null when
    // nothing is cached for it.
    getPartitionKeyRangeIdToTokenMap(collectionName) {
        let rangeIdToTokenMap = null;
        if (collectionName && this.collectionNameToCollectionResourceId.has(collectionName)) {
            rangeIdToTokenMap = this.collectionResourceIdToSessionTokens.get(this.collectionNameToCollectionResourceId.get(collectionName));
        }
        return rangeIdToTokenMap;
    }
    // Serializes a (range id -> token) map as "range:token,range:token,...".
    static getCombinedSessionTokenString(tokens) {
        if (!tokens || tokens.size === 0) {
            return SessionContainer.EMPTY_SESSION_TOKEN;
        }
        let result = "";
        for (const [range, token] of tokens.entries()) {
            result +=
                range +
                    SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER +
                    token.toString() +
                    SessionContainer.SESSION_TOKEN_SEPARATOR;
        }
        // Trim the trailing separator appended by the loop above.
        return result.slice(0, -1);
    }
    // Parses "range:token,..." and merges each token into the cached map so the
    // stored token never regresses behind what the service returned.
    static compareAndSetToken(newTokenString, containerSessionTokens) {
        if (!newTokenString) {
            return;
        }
        const partitionsParts = newTokenString.split(SessionContainer.SESSION_TOKEN_SEPARATOR);
        for (const partitionPart of partitionsParts) {
            const newTokenParts = partitionPart.split(SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER);
            if (newTokenParts.length !== 2) {
                return;
            }
            const range = newTokenParts[0];
            const newToken = VectorSessionToken.create(newTokenParts[1]);
            const tokenForRange = !containerSessionTokens.get(range)
                ? newToken
                : containerSessionTokens.get(range).merge(newToken);
            containerSessionTokens.set(range, tokenForRange);
        }
    }
    // TODO: have a assert if the type doesn't mastch known types
    // Control-plane ("master") reads never carry session tokens, so set() skips them.
    static isReadingFromMaster(resourceType, operationType) {
        if (resourceType === Constants.Path.OffersPathSegment ||
            resourceType === Constants.Path.DatabasesPathSegment ||
            resourceType === Constants.Path.UsersPathSegment ||
            resourceType === Constants.Path.PermissionsPathSegment ||
            resourceType === Constants.Path.TopologyPathSegment ||
            resourceType === Constants.Path.DatabaseAccountPathSegment ||
            resourceType === Constants.Path.PartitionKeyRangesPathSegment ||
            (resourceType === Constants.Path.CollectionsPathSegment &&
                operationType === exports.OperationType.Query)) {
            return true;
        }
        return false;
    }
    // Prefers the owner-full-name response header; falls back to the request address.
    getContainerName(request, headers) {
        let ownerFullName = headers[Constants.HttpHeaders.OwnerFullName];
        if (!ownerFullName) {
            ownerFullName = trimSlashes(request.resourceAddress);
        }
        return getContainerLink(ownerFullName);
    }
}
SessionContainer.EMPTY_SESSION_TOKEN = "";
SessionContainer.SESSION_TOKEN_SEPARATOR = ",";
SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER = ":";
8291
8292// Copyright (c) Microsoft Corporation.
8293// Licensed under the MIT license.
// Validates that the given string parses as an absolute URL and returns the
// parsed URL object; throws TypeError for invalid input.
function checkURL(testString) {
    const parsed = new URL(testString);
    return parsed;
}
// Normalizes an endpoint URL to its canonical href with any trailing slash removed.
function sanitizeEndpoint(url) {
    const { href } = new URL(url);
    return href.replace(/\/$/, "");
}
8300
8301// Copyright (c) Microsoft Corporation.
// v4 UUID generator used for per-query request correlation ids.
const uuid = uuid$3.v4;
const logger = logger$4.createClientLogger("ClientContext");
// Content type sent when POSTing a SQL query body.
const QueryJsonContentType = "application/query+json";
8305/**
8306 * @hidden
8307 * @hidden
8308 */
8309class ClientContext {
    constructor(cosmosClientOptions, globalEndpointManager) {
        this.cosmosClientOptions = cosmosClientOptions;
        this.globalEndpointManager = globalEndpointManager;
        this.connectionPolicy = cosmosClientOptions.connectionPolicy;
        // Per-client cache of vector session tokens, keyed by container.
        this.sessionContainer = new SessionContainer();
        this.partitionKeyDefinitionCache = {};
        this.pipeline = null;
        // When AAD credentials are supplied, build a pipeline whose bearer-token
        // policy rewrites the Authorization header into Cosmos' AAD token format
        // ("type=aad&ver=1.0&sig=<token>").
        if (cosmosClientOptions.aadCredentials) {
            this.pipeline = coreRestPipeline.createEmptyPipeline();
            const hrefEndpoint = sanitizeEndpoint(cosmosClientOptions.endpoint);
            const scope = `${hrefEndpoint}/.default`;
            this.pipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({
                credential: cosmosClientOptions.aadCredentials,
                scopes: scope,
                challengeCallbacks: {
                    async authorizeRequest({ request, getAccessToken }) {
                        const tokenResponse = await getAccessToken([scope], {});
                        const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
                        const authorizationToken = `${AUTH_PREFIX}${tokenResponse.token}`;
                        request.headers.set("Authorization", authorizationToken);
                    },
                },
            }));
        }
    }
8335 /** @hidden */
    // Performs a point read (GET) of a single resource, threading the cached
    // session token through and capturing the returned one.
    async read({ path, resourceType, resourceId, options = {}, partitionKey, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Read, resourceId,
                options,
                resourceType,
                partitionKey });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // read will use ReadEndpoint since it uses GET operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
            const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
            this.captureSessionToken(undefined, path, exports.OperationType.Read, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): the failure path records the session token under
            // OperationType.Upsert rather than Read; the same pattern appears in the
            // other verbs' catch blocks — confirm this is intentional.
            this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
            throw err;
        }
    }
    // Executes a feed read (GET) or SQL query (POST) and maps the raw feed
    // payload into results via resultFn.
    async queryFeed({ path, resourceType, resourceId, resultFn, query, options, partitionKeyRangeId, partitionKey, }) {
        // Query operations will use ReadEndpoint even though it uses
        // GET(for queryFeed) and POST(for regular query operations)
        const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Query, partitionKeyRangeId,
            resourceId,
            resourceType,
            options, body: query, partitionKey });
        // Request id used only for correlating the log lines below.
        const requestId = uuid();
        // An explicit query body switches the verb from GET (feed read) to POST.
        if (query !== undefined) {
            request.method = exports.HTTPMethod.post;
        }
        request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
        request.headers = await this.buildHeaders(request);
        if (query !== undefined) {
            request.headers[Constants.HttpHeaders.IsQuery] = "true";
            request.headers[Constants.HttpHeaders.ContentType] = QueryJsonContentType;
            if (typeof query === "string") {
                request.body = { query }; // Converts query text to query object.
            }
        }
        this.applySessionToken(request);
        logger.info("query " +
            requestId +
            " started" +
            (request.partitionKeyRangeId ? " pkrid: " + request.partitionKeyRangeId : ""));
        logger.verbose(request);
        const start = Date.now();
        const response = await RequestHandler.request(request);
        logger.info("query " + requestId + " finished - " + (Date.now() - start) + "ms");
        this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers);
        return this.processQueryFeedResponse(response, !!query, resultFn);
    }
    // Fetches the service's execution plan for a query without running it;
    // used by the query pipeline to decide how to execute cross-partition queries.
    async getQueryPlan(path, resourceType, resourceId, query, options = {}) {
        const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, path, operationType: exports.OperationType.Read, resourceId,
            resourceType,
            options, body: query });
        request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
        request.headers = await this.buildHeaders(request);
        // Ask the gateway for a query plan only, advertising the client's
        // supported query features.
        request.headers[Constants.HttpHeaders.IsQueryPlan] = "True";
        request.headers[Constants.HttpHeaders.QueryVersion] = "1.4";
        request.headers[Constants.HttpHeaders.SupportedQueryFeatures] =
            "NonValueAggregate, Aggregate, Distinct, MultipleOrderBy, OffsetAndLimit, OrderBy, Top, CompositeAggregate, GroupBy, MultipleAggregates";
        request.headers[Constants.HttpHeaders.ContentType] = QueryJsonContentType;
        if (typeof query === "string") {
            request.body = { query }; // Converts query text to query object.
        }
        this.applySessionToken(request);
        const response = await RequestHandler.request(request);
        this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers);
        return response;
    }
    // Returns a QueryIterator over the partition key ranges of a container.
    queryPartitionKeyRanges(collectionLink, query, options) {
        const path = getPathFromLink(collectionLink, exports.ResourceType.pkranges);
        const id = getIdFromLink(collectionLink);
        // Fetch executor invoked by the iterator for each page.
        const cb = (innerOptions) => {
            return this.queryFeed({
                path,
                resourceType: exports.ResourceType.pkranges,
                resourceId: id,
                resultFn: (result) => result.PartitionKeyRanges,
                query,
                options: innerOptions,
            });
        };
        return new QueryIterator(this, query, options, cb);
    }
    // Deletes a resource. Deleting a container invalidates all of its cached
    // session tokens; other deletes just record the token from the response.
    async delete({ path, resourceType, resourceId, options = {}, partitionKey, method = exports.HTTPMethod.delete, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: method, operationType: exports.OperationType.Delete, path,
                resourceType,
                options,
                resourceId,
                partitionKey });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // deleteResource will use WriteEndpoint since it uses DELETE operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
            const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
            if (parseLink(path).type !== "colls") {
                this.captureSessionToken(undefined, path, exports.OperationType.Delete, response.headers);
            }
            else {
                this.clearSessionToken(path);
            }
            return response;
        }
        catch (err) {
            // NOTE(review): records the token under OperationType.Upsert on failure,
            // like the other verbs' catch blocks — confirm intended.
            this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
            throw err;
        }
    }
    // Applies a partial-document PATCH to a resource via the write endpoint.
    async patch({ body, path, resourceType, resourceId, options = {}, partitionKey, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.patch, operationType: exports.OperationType.Patch, path,
                resourceType,
                body,
                resourceId,
                options,
                partitionKey });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // patch will use WriteEndpoint
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
            const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
            this.captureSessionToken(undefined, path, exports.OperationType.Patch, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): records the token under OperationType.Upsert on failure,
            // like the other verbs' catch blocks — confirm intended.
            this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
            throw err;
        }
    }
    // Creates a resource (POST) via the write endpoint.
    async create({ body, path, resourceType, resourceId, options = {}, partitionKey, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Create, path,
                resourceType,
                resourceId,
                body,
                options,
                partitionKey });
            request.headers = await this.buildHeaders(request);
            // create will use WriteEndpoint since it uses POST operation
            this.applySessionToken(request);
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
            const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
            this.captureSessionToken(undefined, path, exports.OperationType.Create, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): records the token under OperationType.Upsert on failure,
            // like the other verbs' catch blocks — confirm intended.
            this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
            throw err;
        }
    }
8488 processQueryFeedResponse(res, isQuery, resultFn) {
8489 if (isQuery) {
8490 return { result: resultFn(res.result), headers: res.headers, code: res.code };
8491 }
8492 else {
8493 const newResult = resultFn(res.result).map((body) => body);
8494 return { result: newResult, headers: res.headers, code: res.code };
8495 }
8496 }
    // Attaches the cached session token for the target container when the
    // request runs at Session consistency and no token was set explicitly.
    applySessionToken(requestContext) {
        const request = this.getSessionParams(requestContext.path);
        // Respect a session token the caller already provided.
        if (requestContext.headers && requestContext.headers[Constants.HttpHeaders.SessionToken]) {
            return;
        }
        const sessionConsistency = requestContext.headers[Constants.HttpHeaders.ConsistencyLevel];
        if (!sessionConsistency) {
            return;
        }
        if (sessionConsistency !== exports.ConsistencyLevel.Session) {
            return;
        }
        if (request.resourceAddress) {
            const sessionToken = this.sessionContainer.get(request);
            if (sessionToken) {
                requestContext.headers[Constants.HttpHeaders.SessionToken] = sessionToken;
            }
        }
    }
8516 async replace({ body, path, resourceType, resourceId, options = {}, partitionKey, }) {
8517 try {
8518 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.put, operationType: exports.OperationType.Replace, path,
8519 resourceType,
8520 body,
8521 resourceId,
8522 options,
8523 partitionKey });
8524 request.headers = await this.buildHeaders(request);
8525 this.applySessionToken(request);
8526 // replace will use WriteEndpoint since it uses PUT operation
8527 request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
8528 const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8529 this.captureSessionToken(undefined, path, exports.OperationType.Replace, response.headers);
8530 return response;
8531 }
8532 catch (err) {
8533 this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
8534 throw err;
8535 }
8536 }
8537 async upsert({ body, path, resourceType, resourceId, options = {}, partitionKey, }) {
8538 try {
8539 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Upsert, path,
8540 resourceType,
8541 body,
8542 resourceId,
8543 options,
8544 partitionKey });
8545 request.headers = await this.buildHeaders(request);
8546 request.headers[Constants.HttpHeaders.IsUpsert] = true;
8547 this.applySessionToken(request);
8548 // upsert will use WriteEndpoint since it uses POST operation
8549 request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
8550 const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8551 this.captureSessionToken(undefined, path, exports.OperationType.Upsert, response.headers);
8552 return response;
8553 }
8554 catch (err) {
8555 this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
8556 throw err;
8557 }
8558 }
8559 async execute({ sprocLink, params, options = {}, partitionKey, }) {
8560 // Accept a single parameter or an array of parameters.
8561 // Didn't add type annotation for this because we should legacy this behavior
8562 if (params !== null && params !== undefined && !Array.isArray(params)) {
8563 params = [params];
8564 }
8565 const path = getPathFromLink(sprocLink);
8566 const id = getIdFromLink(sprocLink);
8567 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Execute, path, resourceType: exports.ResourceType.sproc, options, resourceId: id, body: params, partitionKey });
8568 request.headers = await this.buildHeaders(request);
8569 // executeStoredProcedure will use WriteEndpoint since it uses POST operation
8570 request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
8571 return executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8572 }
8573 /**
8574 * Gets the Database account information.
8575 * @param options - `urlConnection` in the options is the endpoint url whose database account needs to be retrieved.
8576 * If not present, current client's url will be used.
8577 */
8578 async getDatabaseAccount(options = {}) {
8579 const endpoint = options.urlConnection || this.cosmosClientOptions.endpoint;
8580 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { endpoint, method: exports.HTTPMethod.get, operationType: exports.OperationType.Read, path: "", resourceType: exports.ResourceType.none, options });
8581 request.headers = await this.buildHeaders(request);
8582 // await options.beforeOperation({ endpoint, request, headers: requestHeaders });
8583 const { result, headers } = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8584 const databaseAccount = new DatabaseAccount(result, headers);
8585 return { result: databaseAccount, headers };
8586 }
    /**
     * Resolves the currently preferred write endpoint URL via the global
     * endpoint manager.
     */
    getWriteEndpoint() {
        return this.globalEndpointManager.getWriteEndpoint();
    }
    /**
     * Resolves the currently preferred read endpoint URL via the global
     * endpoint manager.
     */
    getReadEndpoint() {
        return this.globalEndpointManager.getReadEndpoint();
    }
    /**
     * Lists all known writable endpoint URLs via the global endpoint manager.
     */
    getWriteEndpoints() {
        return this.globalEndpointManager.getWriteEndpoints();
    }
    /**
     * Lists all known readable endpoint URLs via the global endpoint manager.
     */
    getReadEndpoints() {
        return this.globalEndpointManager.getReadEndpoints();
    }
8599 async batch({ body, path, partitionKey, resourceId, options = {}, }) {
8600 try {
8601 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path,
8602 body, resourceType: exports.ResourceType.item, resourceId,
8603 options,
8604 partitionKey });
8605 request.headers = await this.buildHeaders(request);
8606 request.headers[Constants.HttpHeaders.IsBatchRequest] = true;
8607 request.headers[Constants.HttpHeaders.IsBatchAtomic] = true;
8608 this.applySessionToken(request);
8609 request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
8610 const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8611 this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers);
8612 return response;
8613 }
8614 catch (err) {
8615 this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
8616 throw err;
8617 }
8618 }
8619 async bulk({ body, path, partitionKeyRangeId, resourceId, bulkOptions = {}, options = {}, }) {
8620 try {
8621 const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path,
8622 body, resourceType: exports.ResourceType.item, resourceId,
8623 options });
8624 request.headers = await this.buildHeaders(request);
8625 request.headers[Constants.HttpHeaders.IsBatchRequest] = true;
8626 request.headers[Constants.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId;
8627 request.headers[Constants.HttpHeaders.IsBatchAtomic] = false;
8628 request.headers[Constants.HttpHeaders.BatchContinueOnError] =
8629 bulkOptions.continueOnError || false;
8630 this.applySessionToken(request);
8631 request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request.resourceType, request.operationType);
8632 const response = await executePlugins(request, RequestHandler.request, exports.PluginOn.operation);
8633 this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers);
8634 return response;
8635 }
8636 catch (err) {
8637 this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers);
8638 throw err;
8639 }
8640 }
8641 captureSessionToken(err, path, operationType, resHeaders) {
8642 const request = this.getSessionParams(path);
8643 request.operationType = operationType;
8644 if (!err ||
8645 (!this.isMasterResource(request.resourceType) &&
8646 (err.code === StatusCodes.PreconditionFailed ||
8647 err.code === StatusCodes.Conflict ||
8648 (err.code === StatusCodes.NotFound &&
8649 err.substatus !== SubStatusCodes.ReadSessionNotAvailable)))) {
8650 this.sessionContainer.set(request, resHeaders);
8651 }
8652 }
    /**
     * Removes any cached session token for the resource identified by `path`.
     */
    clearSessionToken(path) {
        const request = this.getSessionParams(path);
        this.sessionContainer.remove(request);
    }
8657 getSessionParams(resourceLink) {
8658 const resourceId = null;
8659 let resourceAddress = null;
8660 const parserOutput = parseLink(resourceLink);
8661 resourceAddress = parserOutput.objectBody.self;
8662 const resourceType = parserOutput.type;
8663 return {
8664 resourceId,
8665 resourceAddress,
8666 resourceType,
8667 isNameBased: true,
8668 };
8669 }
8670 isMasterResource(resourceType) {
8671 if (resourceType === Constants.Path.OffersPathSegment ||
8672 resourceType === Constants.Path.DatabasesPathSegment ||
8673 resourceType === Constants.Path.UsersPathSegment ||
8674 resourceType === Constants.Path.PermissionsPathSegment ||
8675 resourceType === Constants.Path.TopologyPathSegment ||
8676 resourceType === Constants.Path.DatabaseAccountPathSegment ||
8677 resourceType === Constants.Path.PartitionKeyRangesPathSegment ||
8678 resourceType === Constants.Path.CollectionsPathSegment) {
8679 return true;
8680 }
8681 return false;
8682 }
8683 buildHeaders(requestContext) {
8684 return getHeaders({
8685 clientOptions: this.cosmosClientOptions,
8686 defaultHeaders: Object.assign(Object.assign({}, this.cosmosClientOptions.defaultHeaders), requestContext.options.initialHeaders),
8687 verb: requestContext.method,
8688 path: requestContext.path,
8689 resourceId: requestContext.resourceId,
8690 resourceType: requestContext.resourceType,
8691 options: requestContext.options,
8692 partitionKeyRangeId: requestContext.partitionKeyRangeId,
8693 useMultipleWriteLocations: this.connectionPolicy.useMultipleWriteLocations,
8694 partitionKey: requestContext.partitionKey,
8695 });
8696 }
    /**
     * Returns the request properties that are derived from this client context
     * (endpoint manager, request agent, connection policy, plugins, pipeline)
     * and are shared by every outgoing request this context creates.
     * @returns object spread into each new request context
     */
    getContextDerivedPropsForRequestCreation() {
        return {
            globalEndpointManager: this.globalEndpointManager,
            requestAgent: this.cosmosClientOptions.agent,
            connectionPolicy: this.connectionPolicy,
            client: this,
            plugins: this.cosmosClientOptions.plugins,
            pipeline: this.pipeline,
        };
    }
8711}
8712
8713// Copyright (c) Microsoft Corporation.
8714/**
8715 * @hidden
8716 */
8717function getUserAgent(suffix) {
8718 const ua = `${universalUserAgent.getUserAgent()} ${Constants.SDKName}/${Constants.SDKVersion}`;
8719 if (suffix) {
8720 return ua + " " + suffix;
8721 }
8722 return ua;
8723}
8724
8725// Copyright (c) Microsoft Corporation.
8726/**
8727 * @hidden
8728 * This internal class implements the logic for endpoint management for geo-replicated database accounts.
8729 */
8730class GlobalEndpointManager {
8731 /**
8732 * @param options - The document client instance.
8733 */
8734 constructor(options, readDatabaseAccount) {
8735 this.readDatabaseAccount = readDatabaseAccount;
8736 this.writeableLocations = [];
8737 this.readableLocations = [];
8738 this.unavailableReadableLocations = [];
8739 this.unavailableWriteableLocations = [];
8740 this.options = options;
8741 this.defaultEndpoint = options.endpoint;
8742 this.enableEndpointDiscovery = options.connectionPolicy.enableEndpointDiscovery;
8743 this.isRefreshing = false;
8744 this.preferredLocations = this.options.connectionPolicy.preferredLocations;
8745 }
8746 /**
8747 * Gets the current read endpoint from the endpoint cache.
8748 */
8749 async getReadEndpoint() {
8750 return this.resolveServiceEndpoint(exports.ResourceType.item, exports.OperationType.Read);
8751 }
8752 /**
8753 * Gets the current write endpoint from the endpoint cache.
8754 */
8755 async getWriteEndpoint() {
8756 return this.resolveServiceEndpoint(exports.ResourceType.item, exports.OperationType.Replace);
8757 }
8758 async getReadEndpoints() {
8759 return this.readableLocations.map((loc) => loc.databaseAccountEndpoint);
8760 }
8761 async getWriteEndpoints() {
8762 return this.writeableLocations.map((loc) => loc.databaseAccountEndpoint);
8763 }
8764 async markCurrentLocationUnavailableForRead(endpoint) {
8765 await this.refreshEndpointList();
8766 const location = this.readableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint);
8767 if (location) {
8768 location.unavailable = true;
8769 location.lastUnavailabilityTimestampInMs = Date.now();
8770 this.unavailableReadableLocations.push(location);
8771 }
8772 }
8773 async markCurrentLocationUnavailableForWrite(endpoint) {
8774 await this.refreshEndpointList();
8775 const location = this.writeableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint);
8776 if (location) {
8777 location.unavailable = true;
8778 location.lastUnavailabilityTimestampInMs = Date.now();
8779 this.unavailableWriteableLocations.push(location);
8780 }
8781 }
8782 canUseMultipleWriteLocations(resourceType, operationType) {
8783 let canUse = this.options.connectionPolicy.useMultipleWriteLocations;
8784 if (resourceType) {
8785 canUse =
8786 canUse &&
8787 (resourceType === exports.ResourceType.item ||
8788 (resourceType === exports.ResourceType.sproc && operationType === exports.OperationType.Execute));
8789 }
8790 return canUse;
8791 }
8792 async resolveServiceEndpoint(resourceType, operationType) {
8793 // If endpoint discovery is disabled, always use the user provided endpoint
8794 if (!this.options.connectionPolicy.enableEndpointDiscovery) {
8795 return this.defaultEndpoint;
8796 }
8797 // If getting the database account, always use the user provided endpoint
8798 if (resourceType === exports.ResourceType.none) {
8799 return this.defaultEndpoint;
8800 }
8801 if (this.readableLocations.length === 0 || this.writeableLocations.length === 0) {
8802 const { resource: databaseAccount } = await this.readDatabaseAccount({
8803 urlConnection: this.defaultEndpoint,
8804 });
8805 this.writeableLocations = databaseAccount.writableLocations;
8806 this.readableLocations = databaseAccount.readableLocations;
8807 }
8808 const locations = isReadRequest(operationType)
8809 ? this.readableLocations
8810 : this.writeableLocations;
8811 let location;
8812 // If we have preferred locations, try each one in order and use the first available one
8813 if (this.preferredLocations && this.preferredLocations.length > 0) {
8814 for (const preferredLocation of this.preferredLocations) {
8815 location = locations.find((loc) => loc.unavailable !== true &&
8816 normalizeEndpoint(loc.name) === normalizeEndpoint(preferredLocation));
8817 if (location) {
8818 break;
8819 }
8820 }
8821 }
8822 // If no preferred locations or one did not match, just grab the first one that is available
8823 if (!location) {
8824 location = locations.find((loc) => {
8825 return loc.unavailable !== true;
8826 });
8827 }
8828 return location ? location.databaseAccountEndpoint : this.defaultEndpoint;
8829 }
8830 /**
8831 * Refreshes the endpoint list by clearning stale unavailability and then
8832 * retrieving the writable and readable locations from the geo-replicated database account
8833 * and then updating the locations cache.
8834 * We skip the refreshing if enableEndpointDiscovery is set to False
8835 */
8836 async refreshEndpointList() {
8837 if (!this.isRefreshing && this.enableEndpointDiscovery) {
8838 this.isRefreshing = true;
8839 const databaseAccount = await this.getDatabaseAccountFromAnyEndpoint();
8840 if (databaseAccount) {
8841 this.refreshStaleUnavailableLocations();
8842 this.refreshEndpoints(databaseAccount);
8843 }
8844 this.isRefreshing = false;
8845 }
8846 }
8847 refreshEndpoints(databaseAccount) {
8848 for (const location of databaseAccount.writableLocations) {
8849 const existingLocation = this.writeableLocations.find((loc) => loc.name === location.name);
8850 if (!existingLocation) {
8851 this.writeableLocations.push(location);
8852 }
8853 }
8854 for (const location of databaseAccount.readableLocations) {
8855 const existingLocation = this.readableLocations.find((loc) => loc.name === location.name);
8856 if (!existingLocation) {
8857 this.readableLocations.push(location);
8858 }
8859 }
8860 }
8861 refreshStaleUnavailableLocations() {
8862 const now = Date.now();
8863 this.updateLocation(now, this.unavailableReadableLocations, this.readableLocations);
8864 this.unavailableReadableLocations = this.cleanUnavailableLocationList(now, this.unavailableReadableLocations);
8865 this.updateLocation(now, this.unavailableWriteableLocations, this.writeableLocations);
8866 this.unavailableWriteableLocations = this.cleanUnavailableLocationList(now, this.unavailableWriteableLocations);
8867 }
8868 /**
8869 * update the locationUnavailability to undefined if the location is available again
8870 * @param now - current time
8871 * @param unavailableLocations - list of unavailable locations
8872 * @param allLocations - list of all locations
8873 */
8874 updateLocation(now, unavailableLocations, allLocations) {
8875 for (const location of unavailableLocations) {
8876 const unavaialableLocation = allLocations.find((loc) => loc.name === location.name);
8877 if (unavaialableLocation &&
8878 now - unavaialableLocation.lastUnavailabilityTimestampInMs >
8879 Constants.LocationUnavailableExpirationTimeInMs) {
8880 unavaialableLocation.unavailable = false;
8881 }
8882 }
8883 }
8884 cleanUnavailableLocationList(now, unavailableLocations) {
8885 return unavailableLocations.filter((loc) => {
8886 if (loc &&
8887 now - loc.lastUnavailabilityTimestampInMs >= Constants.LocationUnavailableExpirationTimeInMs) {
8888 return false;
8889 }
8890 return true;
8891 });
8892 }
8893 /**
8894 * Gets the database account first by using the default endpoint, and if that doesn't returns
8895 * use the endpoints for the preferred locations in the order they are specified to get
8896 * the database account.
8897 */
8898 async getDatabaseAccountFromAnyEndpoint() {
8899 try {
8900 const options = { urlConnection: this.defaultEndpoint };
8901 const { resource: databaseAccount } = await this.readDatabaseAccount(options);
8902 return databaseAccount;
8903 // If for any reason(non - globaldb related), we are not able to get the database
8904 // account from the above call to readDatabaseAccount,
8905 // we would try to get this information from any of the preferred locations that the user
8906 // might have specified (by creating a locational endpoint)
8907 // and keeping eating the exception until we get the database account and return None at the end,
8908 // if we are not able to get that info from any endpoints
8909 }
8910 catch (err) {
8911 // TODO: Tracing
8912 }
8913 if (this.preferredLocations) {
8914 for (const location of this.preferredLocations) {
8915 try {
8916 const locationalEndpoint = GlobalEndpointManager.getLocationalEndpoint(this.defaultEndpoint, location);
8917 const options = { urlConnection: locationalEndpoint };
8918 const { resource: databaseAccount } = await this.readDatabaseAccount(options);
8919 if (databaseAccount) {
8920 return databaseAccount;
8921 }
8922 }
8923 catch (err) {
8924 // TODO: Tracing
8925 }
8926 }
8927 }
8928 }
8929 /**
8930 * Gets the locational endpoint using the location name passed to it using the default endpoint.
8931 *
8932 * @param defaultEndpoint - The default endpoint to use for the endpoint.
8933 * @param locationName - The location name for the azure region like "East US".
8934 */
8935 static getLocationalEndpoint(defaultEndpoint, locationName) {
8936 // For defaultEndpoint like 'https://contoso.documents.azure.com:443/' parse it to generate URL format
8937 // This defaultEndpoint should be global endpoint(and cannot be a locational endpoint)
8938 // and we agreed to document that
8939 const endpointUrl = new URL(defaultEndpoint);
8940 // hostname attribute in endpointUrl will return 'contoso.documents.azure.com'
8941 if (endpointUrl.hostname) {
8942 const hostnameParts = endpointUrl.hostname.toString().toLowerCase().split(".");
8943 if (hostnameParts) {
8944 // globalDatabaseAccountName will return 'contoso'
8945 const globalDatabaseAccountName = hostnameParts[0];
8946 // Prepare the locationalDatabaseAccountName as contoso-EastUS for location_name 'East US'
8947 const locationalDatabaseAccountName = globalDatabaseAccountName + "-" + locationName.replace(" ", "");
8948 // Replace 'contoso' with 'contoso-EastUS' and
8949 // return locationalEndpoint as https://contoso-EastUS.documents.azure.com:443/
8950 const locationalEndpoint = defaultEndpoint
8951 .toLowerCase()
8952 .replace(globalDatabaseAccountName, locationalDatabaseAccountName);
8953 return locationalEndpoint;
8954 }
8955 }
8956 return null;
8957 }
8958}
/**
 * Normalizes a region/endpoint name for comparison: strips every space and
 * lowercases the result (e.g. "East US" -> "eastus").
 */
function normalizeEndpoint(endpoint) {
    return endpoint.replace(/ /g, "").toLowerCase();
}
8962
8963// Copyright (c) Microsoft Corporation.
8964/**
8965 * Provides a client-side logical representation of the Azure Cosmos DB database account.
8966 * This client is used to configure and execute requests in the Azure Cosmos DB database service.
8967 * @example Instantiate a client and create a new database
8968 * ```typescript
8969 * const client = new CosmosClient({endpoint: "<URL HERE>", auth: {masterKey: "<KEY HERE>"}});
8970 * await client.databases.create({id: "<datbase name here>"});
8971 * ```
8972 * @example Instantiate a client with custom Connection Policy
8973 * ```typescript
8974 * const connectionPolicy = new ConnectionPolicy();
8975 * connectionPolicy.RequestTimeout = 10000;
8976 * const client = new CosmosClient({
8977 * endpoint: "<URL HERE>",
8978 * auth: {masterKey: "<KEY HERE>"},
8979 * connectionPolicy
8980 * });
8981 * ```
8982 */
8983class CosmosClient {
8984 constructor(optionsOrConnectionString) {
8985 var _a, _b;
8986 if (typeof optionsOrConnectionString === "string") {
8987 optionsOrConnectionString = parseConnectionString(optionsOrConnectionString);
8988 }
8989 const endpoint = checkURL(optionsOrConnectionString.endpoint);
8990 if (!endpoint) {
8991 throw new Error("Invalid endpoint specified");
8992 }
8993 optionsOrConnectionString.connectionPolicy = Object.assign({}, defaultConnectionPolicy, optionsOrConnectionString.connectionPolicy);
8994 optionsOrConnectionString.defaultHeaders = optionsOrConnectionString.defaultHeaders || {};
8995 optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.CacheControl] = "no-cache";
8996 optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.Version] =
8997 Constants.CurrentVersion;
8998 if (optionsOrConnectionString.consistencyLevel !== undefined) {
8999 optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] =
9000 optionsOrConnectionString.consistencyLevel;
9001 }
9002 optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.UserAgent] = getUserAgent(optionsOrConnectionString.userAgentSuffix);
9003 const globalEndpointManager = new GlobalEndpointManager(optionsOrConnectionString, async (opts) => this.getDatabaseAccount(opts));
9004 this.clientContext = new ClientContext(optionsOrConnectionString, globalEndpointManager);
9005 if (((_a = optionsOrConnectionString.connectionPolicy) === null || _a === void 0 ? void 0 : _a.enableEndpointDiscovery) &&
9006 ((_b = optionsOrConnectionString.connectionPolicy) === null || _b === void 0 ? void 0 : _b.enableBackgroundEndpointRefreshing)) {
9007 this.backgroundRefreshEndpointList(globalEndpointManager, optionsOrConnectionString.connectionPolicy.endpointRefreshRateInMs ||
9008 defaultConnectionPolicy.endpointRefreshRateInMs);
9009 }
9010 this.databases = new Databases(this, this.clientContext);
9011 this.offers = new Offers(this, this.clientContext);
9012 }
9013 /**
9014 * Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.)
9015 */
9016 async getDatabaseAccount(options) {
9017 const response = await this.clientContext.getDatabaseAccount(options);
9018 return new ResourceResponse(response.result, response.headers, response.code);
9019 }
9020 /**
9021 * Gets the currently used write endpoint url. Useful for troubleshooting purposes.
9022 *
9023 * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
9024 */
9025 getWriteEndpoint() {
9026 return this.clientContext.getWriteEndpoint();
9027 }
9028 /**
9029 * Gets the currently used read endpoint. Useful for troubleshooting purposes.
9030 *
9031 * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
9032 */
9033 getReadEndpoint() {
9034 return this.clientContext.getReadEndpoint();
9035 }
9036 /**
9037 * Gets the known write endpoints. Useful for troubleshooting purposes.
9038 *
9039 * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
9040 */
9041 getWriteEndpoints() {
9042 return this.clientContext.getWriteEndpoints();
9043 }
9044 /**
9045 * Gets the currently used read endpoint. Useful for troubleshooting purposes.
9046 *
9047 * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
9048 */
9049 getReadEndpoints() {
9050 return this.clientContext.getReadEndpoints();
9051 }
9052 /**
9053 * Used for reading, updating, or deleting a existing database by id or accessing containers belonging to that database.
9054 *
9055 * This does not make a network call. Use `.read` to get info about the database after getting the {@link Database} object.
9056 *
9057 * @param id - The id of the database.
9058 * @example Create a new container off of an existing database
9059 * ```typescript
9060 * const container = client.database("<database id>").containers.create("<container id>");
9061 * ```
9062 *
9063 * @example Delete an existing database
9064 * ```typescript
9065 * await client.database("<id here>").delete();
9066 * ```
9067 */
9068 database(id) {
9069 return new Database(this, id, this.clientContext);
9070 }
9071 /**
9072 * Used for reading, or updating a existing offer by id.
9073 * @param id - The id of the offer.
9074 */
9075 offer(id) {
9076 return new Offer(this, id, this.clientContext);
9077 }
9078 /**
9079 * Clears background endpoint refresher. Use client.dispose() when destroying the CosmosClient within another process.
9080 */
9081 dispose() {
9082 clearTimeout(this.endpointRefresher);
9083 }
9084 async backgroundRefreshEndpointList(globalEndpointManager, refreshRate) {
9085 this.endpointRefresher = setInterval(() => {
9086 try {
9087 globalEndpointManager.refreshEndpointList();
9088 }
9089 catch (e) {
9090 console.warn("Failed to refresh endpoints", e);
9091 }
9092 }, refreshRate);
9093 if (this.endpointRefresher.unref && typeof this.endpointRefresher.unref === "function") {
9094 this.endpointRefresher.unref();
9095 }
9096 }
9097}
9098
9099// Copyright (c) Microsoft Corporation.
9100// Licensed under the MIT license.
/**
 * Property bag describing a Cosmos DB SAS token configuration (experimental).
 * Fields (database/container/resource names, scopes, key type, times) are
 * assigned by the caller and consumed by createAuthorizationSasToken.
 */
class SasTokenProperties {
}
9103
9104// Copyright (c) Microsoft Corporation.
9105// Licensed under the MIT license.
9106/// <reference lib="dom"/>
/**
 * Converts a string to a Uint8Array of its UTF-16 code units truncated to bytes.
 *
 * NOTE(review): despite the name, this is NOT real UTF-8 encoding — each
 * char code is stored into a Uint8Array slot (effectively code % 256), which
 * is only correct for code points <= 0xFF. Kept byte-for-byte compatible
 * because the SAS token payload depends on this output — confirm before changing.
 */
function encodeUTF8(str) {
    const length = str.length;
    const bytes = new Uint8Array(length);
    let index = 0;
    while (index < length) {
        bytes[index] = str.charCodeAt(index);
        index += 1;
    }
    return bytes;
}
9114
9115// Copyright (c) Microsoft Corporation.
9116/**
9117 * Experimental internal only
9118 * Generates the payload representing the permission configuration for the sas token.
9119 */
9120async function createAuthorizationSasToken(masterKey, sasTokenProperties) {
9121 let resourcePrefixPath = "";
9122 if (typeof sasTokenProperties.databaseName === "string" &&
9123 sasTokenProperties.databaseName !== "") {
9124 resourcePrefixPath += `/${Constants.Path.DatabasesPathSegment}/${sasTokenProperties.databaseName}`;
9125 }
9126 if (typeof sasTokenProperties.containerName === "string" &&
9127 sasTokenProperties.containerName !== "") {
9128 if (sasTokenProperties.databaseName === "") {
9129 throw new Error(`illegalArgumentException : ${sasTokenProperties.databaseName} \
9130 is an invalid database name`);
9131 }
9132 resourcePrefixPath += `/${Constants.Path.CollectionsPathSegment}/${sasTokenProperties.containerName}`;
9133 }
9134 if (typeof sasTokenProperties.resourceName === "string" &&
9135 sasTokenProperties.resourceName !== "") {
9136 if (sasTokenProperties.containerName === "") {
9137 throw new Error(`illegalArgumentException : ${sasTokenProperties.containerName} \
9138 is an invalid container name`);
9139 }
9140 switch (sasTokenProperties.resourceKind) {
9141 case "ITEM":
9142 resourcePrefixPath += `${Constants.Path.Root}${Constants.Path.DocumentsPathSegment}`;
9143 break;
9144 case "STORED_PROCEDURE":
9145 resourcePrefixPath += `${Constants.Path.Root}${Constants.Path.StoredProceduresPathSegment}`;
9146 break;
9147 case "USER_DEFINED_FUNCTION":
9148 resourcePrefixPath += `${Constants.Path.Root}${Constants.Path.UserDefinedFunctionsPathSegment}`;
9149 break;
9150 case "TRIGGER":
9151 resourcePrefixPath += `${Constants.Path.Root}${Constants.Path.TriggersPathSegment}`;
9152 break;
9153 default:
9154 throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \
9155 is an invalid resource kind`);
9156 }
9157 resourcePrefixPath += `${Constants.Path.Root}${sasTokenProperties.resourceName}${Constants.Path.Root}`;
9158 }
9159 sasTokenProperties.resourcePath = resourcePrefixPath.toString();
9160 let partitionRanges = "";
9161 if (sasTokenProperties.partitionKeyValueRanges !== undefined &&
9162 sasTokenProperties.partitionKeyValueRanges.length > 0) {
9163 if (typeof sasTokenProperties.resourceKind !== "string" &&
9164 sasTokenProperties.resourceKind !== "ITEM") {
9165 throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \
9166 is an invalid partition key value range`);
9167 }
9168 sasTokenProperties.partitionKeyValueRanges.forEach((range) => {
9169 partitionRanges += `${encodeUTF8(range)},`;
9170 });
9171 }
9172 if (sasTokenProperties.controlPlaneReaderScope === 0) {
9173 sasTokenProperties.controlPlaneReaderScope += exports.SasTokenPermissionKind.ContainerReadAny;
9174 sasTokenProperties.controlPlaneWriterScope += exports.SasTokenPermissionKind.ContainerReadAny;
9175 }
9176 if (sasTokenProperties.dataPlaneReaderScope === 0 &&
9177 sasTokenProperties.dataPlaneWriterScope === 0) {
9178 sasTokenProperties.dataPlaneReaderScope = exports.SasTokenPermissionKind.ContainerFullAccess;
9179 sasTokenProperties.dataPlaneWriterScope = exports.SasTokenPermissionKind.ContainerFullAccess;
9180 }
9181 if (typeof sasTokenProperties.keyType !== "number" ||
9182 typeof sasTokenProperties.keyType === undefined) {
9183 switch (sasTokenProperties.keyType) {
9184 case CosmosKeyType.PrimaryMaster:
9185 sasTokenProperties.keyType = 1;
9186 break;
9187 case CosmosKeyType.SecondaryMaster:
9188 sasTokenProperties.keyType = 2;
9189 break;
9190 case CosmosKeyType.PrimaryReadOnly:
9191 sasTokenProperties.keyType = 3;
9192 break;
9193 case CosmosKeyType.SecondaryReadOnly:
9194 sasTokenProperties.keyType = 4;
9195 break;
9196 default:
9197 throw new Error(`illegalArgumentException : ${sasTokenProperties.keyType} \
9198 is an invalid key type`);
9199 }
9200 }
9201 const payload = sasTokenProperties.user +
9202 "\n" +
9203 sasTokenProperties.userTag +
9204 "\n" +
9205 sasTokenProperties.resourcePath +
9206 "\n" +
9207 partitionRanges +
9208 "\n" +
9209 utcsecondsSinceEpoch(sasTokenProperties.startTime).toString(16) +
9210 "\n" +
9211 utcsecondsSinceEpoch(sasTokenProperties.expiryTime).toString(16) +
9212 "\n" +
9213 sasTokenProperties.keyType +
9214 "\n" +
9215 sasTokenProperties.controlPlaneReaderScope.toString(16) +
9216 "\n" +
9217 sasTokenProperties.controlPlaneWriterScope.toString(16) +
9218 "\n" +
9219 sasTokenProperties.dataPlaneReaderScope.toString(16) +
9220 "\n" +
9221 sasTokenProperties.dataPlaneWriterScope.toString(16) +
9222 "\n";
9223 const signedPayload = await hmac(masterKey, Buffer.from(payload).toString("base64"));
9224 return "type=sas&ver=1.0&sig=" + signedPayload + ";" + Buffer.from(payload).toString("base64");
9225}
9226/**
9227 * @hidden
9228 */
9229// TODO: utcMilllisecondsSinceEpoch
9230function utcsecondsSinceEpoch(date) {
9231 return Math.round(date.getTime() / 1000);
9232}
9233
// Errors re-exported from dependency packages, resolved lazily through getters
// so the dependency's live binding is always returned.
Object.defineProperty(exports, 'RestError', {
    enumerable: true,
    get: function () { return coreRestPipeline.RestError; }
});
Object.defineProperty(exports, 'AbortError', {
    enumerable: true,
    get: function () { return abortController.AbortError; }
});
// Public API surface of the package: plain, eagerly-bound exports.
Object.assign(exports, {
    BulkOperationType,
    ChangeFeedIterator,
    ChangeFeedResponse,
    ClientContext,
    ClientSideMetrics,
    Conflict,
    ConflictResponse,
    Conflicts,
    Constants,
    Container,
    ContainerResponse,
    Containers,
    CosmosClient,
    DEFAULT_PARTITION_KEY_PATH,
    Database,
    DatabaseAccount,
    DatabaseResponse,
    Databases,
    ErrorResponse,
    FeedResponse,
    GlobalEndpointManager,
    Item,
    ItemResponse,
    Items,
    Offer,
    OfferResponse,
    Offers,
    PatchOperationType,
    Permission,
    PermissionResponse,
    Permissions,
    QueryIterator,
    QueryMetrics,
    QueryMetricsConstants,
    QueryPreparationTimes,
    ResourceResponse,
    RuntimeExecutionTimes,
    SasTokenProperties,
    Scripts,
    StatusCodes,
    StoredProcedure,
    StoredProcedureResponse,
    StoredProcedures,
    TimeSpan,
    TimeoutError,
    Trigger,
    TriggerResponse,
    Triggers,
    User,
    UserDefinedFunction,
    UserDefinedFunctionResponse,
    UserDefinedFunctions,
    UserResponse,
    Users,
    createAuthorizationSasToken,
    extractPartitionKey,
    setAuthorizationTokenHeaderUsingMasterKey,
});
9299//# sourceMappingURL=index.js.map