Breaking API changes for v3 (#238)
This commit is contained in:
Parent: 99041b5c81
Commit: 10fdf15356
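At a glance, the consumer-visible change in this diff is that response objects become classes extending `ResourceResponse`: callers read `resource` (plus `headers` and `statusCode`) instead of `body`, and the `IHeaders` type is renamed to `CosmosHeaders`. A minimal before/after sketch, assuming an already-constructed `CosmosClient`; the ids and the item shape here are illustrative, not taken from this commit:

```typescript
import { CosmosClient } from "@azure/cosmos";

// Hypothetical helper: `client` is assumed to be constructed elsewhere.
export async function sketch(client: CosmosClient) {
  // Before this PR (v2 shape): responses were plain objects exposing `body`.
  // const { body: databaseDefinition, database } = await client.databases.create({ id: "sample-db" });

  // After this PR (v3 shape): responses extend ResourceResponse and expose
  // `resource`, `headers`, and `statusCode`, plus a typed back-reference
  // (database, container, item, ...).
  const { resource: databaseDefinition, database } = await client.databases.create({ id: "sample-db" });
  const { resource: containerDefinition, container } = await database.containers.create({ id: "sample-container" });
  const { resource: createdItem, statusCode } = await container.items.create({ id: "1", category: "demo" });

  console.log(databaseDefinition.id, containerDefinition.id, createdItem.id, statusCode);
}
```
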
@@ -263,6 +263,7 @@ lib/**
dist-esm/
dist-test/
dist-commonjs/
temp/
ts-test/package-lock.json
ts-test/package.json

@@ -9,13 +9,22 @@
"request": "launch",
"name": "Mocha Tests",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": ["-u", "tdd", "--colors", "${workspaceFolder}/lib/test/**/*.js", "-g", ".*Location Cache.*"],
"args": [
//"-r",
//"ts-node/register",
"-u",
"tdd",
"--colors",
"${workspaceFolder}/dist-commonjs/src/test/**/*.js",
"-g",
".*Single Boundary Case Query Range.*"
],
"internalConsoleOptions": "openOnSessionStart",
"sourceMaps": true,
"outFiles": ["${workspaceFolder}/lib/**"],
"env": {
"TS_NODE_COMPILER_OPTIONS": "{\"module\":\"commonjs\"}",
"MOCHA_TIMEOUT": "999999"
}
},
"protocol": "inspector"
},
{
"type": "node",

@@ -317,6 +317,12 @@
"normalize-path": "^2.1.1"
}
},
"arg": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.0.tgz",
"integrity": "sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg==",
"dev": true
},
"argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
@@ -368,12 +374,6 @@
"integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==",
"dev": true
},
"arrify": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
"integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=",
"dev": true
},
"assign-symbols": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz",
@@ -655,9 +655,9 @@
"dev": true
},
"buffer-from": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.0.0.tgz",
"integrity": "sha512-83apNb8KK0Se60UE1+4Ukbe3HbfELJ6UlI4ldtOGs7So4KD26orJM8hIY9lxdzP+UpItH1Yh/Y8GUvNFWFFRxA==",
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==",
"dev": true
},
"builtin-modules": {
@@ -3198,9 +3198,9 @@
}
},
"make-error": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.4.tgz",
"integrity": "sha512-0Dab5btKVPhibSalc9QGXb559ED7G7iLjFXBaj9Wq8O3vorueR5K5jaE3hkG6ZQINyhA/JgG6Qk4qdFQjsYV6g==",
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==",
"dev": true
},
"map-cache": {
@@ -4669,9 +4669,9 @@
}
},
"source-map-support": {
"version": "0.5.6",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.6.tgz",
"integrity": "sha512-N4KXEz7jcKqPf2b2vZF11lQIz9W5ZMuUcIOGj243lduidkf2fjkVKJS9vNxVWn3u/uxX38AcE8U9nnH9FPcq+g==",
"version": "0.5.10",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.10.tgz",
"integrity": "sha512-YfQ3tQFTK/yzlGJuX8pTwa4tifQj4QS2Mj7UegOu8jAz59MqIiMGPXxQhVQiIMNzayuUSF/jEuVnfFF5JqybmQ==",
"dev": true,
"requires": {
"buffer-from": "^1.0.0",
@@ -4858,33 +4858,16 @@
}
},
"ts-node": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-6.2.0.tgz",
"integrity": "sha512-ZNT+OEGfUNVMGkpIaDJJ44Zq3Yr0bkU/ugN1PHbU+/01Z7UV1fsELRiTx1KuQNvQ1A3pGh3y25iYF6jXgxV21A==",
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.0.2.tgz",
"integrity": "sha512-MosTrinKmaAcWgO8tqMjMJB22h+sp3Rd1i4fdoWY4mhBDekOwIAKI/bzmRi7IcbCmjquccYg2gcF6NBkLgr0Tw==",
"dev": true,
"requires": {
"arrify": "^1.0.0",
"buffer-from": "^1.1.0",
"arg": "^4.1.0",
"diff": "^3.1.0",
"make-error": "^1.1.1",
"minimist": "^1.2.0",
"mkdirp": "^0.5.1",
"source-map-support": "^0.5.6",
"yn": "^2.0.0"
},
"dependencies": {
"buffer-from": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.0.tgz",
"integrity": "sha512-c5mRlguI/Pe2dSZmpER62rSCu0ryKmWddzRYsuXc50U2/g8jMOulc31VZMa4mYx31U5xsmSOpDCgH88Vl9cDGQ==",
"dev": true
},
"minimist": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
}
"yn": "^3.0.0"
}
},
"tslib": {
@@ -5286,9 +5269,9 @@
"dev": true
},
"yn": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz",
"integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.0.0.tgz",
"integrity": "sha512-+Wo/p5VRfxUgBUGy2j/6KX2mj9AYJWOHuhMjMcbBFc3y54o9/4buK1ksBvuiK01C3kby8DH9lSmJdSxw+4G/2Q==",
"dev": true
},
"z-schema": {

@@ -25,6 +25,7 @@
"format": "prettier --write --config .prettierrc.json \"src/**/*.ts\"",
"check-format": "prettier --list-different --config .prettierrc.json \"src/**/*.ts\"",
"compile": "echo Using TypeScript && tsc --version && tsc --pretty",
"compile-commonjs": "echo Using TypeScript && tsc --version && tsc --pretty -p ./tsconfig.commonjs.json",
"docs": "typedoc --excludePrivate --exclude \"**/test/**\" --mode file --out ./dist/docs ./src",
"bundle": "rollup -c",
"bundle-types": "node bundle-types.js",
@@ -65,7 +66,7 @@
"rollup-plugin-local-resolve": "^1.0.7",
"rollup-plugin-multi-entry": "2.0.2",
"sinon": "^5.1.1",
"ts-node": "^6.2.0",
"ts-node": "^8.0.2",
"tslint": "5.11.0",
"tslint-config-prettier": "^1.14.0",
"typedoc": "0.13.0",

@@ -96,7 +96,7 @@ export class ChangeFeedIterator<T> {
if (!this.isPartitionSpecified && isParittionedContainer) {
throw new Error("Container is partitioned, but no partition key or partition key range id was specified.");
}
const feedOptions: FeedOptions = { initialHeaders: {}, a_im: "Incremental feed" };
const feedOptions: FeedOptions = { initialHeaders: {}, useIncrementalFeed: true };

if (typeof this.changeFeedOptions.maxItemCount === "number") {
feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;

@@ -1,5 +1,5 @@
import { Constants } from "./common";
import { IHeaders } from "./queryExecutionContext";
import { CosmosHeaders } from "./queryExecutionContext";

/**
* A single response page from the Azure Cosmos DB Change Feed
@@ -27,7 +27,7 @@ export class ChangeFeedResponse<T> {
* Gets the status code of the response from Azure Cosmos DB
*/
public readonly statusCode: number,
headers: IHeaders
headers: CosmosHeaders
) {
this.headers = Object.freeze(headers);
}
@@ -79,5 +79,5 @@ export class ChangeFeedResponse<T> {
/**
* Response headers of the response from Azure Cosmos DB
*/
public headers: IHeaders;
public headers: CosmosHeaders;
}

@@ -1,29 +1,17 @@
import { PartitionKeyRange } from "./client/Container/PartitionKeyRange";
import { Resource } from "./client/Resource";
import {
getIdFromLink,
getPathFromLink,
HTTPMethod,
parseConnectionPolicy,
parseLink,
ResourceType,
setIsUpsertHeader,
StatusCodes,
SubStatusCodes
} from "./common";

import { ConnectionPolicy, ConsistencyLevel, DatabaseAccount, QueryCompatibilityMode } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import {
Constants,
CosmosClientOptions,
IHeaders,
QueryIterator,
RequestOptions,
Response,
SqlQuerySpec
} from "./index";
import { FetchFunctionCallback } from "./queryExecutionContext";
import { FeedOptions, RequestHandler } from "./request";

import { Constants, HTTPMethod, ResourceType } from "./common/constants";
import { getIdFromLink, getPathFromLink, parseConnectionPolicy, parseLink, setIsUpsertHeader } from "./common/helper";
import { StatusCodes, SubStatusCodes } from "./common/statusCodes";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { FetchFunctionCallback, SqlQuerySpec } from "./queryExecutionContext";
import { CosmosHeaders } from "./queryExecutionContext/CosmosHeaders";
import { QueryIterator } from "./queryIterator";
import { FeedOptions, RequestHandler, RequestOptions, Response } from "./request";
import { ErrorResponse, getHeaders } from "./request/request";
import { RequestContext } from "./request/RequestContext";
import { SessionContainer } from "./session/sessionContainer";
@@ -57,7 +45,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
try {
@@ -181,7 +169,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
try {
@@ -226,7 +214,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>>;

@@ -236,7 +224,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>>;
public async create<T, U>(
@@ -244,7 +232,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>> {
try {
@@ -293,14 +281,14 @@ export class ClientContext {
}
}

private applySessionToken(path: string, reqHeaders: IHeaders) {
private applySessionToken(path: string, reqHeaders: CosmosHeaders) {
const request = this.getSessionParams(path);

if (reqHeaders && reqHeaders[Constants.HttpHeaders.SessionToken]) {
return;
}

const sessionConsistency: ConsistencyLevel = reqHeaders[Constants.HttpHeaders.ConsistencyLevel];
const sessionConsistency: ConsistencyLevel = reqHeaders[Constants.HttpHeaders.ConsistencyLevel] as ConsistencyLevel;
if (!sessionConsistency) {
return;
}
@@ -322,7 +310,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
try {
@@ -363,7 +351,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>>;
public async upsert<T, U>(
@@ -371,7 +359,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>>;
public async upsert<T>(
@@ -379,7 +367,7 @@ export class ClientContext {
path: string,
type: ResourceType,
id: string,
initialHeaders: IHeaders,
initialHeaders: CosmosHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
try {
@@ -497,7 +485,7 @@ export class ClientContext {
return this.globalEndpointManager.getReadEndpoint();
}

private captureSessionToken(err: ErrorResponse, path: string, opType: string, resHeaders: IHeaders) {
private captureSessionToken(err: ErrorResponse, path: string, opType: string, resHeaders: CosmosHeaders) {
const request = this.getSessionParams(path); // TODO: any request
request.operationType = opType;
if (

@@ -1,13 +1,13 @@
import { Agent, AgentOptions } from "https";
import * as url from "url";
import { Database, Databases } from "./client/Database";
import { Offer, Offers } from "./client/Offer";
import { ClientContext } from "./ClientContext";
import { Constants, getPlatformDefaultHeaders, getUserAgent, parseConnectionPolicy } from "./common";
import { Constants } from "./common/constants";
import { parseConnectionPolicy } from "./common/helper";
import { getPlatformDefaultHeaders, getUserAgent } from "./common/platform";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DatabaseAccount } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import { CosmosResponse, RequestOptions } from "./request";
import { RequestOptions, ResourceResponse } from "./request";

/**
* Provides a client-side logical representation of the Azure Cosmos DB database account.
@@ -36,7 +36,7 @@ export class CosmosClient {
*
* @example Create a new database
* ```typescript
* const {body: databaseDefinition, database} = await client.databases.create({id: "<name here>"});
* const {resource: databaseDefinition, database} = await client.databases.create({id: "<name here>"});
* ```
*/
public readonly databases: Databases;
@@ -86,9 +86,9 @@ export class CosmosClient {
/**
* Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.)
*/
public async getDatabaseAccount(options?: RequestOptions): Promise<CosmosResponse<DatabaseAccount, CosmosClient>> {
public async getDatabaseAccount(options?: RequestOptions): Promise<ResourceResponse<DatabaseAccount>> {
const response = await this.clientContext.getDatabaseAccount(options);
return { body: response.result, headers: response.headers, ref: this };
return new ResourceResponse<DatabaseAccount>(response.result, response.headers, response.statusCode);
}

/**

@@ -1,6 +1,6 @@
import { AuthOptions } from "./auth";
import { ConnectionPolicy, ConsistencyLevel, QueryCompatibilityMode } from "./documents";
import { IHeaders } from "./queryExecutionContext/IHeaders";
import { CosmosHeaders } from "./queryExecutionContext/CosmosHeaders";

// We expose our own Agent interface to avoid taking a dependency on and leaking node types. This interface should mirror the node Agent interface
interface Agent {
@@ -26,7 +26,7 @@ export interface CosmosClientOptions {
* It can take any value from {@link ConsistencyLevel}.
*/
consistencyLevel?: keyof typeof ConsistencyLevel;
defaultHeaders?: IHeaders;
defaultHeaders?: CosmosHeaders;
/** An optional custom http(s) Agent to be used in NodeJS enironments
* Use an agent such as https://github.com/TooTallNate/node-proxy-agent if you need to connect to Cosmos via a proxy
*/

@@ -1,7 +1,7 @@
import { generateHeaders } from "@azure/cosmos-sign";
import { PermissionDefinition } from "./client";
import { Constants, getResourceIdFromPath, HTTPMethod, ResourceType } from "./common";
import { IHeaders } from "./queryExecutionContext";
import { CosmosHeaders } from "./queryExecutionContext";

/** @hidden */
export interface RequestInfo {
@@ -9,7 +9,7 @@ export interface RequestInfo {
path: string;
resourceId: string;
resourceType: ResourceType;
headers: IHeaders;
headers: CosmosHeaders;
}

export type TokenProvider = (requestInfo: RequestInfo) => Promise<string>;
@@ -37,7 +37,7 @@ export async function setAuthorizationHeader(
path: string,
resourceId: string,
resourceType: ResourceType,
headers: IHeaders
headers: CosmosHeaders
): Promise<void> {
if (authOptions.permissionFeed) {
authOptions.resourceTokens = {};
@@ -71,7 +71,7 @@ export function setAuthorizationTokenHeaderUsingMasterKey(
verb: HTTPMethod,
resourceId: string,
resourceType: ResourceType,
headers: IHeaders,
headers: CosmosHeaders,
masterKey: string
) {
// TODO This should live in cosmos-sign

@@ -37,7 +37,7 @@ export class Conflict {
const id = getIdFromLink(this.url);

const response = await this.clientContext.read<ConflictDefinition>(path, ResourceType.user, id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, conflict: this };
return new ConflictResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -55,6 +55,6 @@ export class Conflict {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, conflict: this };
return new ConflictResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { Conflict } from "./Conflict";
import { ConflictDefinition } from "./ConflictDefinition";

export interface ConflictResponse extends CosmosResponse<ConflictDefinition & Resource, Conflict> {
export class ConflictResponse extends ResourceResponse<ConflictDefinition & Resource> {
constructor(resource: ConflictDefinition & Resource, headers: CosmosHeaders, statusCode: number, conflict: Conflict) {
super(resource, headers, statusCode);
this.conflict = conflict;
}
/** A reference to the {@link Conflict} corresponding to the returned {@link ConflictDefinition}. */
conflict: Conflict;
public readonly conflict: Conflict;
}

@@ -10,7 +10,7 @@ import {
import { PartitionKeyDefinition } from "../../documents";
import { PartitionKey } from "../../index";
import { QueryIterator } from "../../queryIterator";
import { CosmosResponse, FeedOptions, RequestOptions } from "../../request";
import { FeedOptions, RequestOptions, ResourceResponse } from "../../request";
import { Conflict, Conflicts } from "../Conflict";
import { Database } from "../Database";
import { Item, Items } from "../Item";
@@ -32,6 +32,7 @@ import { PartitionKeyRange } from "./PartitionKeyRange";
* do this once on application start up.
*/
export class Container {
private $items: Items;
/**
* Operations for creating new items, and reading/querying all items
*
@@ -42,27 +43,64 @@ export class Container {
* const {body: createdItem} = await container.items.create({id: "<item id>", properties: {}});
* ```
*/
public readonly items: Items;
public get items(): Items {
if (!this.$items) {
this.$items = new Items(this, this.clientContext);
}
return this.$items;
}

private $sprocs: StoredProcedures;
/**
* Operations for creating new stored procedures, and reading/querying all stored procedures.
*
* For reading, replacing, or deleting an existing stored procedure, use `.storedProcedure(id)`.
*/
public readonly storedProcedures: StoredProcedures;
public get storedProcedures(): StoredProcedures {
if (!this.$sprocs) {
this.$sprocs = new StoredProcedures(this, this.clientContext);
}
return this.$sprocs;
}

private $triggers: Triggers;
/**
* Operations for creating new triggers, and reading/querying all triggers.
*
* For reading, replacing, or deleting an existing trigger, use `.trigger(id)`.
*/
public readonly triggers: Triggers;
protected get __triggers(): Triggers {
if (!this.$triggers) {
this.$triggers = new Triggers(this, this.clientContext);
}
return this.$triggers;
}

private $udfs: UserDefinedFunctions;
/**
* Operations for creating new user defined functions, and reading/querying all user defined functions.
*
* For reading, replacing, or deleting an existing user defined function, use `.userDefinedFunction(id)`.
*/
public readonly userDefinedFunctions: UserDefinedFunctions;
protected get __userDefinedFunctions(): UserDefinedFunctions {
if (!this.$udfs) {
this.$udfs = new UserDefinedFunctions(this, this.clientContext);
}
return this.$udfs;
}

public readonly conflicts: Conflicts;
private $conflicts: Conflicts;
/**
* Opertaions for reading and querying conflicts for the given container.
*
* For reading or deleting a specific conflict, use `.conflict(id)`.
*/
public get conflicts(): Conflicts {
if (!this.$conflicts) {
this.$conflicts = new Conflicts(this, this.clientContext);
}
return this.$conflicts;
}

/**
* Returns a reference URL to the resource. Used for linking in Permissions.
@@ -81,13 +119,7 @@ export class Container {
public readonly database: Database,
public readonly id: string,
private readonly clientContext: ClientContext
) {
this.items = new Items(this, this.clientContext);
this.storedProcedures = new StoredProcedures(this, this.clientContext);
this.triggers = new Triggers(this, this.clientContext);
this.userDefinedFunctions = new UserDefinedFunctions(this, this.clientContext);
this.conflicts = new Conflicts(this, this.clientContext);
}
) {}

/**
* Used to read, replace, or delete a specific, existing {@link Item} by id.
@@ -109,7 +141,7 @@ export class Container {
* Use `.userDefinedFunctions` for creating new user defined functions, or querying/reading all user defined functions.
* @param id The id of the {@link UserDefinedFunction}.
*/
public userDefinedFunction(id: string): UserDefinedFunction {
protected __userDefinedFunction(id: string): UserDefinedFunction {
return new UserDefinedFunction(this, id, this.clientContext);
}

@@ -139,7 +171,7 @@ export class Container {
* Use `.triggers` for creating new triggers, or querying/reading all triggers.
* @param id The id of the {@link Trigger}.
*/
public trigger(id: string): Trigger {
protected __trigger(id: string): Trigger {
return new Trigger(this, id, this.clientContext);
}

@@ -156,12 +188,7 @@ export class Container {
options
);
this.clientContext.partitionKeyDefinitionCache[this.url] = response.result.partitionKey;
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
return new ContainerResponse(response.result, response.headers, response.statusCode, this);
}

/** Replace the container's definition */
@@ -182,12 +209,7 @@ export class Container {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
return new ContainerResponse(response.result, response.headers, response.statusCode, this);
}

/** Delete the container */
@@ -202,12 +224,7 @@ export class Container {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
return new ContainerResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -217,22 +234,23 @@ export class Container {
* @param {function} callback - \
* The arguments to the callback are(in order): error, partitionKeyDefinition, response object and response headers
*/
public async getPartitionKeyDefinition(): Promise<CosmosResponse<PartitionKeyDefinition, Container>> {
public async getPartitionKeyDefinition(): Promise<ResourceResponse<PartitionKeyDefinition>> {
// $ISSUE-felixfan-2016-03-17: Make name based path and link based path use the same key
// $ISSUE-felixfan-2016-03-17: Refresh partitionKeyDefinitionCache when necessary
if (this.url in this.clientContext.partitionKeyDefinitionCache) {
return {
body: this.clientContext.partitionKeyDefinitionCache[this.url],
ref: this
};
return new ResourceResponse<PartitionKeyDefinition>(
this.clientContext.partitionKeyDefinitionCache[this.url],
{},
0
);
}

const { headers } = await this.read();
return {
body: this.clientContext.partitionKeyDefinitionCache[this.url],
const { headers, statusCode } = await this.read();
return new ResourceResponse<PartitionKeyDefinition>(
this.clientContext.partitionKeyDefinitionCache[this.url],
headers,
ref: this
};
statusCode
);
}

public readPartitionKeyRanges(feedOptions?: FeedOptions): QueryIterator<PartitionKeyRange> {

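As an aside on the Container hunk above: the eagerly constructed sub-clients become lazily initialized getters, so the public surface is unchanged for callers. A minimal sketch of that pattern in isolation, using hypothetical stand-in types rather than the real Items/StoredProcedures classes:

```typescript
// Hypothetical stand-ins; the real classes are Items, StoredProcedures, etc. above.
class SubClient {
  constructor(public readonly owner: Owner) {}
}

class Owner {
  private $sub?: SubClient;

  // The sub-client is only constructed on first access, then cached,
  // which is the shape of each getter added to Container in this diff.
  public get sub(): SubClient {
    if (!this.$sub) {
      this.$sub = new SubClient(this);
    }
    return this.$sub;
  }
}
```
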
@@ -1,10 +1,20 @@
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { ContainerDefinition } from "./ContainerDefinition";
import { Container } from "./index";

/** Response object for Container operations */
export interface ContainerResponse extends CosmosResponse<ContainerDefinition & Resource, Container> {
export class ContainerResponse extends ResourceResponse<ContainerDefinition & Resource> {
constructor(
resource: ContainerDefinition & Resource,
headers: CosmosHeaders,
statusCode: number,
container: Container
) {
super(resource, headers, statusCode);
this.container = container;
}
/** A reference to the {@link Container} that the returned {@link ContainerDefinition} corresponds to. */
container: Container;
public readonly container: Container;
}

@@ -1,6 +1,6 @@
import { ClientContext } from "../../ClientContext";
import { Constants, getIdFromLink, getPathFromLink, isResourceValid, ResourceType, StatusCodes } from "../../common";
import { IHeaders, mergeHeaders, SqlQuerySpec } from "../../queryExecutionContext";
import { CosmosHeaders, mergeHeaders, SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions } from "../../request";
import { Database } from "../Database";
@@ -97,7 +97,7 @@ export class Containers {
}
const path = getPathFromLink(this.database.url, ResourceType.container);
const id = getIdFromLink(this.database.url);
let initialHeaders: IHeaders;
let initialHeaders: CosmosHeaders;

if (body.throughput) {
initialHeaders = { [Constants.HttpHeaders.OfferThroughput]: body.throughput };
@@ -113,12 +113,7 @@ export class Containers {
options
);
const ref = new Container(this.database, response.result.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
container: ref
};
return new ContainerResponse(response.result, response.headers, response.statusCode, ref);
}

/**

@@ -86,12 +86,7 @@ export class Database {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
database: this
};
return new DatabaseResponse(response.result, response.headers, response.statusCode, this);
}

/** Delete the given Database. */
@@ -105,11 +100,6 @@ export class Database {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
database: this
};
return new DatabaseResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,10 +1,15 @@
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { Database } from "./Database";
import { DatabaseDefinition } from "./DatabaseDefinition";

/** Response object for Database operations */
export interface DatabaseResponse extends CosmosResponse<DatabaseDefinition & Resource, Database> {
export class DatabaseResponse extends ResourceResponse<DatabaseDefinition & Resource> {
constructor(resource: DatabaseDefinition & Resource, headers: CosmosHeaders, statusCode: number, database: Database) {
super(resource, headers, statusCode);
this.database = database;
}
/** A reference to the {@link Database} that the returned {@link DatabaseDefinition} corresponds to. */
database: Database;
public readonly database: Database;
}

@@ -2,7 +2,7 @@ import { ClientContext } from "../../ClientContext";
import { Constants, isResourceValid, ResourceType, StatusCodes } from "../../common";
import { CosmosClient } from "../../CosmosClient";
import { FetchFunctionCallback, mergeHeaders, SqlQuerySpec } from "../../queryExecutionContext";
import { IHeaders } from "../../queryExecutionContext/IHeaders";
import { CosmosHeaders } from "../../queryExecutionContext/CosmosHeaders";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions } from "../../request";
import { Resource } from "../Resource";
@@ -96,7 +96,7 @@ export class Databases {
throw err;
}

let initialHeaders: IHeaders;
let initialHeaders: CosmosHeaders;

if (body.throughput) {
initialHeaders = { [Constants.HttpHeaders.OfferThroughput]: body.throughput };
@@ -113,12 +113,7 @@ export class Databases {
options
);
const ref = new Database(this.client, body.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
database: ref
};
return new DatabaseResponse(response.result, response.headers, response.statusCode, ref);
}

/**

@@ -76,12 +76,7 @@ export class Item {
const id = getIdFromLink(this.url);
const response = await this.clientContext.read<T>(path, ResourceType.item, id, undefined, options);

return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
return new ItemResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -111,7 +106,7 @@ export class Item {
options.partitionKey = this.primaryKey;
}
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
const { body: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
options.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
}

@@ -124,12 +119,7 @@ export class Item {
const id = getIdFromLink(this.url);

const response = await this.clientContext.replace<T>(body, path, ResourceType.item, id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
return new ItemResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -155,11 +145,6 @@ export class Item {
const id = getIdFromLink(this.url);

const response = await this.clientContext.delete<T>(path, ResourceType.item, id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
return new ItemResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { Item } from "./Item";
import { ItemDefinition } from "./ItemDefinition";

export interface ItemResponse<T extends ItemDefinition> extends CosmosResponse<T & Resource, Item> {
export class ItemResponse<T extends ItemDefinition> extends ResourceResponse<T & Resource> {
constructor(resource: T & Resource, headers: CosmosHeaders, statusCode: number, item: Item) {
super(resource, headers, statusCode);
this.item = item;
}
/** Reference to the {@link Item} the response corresponds to. */
item: Item;
public readonly item: Item;
}

@@ -144,7 +144,7 @@ export class Items {
path,
partitionKey,
async () => {
const bodyWillBeTruthyIfPartitioned = (await this.container.getPartitionKeyDefinition()).body;
const bodyWillBeTruthyIfPartitioned = (await this.container.getPartitionKeyDefinition()).resource;
return !!bodyWillBeTruthyIfPartitioned;
},
changeFeedOptions
@@ -205,7 +205,7 @@ export class Items {
public async create<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>>;
public async create<T extends ItemDefinition>(body: T, options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
const { body: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
options.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
}

@@ -231,12 +231,7 @@ export class Items {
(options && options.partitionKey) as string,
this.clientContext
);
return {
body: response.result,
headers: response.headers,
ref,
item: ref
};
return new ItemResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -262,7 +257,7 @@ export class Items {
public async upsert<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>>;
public async upsert<T extends ItemDefinition>(body: T, options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
const { body: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
options.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
}

@@ -289,11 +284,6 @@ export class Items {
(options && options.partitionKey) as string,
this.clientContext
);
return {
body: response.result,
headers: response.headers,
ref,
item: ref
};
return new ItemResponse(response.result, response.headers, response.statusCode, ref);
}
}

@@ -40,7 +40,7 @@ export class Offer {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, offer: this };
return new OfferResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -61,6 +61,6 @@ export class Offer {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, offer: this };
return new OfferResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { Offer } from "./Offer";
import { OfferDefinition } from "./OfferDefinition";

export interface OfferResponse extends CosmosResponse<OfferDefinition & Resource, Offer> {
export class OfferResponse extends ResourceResponse<OfferDefinition & Resource> {
constructor(resource: OfferDefinition & Resource, headers: CosmosHeaders, statusCode: number, offer: Offer) {
super(resource, headers, statusCode);
this.offer = offer;
}
/** A reference to the {@link Offer} corresponding to the returned {@link OfferDefinition}. */
offer: Offer;
public readonly offer: Offer;
}

@@ -40,12 +40,7 @@ export class Permission {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
permission: this
};
return new PermissionResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -70,12 +65,7 @@ export class Permission {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
permission: this
};
return new PermissionResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -93,11 +83,6 @@ export class Permission {
undefined,
options
);
return {
body: response.result,
headers: response.headers,
ref: this,
permission: this
};
return new PermissionResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,11 +1,20 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { Permission } from "./Permission";
import { PermissionBody } from "./PermissionBody";
import { PermissionDefinition } from "./PermissionDefinition";

export interface PermissionResponse
extends CosmosResponse<PermissionDefinition & PermissionBody & Resource, Permission> {
export class PermissionResponse extends ResourceResponse<PermissionDefinition & PermissionBody & Resource> {
constructor(
resource: PermissionDefinition & PermissionBody & Resource,
headers: CosmosHeaders,
statusCode: number,
permission: Permission
) {
super(resource, headers, statusCode);
this.permission = permission;
}
/** A reference to the {@link Permission} corresponding to the returned {@link PermissionDefinition}. */
permission: Permission;
public readonly permission: Permission;
}

@@ -87,12 +87,7 @@ export class Permissions {
options
);
const ref = new Permission(this.user, response.result.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
permission: ref
};
return new PermissionResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -119,11 +114,6 @@ export class Permissions {
options
);
const ref = new Permission(this.user, response.result.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
permission: ref
};
return new PermissionResponse(response.result, response.headers, response.statusCode, ref);
}
}

@@ -1,6 +1,6 @@
import { ClientContext } from "../../ClientContext";
import { createStoredProcedureUri, getIdFromLink, getPathFromLink, isResourceValid, ResourceType } from "../../common";
import { CosmosResponse, RequestOptions } from "../../request";
import { RequestOptions, ResourceResponse } from "../../request";
import { Container } from "../Container";
import { StoredProcedureDefinition } from "./StoredProcedureDefinition";
import { StoredProcedureResponse } from "./StoredProcedureResponse";
@@ -43,8 +43,7 @@ export class StoredProcedure {
undefined,
options
);

return { body: response.result, headers: response.headers, ref: this, storedProcedure: this, sproc: this };
return new StoredProcedureResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -73,8 +72,7 @@ export class StoredProcedure {
undefined,
options
);

return { body: response.result, headers: response.headers, ref: this, storedProcedure: this, sproc: this };
return new StoredProcedureResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -92,7 +90,7 @@ export class StoredProcedure {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, storedProcedure: this, sproc: this };
return new StoredProcedureResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -100,7 +98,7 @@ export class StoredProcedure {
* @param params Array of parameters to pass as arguments to the given {@link StoredProcedure}.
* @param options Additional options, such as the partition key to invoke the {@link StoredProcedure} on.
*/
public async execute(params?: any[], options?: RequestOptions): Promise<CosmosResponse<any, StoredProcedure>>;
public async execute(params?: any[], options?: RequestOptions): Promise<ResourceResponse<any>>;
/**
* Execute the given {@link StoredProcedure}.
*
@@ -110,9 +108,9 @@ export class StoredProcedure {
* @param params Array of parameters to pass as arguments to the given {@link StoredProcedure}.
* @param options Additional options, such as the partition key to invoke the {@link StoredProcedure} on.
*/
public async execute<T>(params?: any[], options?: RequestOptions): Promise<CosmosResponse<T, StoredProcedure>>;
public async execute<T>(params?: any[], options?: RequestOptions): Promise<CosmosResponse<T, StoredProcedure>> {
public async execute<T>(params?: any[], options?: RequestOptions): Promise<ResourceResponse<T>>;
public async execute<T>(params?: any[], options?: RequestOptions): Promise<ResourceResponse<T>> {
const response = await this.clientContext.execute<T>(this.url, params, options);
return { body: response.result, headers: response.headers, ref: this };
return new ResourceResponse<T>(response.result, response.headers, response.statusCode);
}
}

@@ -1,18 +1,30 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { StoredProcedure } from "./StoredProcedure";
import { StoredProcedureDefinition } from "./StoredProcedureDefinition";

export interface StoredProcedureResponse extends CosmosResponse<StoredProcedureDefinition & Resource, StoredProcedure> {
export class StoredProcedureResponse extends ResourceResponse<StoredProcedureDefinition & Resource> {
constructor(
resource: StoredProcedureDefinition & Resource,
headers: CosmosHeaders,
statusCode: number,
storedProcedure: StoredProcedure
) {
super(resource, headers, statusCode);
this.storedProcedure = storedProcedure;
}
/**
* A reference to the {@link StoredProcedure} which the {@link StoredProcedureDefinition} corresponds to.
*/
storedProcedure: StoredProcedure;
public readonly storedProcedure: StoredProcedure;

/**
* Alias for storedProcedure.
*
* A reference to the {@link StoredProcedure} which the {@link StoredProcedureDefinition} corresponds to.
*/
sproc: StoredProcedure;
public get sproc(): StoredProcedure {
return this.storedProcedure;
}
}

@@ -112,7 +112,7 @@ export class StoredProcedures {
options
);
const ref = new StoredProcedure(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, storedProcedure: ref, sproc: ref };
return new StoredProcedureResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -147,6 +147,6 @@ export class StoredProcedures {
options
);
const ref = new StoredProcedure(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, storedProcedure: ref, sproc: ref };
return new StoredProcedureResponse(response.result, response.headers, response.statusCode, ref);
}
}

@@ -49,7 +49,7 @@ export class Trigger {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, trigger: this };
return new TriggerResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -78,8 +78,7 @@ export class Trigger {
undefined,
options
);

return { body: response.result, headers: response.headers, ref: this, trigger: this };
return new TriggerResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -97,7 +96,6 @@ export class Trigger {
undefined,
options
);

return { body: response.result, headers: response.headers, ref: this, trigger: this };
return new TriggerResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { Trigger } from "./index";
import { TriggerDefinition } from "./TriggerDefinition";

export interface TriggerResponse extends CosmosResponse<TriggerDefinition & Resource, Trigger> {
export class TriggerResponse extends ResourceResponse<TriggerDefinition & Resource> {
constructor(resource: TriggerDefinition & Resource, headers: CosmosHeaders, statusCode: number, trigger: Trigger) {
super(resource, headers, statusCode);
this.trigger = trigger;
}
/** A reference to the {@link Trigger} corresponding to the returned {@link TriggerDefinition}. */
trigger: Trigger;
public readonly trigger: Trigger;
}

@@ -92,7 +92,7 @@ export class Triggers {
options
);
const ref = new Trigger(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, trigger: ref };
return new TriggerResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -127,6 +127,6 @@ export class Triggers {
options
);
const ref = new Trigger(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, trigger: ref };
return new TriggerResponse(response.result, response.headers, response.statusCode, ref);
}
}

@@ -57,7 +57,7 @@ export class User {
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.read<UserDefinition>(path, ResourceType.user, id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, user: this };
return new UserResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -82,7 +82,7 @@ export class User {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, user: this };
return new UserResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -94,6 +94,6 @@ export class User {
const id = getIdFromLink(this.url);

const response = await this.clientContext.delete<UserDefinition>(path, ResourceType.user, id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, user: this };
return new UserResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { User } from "./User";
import { UserDefinition } from "./UserDefinition";

export interface UserResponse extends CosmosResponse<UserDefinition & Resource, User> {
export class UserResponse extends ResourceResponse<UserDefinition & Resource> {
constructor(resource: UserDefinition & Resource, headers: CosmosHeaders, statusCode: number, user: User) {
super(resource, headers, statusCode);
this.user = user;
}
/** A reference to the {@link User} corresponding to the returned {@link UserDefinition}. */
user: User;
public readonly user: User;
}

@@ -76,7 +76,7 @@ export class Users {
options
);
const ref = new User(this.database, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, user: ref };
return new UserResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -102,6 +102,6 @@ export class Users {
options
);
const ref = new User(this.database, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, user: ref };
return new UserResponse(response.result, response.headers, response.statusCode, ref);
}
}

@@ -49,7 +49,7 @@ export class UserDefinedFunction {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, userDefinedFunction: this, udf: this };
return new UserDefinedFunctionResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -81,7 +81,7 @@ export class UserDefinedFunction {
undefined,
options
);
return { body: response.result, headers: response.headers, ref: this, userDefinedFunction: this, udf: this };
return new UserDefinedFunctionResponse(response.result, response.headers, response.statusCode, this);
}

/**
@@ -93,6 +93,6 @@ export class UserDefinedFunction {
const id = getIdFromLink(this.url);

const response = await this.clientContext.delete(path, ResourceType.udf, id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, userDefinedFunction: this, udf: this };
return new UserDefinedFunctionResponse(response.result, response.headers, response.statusCode, this);
}
}

@@ -1,16 +1,27 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { UserDefinedFunction } from "./UserDefinedFunction";
import { UserDefinedFunctionDefinition } from "./UserDefinedFunctionDefinition";

export interface UserDefinedFunctionResponse
extends CosmosResponse<UserDefinedFunctionDefinition & Resource, UserDefinedFunction> {
export class UserDefinedFunctionResponse extends ResourceResponse<UserDefinedFunctionDefinition & Resource> {
constructor(
resource: UserDefinedFunctionDefinition & Resource,
headers: CosmosHeaders,
statusCode: number,
udf: UserDefinedFunction
) {
super(resource, headers, statusCode);
this.userDefinedFunction = udf;
}
/** A reference to the {@link UserDefinedFunction} corresponding to the returned {@link UserDefinedFunctionDefinition}. */
userDefinedFunction: UserDefinedFunction;
public readonly userDefinedFunction: UserDefinedFunction;
/**
* Alias for `userDefinedFunction(id).
*
* A reference to the {@link UserDefinedFunction} corresponding to the returned {@link UserDefinedFunctionDefinition}.
*/
udf: UserDefinedFunction;
public get udf(): UserDefinedFunction {
return this.userDefinedFunction;
}
}

@@ -94,7 +94,7 @@ export class UserDefinedFunctions {
options
);
const ref = new UserDefinedFunction(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, userDefinedFunction: ref, udf: ref };
return new UserDefinedFunctionResponse(response.result, response.headers, response.statusCode, ref);
}

/**
@@ -130,6 +130,6 @@ export class UserDefinedFunctions {
options
);
const ref = new UserDefinedFunction(this.container, response.result.id, this.clientContext);
return { body: response.result, headers: response.headers, ref, userDefinedFunction: ref, udf: ref };
return new UserDefinedFunctionResponse(response.result, response.headers, response.statusCode, ref);
}
}

@ -1,7 +1,7 @@
import { ConnectionPolicy } from "../documents";
import { IHeaders } from "../index";
import { ConnectionPolicy } from "../documents/ConnectionPolicy";
import { CosmosHeaders } from "../queryExecutionContext/CosmosHeaders";
import { RequestContext } from "../request/RequestContext";
import { Constants } from "./index";
import { Constants } from "./constants";

/** @hidden */
const Regexes = Constants.RegularExpressions;

@ -98,16 +98,16 @@ export function getHexaDigit() {
return Math.floor(Math.random() * 16).toString(16);
}

export function setIsUpsertHeader(headers: IHeaders) {
export function setIsUpsertHeader(headers: CosmosHeaders) {
if (headers === undefined || headers === null) {
throw new Error('The "headers" parameter must not be null or undefined');
}

if (!(headers instanceof Object)) {
throw new Error(`The "headers" parameter must be an instance of "Object". Actual type is: "${typeof headers}".`);
if (typeof headers !== "object") {
throw new Error('The "headers" parameter must be an instance of "Object". Actual type is: "string".');
}

(headers as IHeaders)[Constants.HttpHeaders.IsUpsert] = true;
headers[Constants.HttpHeaders.IsUpsert] = true;
}

// TODO: replace with well known library?

@ -1,5 +1,5 @@
import * as os from "os";
import { Constants } from "./index";
import { Constants } from "./constants";

/** @hidden */

@ -1,5 +1,5 @@
import { Constants } from "./constants";
import { trimSlashFromLeftAndRight, validateResourceId } from "./helper";
import { Constants } from "./index";

/** @hidden */
/**

@ -1,5 +1,6 @@
import { RetryOptions } from "../retry";
import { ConnectionMode, MediaReadMode } from "./index";
import { RetryOptions } from "../retry/retryOptions";
import { ConnectionMode } from "./ConnectionMode";
import { MediaReadMode } from "./MediaReadMode";
/**
* Represents the Connection policy associated with a CosmosClient in the Azure Cosmos DB database service.
*/

@ -1,6 +1,6 @@
import { Constants } from "../common";
import { IHeaders } from "../queryExecutionContext";
import { ConsistencyLevel } from "./index";
import { CosmosHeaders } from "../queryExecutionContext";
import { ConsistencyLevel } from "./ConsistencyLevel";

/**
* Represents a DatabaseAccount in the Azure Cosmos DB database service.

@ -28,7 +28,7 @@ export class DatabaseAccount {
public readonly enableMultipleWritableLocations: boolean;

// TODO: body - any
public constructor(body: { [key: string]: any }, headers: IHeaders) {
public constructor(body: { [key: string]: any }, headers: CosmosHeaders) {
this.DatabasesLink = "/dbs/";
this.MediaLink = "/media/";
this.MaxMediaStorageUsageInMB = headers[Constants.HttpHeaders.MaxMediaStorageUsageInMB];

@ -1,11 +1,10 @@
import * as url from "url";
import { Constants, sleep } from "./common";
import { CosmosClient } from "./CosmosClient";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DatabaseAccount } from "./documents";
import { RequestOptions } from "./index";
import { LocationCache } from "./LocationCache";
import { CosmosResponse } from "./request";
import { ResourceResponse } from "./request";
import { RequestContext } from "./request/RequestContext";

/**

@ -33,7 +32,7 @@ export class GlobalEndpointManager {
*/
constructor(
options: CosmosClientOptions,
private readDatabaseAccount: (opts: RequestOptions) => Promise<CosmosResponse<DatabaseAccount, CosmosClient>>
private readDatabaseAccount: (opts: RequestOptions) => Promise<ResourceResponse<DatabaseAccount>>
) {
this.defaultEndpoint = options.endpoint;
this.enableEndpointDiscovery = options.connectionPolicy.EnableEndpointDiscovery;

@ -158,7 +157,7 @@ export class GlobalEndpointManager {
private async getDatabaseAccountFromAnyEndpoint(): Promise<DatabaseAccount> {
try {
const options = { urlConnection: this.defaultEndpoint };
const { body: databaseAccount } = await this.readDatabaseAccount(options);
const { resource: databaseAccount } = await this.readDatabaseAccount(options);
return databaseAccount;
// If for any reason(non - globaldb related), we are not able to get the database
// account from the above call to readDatabaseAccount,

@ -175,7 +174,7 @@ export class GlobalEndpointManager {
try {
const locationalEndpoint = GlobalEndpointManager.getLocationalEndpoint(this.defaultEndpoint, location);
const options = { urlConnection: locationalEndpoint };
const { body: databaseAccount } = await this.readDatabaseAccount(options);
const { resource: databaseAccount } = await this.readDatabaseAccount(options);
if (databaseAccount) {
return databaseAccount;
}

@ -27,7 +27,7 @@ export { UniqueKeyPolicy, UniqueKey } from "./client/Container/UniqueKeyPolicy";
export { Constants } from "./common";
export { RetryOptions } from "./retry";
export { Response, RequestOptions, FeedOptions, MediaOptions, ErrorResponse } from "./request";
export { IHeaders, SqlParameter, SqlQuerySpec } from "./queryExecutionContext";
export { CosmosHeaders, SqlParameter, SqlQuerySpec } from "./queryExecutionContext";
export { QueryIterator } from "./queryIterator";
export * from "./queryMetrics";
export { CosmosClient } from "./CosmosClient";

@ -1,3 +1,3 @@
export interface IHeaders {
export interface CosmosHeaders {
[key: string]: string | boolean | number;
}

@ -1,7 +1,7 @@
import { Response } from "../../request/request";
import { AverageAggregator, CountAggregator, MaxAggregator, MinAggregator, SumAggregator } from "../Aggregators";
import { IExecutionContext } from "../IExecutionContext";
import { IHeaders } from "../index";
import { CosmosHeaders } from "../index";
import { IEndpointComponent } from "./IEndpointComponent";

/** @hidden */

@ -105,7 +105,7 @@ export class AggregateEndpointComponent implements IEndpointComponent {
*/
public async nextItem(): Promise<Response<any>> {
try {
let resHeaders: IHeaders;
let resHeaders: CosmosHeaders;
let resources: any;
if (this.aggregateValues === undefined) {
({ result: resources, headers: resHeaders } = await this._getAggregateResult());

@ -4,7 +4,7 @@ import { FeedOptions } from "../request";
import { Response } from "../request/request";
import { DefaultQueryExecutionContext } from "./defaultQueryExecutionContext";
import { FetchResult, FetchResultType } from "./FetchResult";
import { getInitialHeader, IHeaders, mergeHeaders } from "./headerUtils";
import { CosmosHeaders, getInitialHeader, mergeHeaders } from "./headerUtils";
import { FetchFunctionCallback, SqlQuerySpec } from "./index";

/** @hidden */

@ -31,7 +31,7 @@ export class DocumentProducer {
private err: Error;
public previousContinuationToken: string;
public continuationToken: string;
private respHeaders: IHeaders;
private respHeaders: CosmosHeaders;
private internalExecutionContext: DefaultQueryExecutionContext;

/**

@ -1,13 +1,13 @@
import { Constants } from "../common";
import { QueryMetrics } from "../queryMetrics";

export interface IHeaders {
export interface CosmosHeaders {
[key: string]: any;
}

/** @hidden */
// TODO: docs
export function getRequestChargeIfAny(headers: IHeaders): number {
export function getRequestChargeIfAny(headers: CosmosHeaders): number {
if (typeof headers === "number") {
return headers;
} else if (typeof headers === "string") {

@ -26,15 +26,15 @@ export function getRequestChargeIfAny(headers: IHeaders): number {
}
}

export function getInitialHeader(): IHeaders {
const headers: IHeaders = {};
export function getInitialHeader(): CosmosHeaders {
const headers: CosmosHeaders = {};
headers[Constants.HttpHeaders.RequestCharge] = 0;
headers[Constants.HttpHeaders.QueryMetrics] = {};
return headers;
}

// TODO: The name of this method isn't very accurate to what it does
export function mergeHeaders(headers: IHeaders, toBeMergedHeaders: IHeaders) {
export function mergeHeaders(headers: CosmosHeaders, toBeMergedHeaders: CosmosHeaders) {
if (headers[Constants.HttpHeaders.RequestCharge] === undefined) {
headers[Constants.HttpHeaders.RequestCharge] = 0;
}

@ -1,11 +1,9 @@
import { ClientContext } from "../ClientContext";
import {
DocumentProducer,
IExecutionContext,
OrderByDocumentProducerComparator,
ParallelQueryExecutionContextBase,
PartitionedQueryExecutionContextInfo
} from "./index";
import { DocumentProducer } from "./documentProducer";
import { IExecutionContext } from "./IExecutionContext";
import { OrderByDocumentProducerComparator } from "./orderByDocumentProducerComparator";
import { ParallelQueryExecutionContextBase } from "./parallelQueryExecutionContextBase";
import { PartitionedQueryExecutionContextInfo } from "./partitionedQueryExecutionContextInfoParser";

/** @hidden */
export class OrderByQueryExecutionContext extends ParallelQueryExecutionContextBase implements IExecutionContext {

@ -1,11 +1,9 @@
import { ClientContext } from "../ClientContext";
import { PARITIONKEYRANGE } from "../routing";
import {
DocumentProducer,
IExecutionContext,
ParallelQueryExecutionContextBase,
PartitionedQueryExecutionContextInfo
} from "./index";
import { PARITIONKEYRANGE } from "../routing/smartRoutingMapProvider";
import { DocumentProducer } from "./documentProducer";
import { IExecutionContext } from "./IExecutionContext";
import { ParallelQueryExecutionContextBase } from "./parallelQueryExecutionContextBase";
import { PartitionedQueryExecutionContextInfo } from "./partitionedQueryExecutionContextInfoParser";

/** @hidden */
export class ParallelQueryExecutionContext extends ParallelQueryExecutionContextBase implements IExecutionContext {

@ -2,11 +2,15 @@ import * as bs from "binary-search-bounds";
import PriorityQueue from "priorityqueuejs";
import semaphore from "semaphore";
import { ClientContext } from "../ClientContext";
import { StatusCodes, SubStatusCodes } from "../common";
import { StatusCodes, SubStatusCodes } from "../common/statusCodes";
import { Response } from "../request/request";
import { PARITIONKEYRANGE, QueryRange, SmartRoutingMapProvider } from "../routing";
import { QueryRange } from "../routing/QueryRange";
import { PARITIONKEYRANGE, SmartRoutingMapProvider } from "../routing/smartRoutingMapProvider";
import { CosmosHeaders } from "./CosmosHeaders";
import { DocumentProducer } from "./documentProducer";
import { getInitialHeader, mergeHeaders } from "./headerUtils";
import { DocumentProducer, IExecutionContext, IHeaders, PartitionedQueryExecutionContextInfo } from "./index";
import { IExecutionContext } from "./IExecutionContext";
import { PartitionedQueryExecutionContextInfo } from "./partitionedQueryExecutionContextInfoParser";
import * as PartitionedQueryExecutionContextInfoParser from "./partitionedQueryExecutionContextInfoParser";

/** @hidden */

@ -27,7 +31,7 @@ export abstract class ParallelQueryExecutionContextBase implements IExecutionCon
protected sortOrders: any;
private pageSize: any;
private requestContinuation: any;
private respHeaders: IHeaders;
private respHeaders: CosmosHeaders;
private orderByPQ: PriorityQueue<DocumentProducer>;
private sem: any;
private waitingForInternalExecutionContexts: number;

@ -213,7 +217,7 @@ export abstract class ParallelQueryExecutionContextBase implements IExecutionCon
}
}

private _mergeWithActiveResponseHeaders(headers: IHeaders) {
private _mergeWithActiveResponseHeaders(headers: CosmosHeaders) {
mergeHeaders(this.respHeaders, headers);
}

@ -403,7 +407,7 @@ export abstract class ParallelQueryExecutionContextBase implements IExecutionCon
}

let item: any;
let headers: IHeaders;
let headers: CosmosHeaders;
try {
const response = await documentProducer.nextItem();
item = response.result;

@ -1,5 +1,6 @@
import { ClientContext } from "../ClientContext";
import { Response } from "../request/request";
import { CosmosHeaders } from "./CosmosHeaders";
import {
AggregateEndpointComponent,
IEndpointComponent,

@ -7,19 +8,16 @@ import {
TopEndpointComponent
} from "./EndpointComponent";
import { getInitialHeader, mergeHeaders } from "./headerUtils";
import {
IExecutionContext,
IHeaders,
OrderByQueryExecutionContext,
ParallelQueryExecutionContext,
PartitionedQueryExecutionContextInfo
} from "./index";
import { IExecutionContext } from "./IExecutionContext";
import { OrderByQueryExecutionContext } from "./orderByQueryExecutionContext";
import { ParallelQueryExecutionContext } from "./parallelQueryExecutionContext";
import * as PartitionedQueryExecutionContextInfoParser from "./partitionedQueryExecutionContextInfoParser";
import { PartitionedQueryExecutionContextInfo } from "./partitionedQueryExecutionContextInfoParser";

/** @hidden */
export class PipelinedQueryExecutionContext implements IExecutionContext {
private fetchBuffer: any[];
private fetchMoreRespHeaders: IHeaders;
private fetchMoreRespHeaders: CosmosHeaders;
private endpoint: IEndpointComponent;
private pageSize: number;
private static DEFAULT_PAGE_SIZE = 10;

@ -1,14 +1,11 @@
import { ClientContext } from "../ClientContext";
import { StatusCodes, SubStatusCodes } from "../common";
import { StatusCodes, SubStatusCodes } from "../common/statusCodes";
import { Response } from "../request/request";
import {
DefaultQueryExecutionContext,
FetchFunctionCallback,
IExecutionContext,
PartitionedQueryExecutionContextInfo,
PipelinedQueryExecutionContext,
SqlQuerySpec
} from "./index";
import { DefaultQueryExecutionContext, FetchFunctionCallback } from "./defaultQueryExecutionContext";
import { IExecutionContext } from "./IExecutionContext";
import { PartitionedQueryExecutionContextInfo } from "./partitionedQueryExecutionContextInfoParser";
import { PipelinedQueryExecutionContext } from "./pipelinedQueryExecutionContext";
import { SqlQuerySpec } from "./SqlQuerySpec";

/** @hidden */
export class ProxyQueryExecutionContext implements IExecutionContext {

@ -1,14 +1,14 @@
/// <reference lib="esnext.asynciterable" />
import { ClientContext } from "./ClientContext";
import {
CosmosHeaders,
FetchFunctionCallback,
IExecutionContext,
IHeaders,
ProxyQueryExecutionContext,
SqlQuerySpec
} from "./queryExecutionContext";
import { FeedOptions } from "./request/FeedOptions";
import { Response } from "./request/request";
import { FeedResponse } from "./request/FeedResponse";

/**
* Represents a QueryIterator Object, an implementation of feed or query response that enables

@ -16,8 +16,8 @@ import { Response } from "./request/request";
* in the Azure Cosmos DB database service.
*/
export class QueryIterator<T> {
private toArrayTempResources: T[]; // TODO
private toArrayLastResHeaders: IHeaders;
private fetchAllTempResources: T[]; // TODO
private fetchAllLastResHeaders: CosmosHeaders;
private queryExecutionContext: IExecutionContext;
/**
* @hidden

@ -33,7 +33,7 @@ export class QueryIterator<T> {
this.fetchFunctions = fetchFunctions;
this.options = options;
this.resourceLink = resourceLink;
this.queryExecutionContext = this._createQueryExecutionContext();
this.queryExecutionContext = this.createQueryExecutionContext();
}

/**

@ -55,7 +55,9 @@ export class QueryIterator<T> {
* })
* ```
*/
public async forEach(callback: (result: T, headers?: IHeaders, index?: number) => boolean | void): Promise<void> {
public async forEach(
callback: (result: T, headers?: CosmosHeaders, index?: number) => boolean | void
): Promise<void> {
this.reset();
let index = 0;
while (this.queryExecutionContext.hasMoreResults()) {

@ -95,35 +97,23 @@ export class QueryIterator<T> {
* }
* ```
*/
public async *getAsyncIterator(): AsyncIterable<Response<T>> {
public async *getAsyncIterator(): AsyncIterable<FeedResponse<T>> {
this.reset();
while (this.queryExecutionContext.hasMoreResults()) {
const result = await this.queryExecutionContext.nextItem();
const result = await this.queryExecutionContext.fetchMore();
const feedResponse = new FeedResponse<T>(
result.result,
result.headers,
this.queryExecutionContext.hasMoreResults()
);
if (result.result === undefined) {
return;
}
yield result;
yield feedResponse;
}
}

/**
* Execute a provided function on the next element in the QueryIterator.
*/
public async nextItem(): Promise<Response<T>> {
return this.queryExecutionContext.nextItem();
}

/**
* Retrieve the current element on the QueryIterator.
*/
public async current(): Promise<Response<T>> {
return this.queryExecutionContext.current();
}

// TODO: why is has more results deprecated?
/**
* @deprecated Instead check if nextItem() or current() returns undefined.
*
* Determine if there are still remaining resources to process based on the value of the continuation token or the
* elements remaining on the current batch in the QueryIterator.
* @returns {Boolean} true if there are other elements to process in the QueryIterator.

@ -133,48 +123,55 @@ export class QueryIterator<T> {
}

/**
* Retrieve all the elements of the feed and pass them as an array to a function
* Fetch all pages for the query and return a single FeedResponse.
*/
public async toArray(): Promise<Response<T[]>> {
public async fetchAll(): Promise<FeedResponse<T>> {
this.reset();
this.toArrayTempResources = [];
return this._toArrayImplementation();
this.fetchAllTempResources = [];
return this.toArrayImplementation();
}

/**
* Retrieve the next batch of the feed and pass them as an array to a function
* Retrieve the next batch from the feed.
*
* This may or may not fetch more pages from the backend depending on your settings
* and the type of query. Aggregate queries will generally fetch all backend pages
* before returning the first batch of responses.
*/
public async executeNext(): Promise<Response<T[]>> {
return this.queryExecutionContext.fetchMore();
public async fetchNext(): Promise<FeedResponse<T>> {
const response = await this.queryExecutionContext.fetchMore();
return new FeedResponse<T>(response.result, response.headers, this.queryExecutionContext.hasMoreResults());
}

/**
* Reset the QueryIterator to the beginning and clear all the resources inside it
*/
public reset() {
this.queryExecutionContext = this._createQueryExecutionContext();
this.queryExecutionContext = this.createQueryExecutionContext();
}

private async _toArrayImplementation(): Promise<Response<T[]>> {
private async toArrayImplementation(): Promise<FeedResponse<T>> {
while (this.queryExecutionContext.hasMoreResults()) {
const { result, headers } = await this.queryExecutionContext.nextItem();
// concatenate the results and fetch more
this.toArrayLastResHeaders = headers;
this.fetchAllLastResHeaders = headers;

if (result === undefined) {
// no more results
break;
}

this.toArrayTempResources.push(result);
this.fetchAllTempResources.push(result);
}
return {
result: this.toArrayTempResources,
headers: this.toArrayLastResHeaders
};
return new FeedResponse(
this.fetchAllTempResources,
this.fetchAllLastResHeaders,
this.queryExecutionContext.hasMoreResults()
);
}

private _createQueryExecutionContext() {
private createQueryExecutionContext() {
return new ProxyQueryExecutionContext(
this.clientContext,
this.query,

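A hedged sketch of how the renamed QueryIterator surface above is meant to be consumed (assumes an existing `container`; not part of this commit):

// fetchAll() drains every page into a single FeedResponse (replaces toArray()).
const iterator = container.items.query("SELECT * FROM c");
const all = await iterator.fetchAll();
console.log(all.resources.length, all.requestCharge);

// fetchNext() replaces executeNext() and also returns a FeedResponse per page.
iterator.reset();
while (iterator.hasMoreResults()) {
  const page = await iterator.fetchNext();
  console.log(page.resources.length, page.continuation);
}

// getAsyncIterator() now yields FeedResponse objects instead of raw Response<T>.
for await (const feed of iterator.getAsyncIterator()) {
  console.log(feed.resources);
}
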
@ -1,5 +1,6 @@
import { Document, PartitionKey } from "../documents";
import { CompareFunction, Range } from "./index";
import { Document } from "../documents/Document";
import { PartitionKey } from "../documents/PartitionKey";
import { CompareFunction, Range } from "./Range";

/** @hidden */
export type PartitionKeyExtractorFunction = (obj: object) => PartitionKey;

@ -1,7 +0,0 @@
import { IHeaders } from "../queryExecutionContext";

export interface CosmosResponse<T, U> {
body?: T;
headers?: IHeaders;
ref?: U;
}

@ -1,10 +1,10 @@
import { IHeaders } from "../index";
import { CosmosHeaders } from "../index";

export interface ErrorResponse {
code?: number;
substatus?: number;
body?: any;
headers?: IHeaders;
headers?: CosmosHeaders;
activityId?: string;
retryAfterInMilliseconds?: number;
[key: string]: any;

@ -1,4 +1,4 @@
import { IHeaders } from "../index";
import { CosmosHeaders } from "../index";

/**
* The feed options and query methods.

@ -32,9 +32,9 @@ export interface FeedOptions {
/** Token for use with Session consistency. */
sessionToken?: string;
/** (Advanced use case) Initial headers to start with when sending requests to Cosmos */
initialHeaders?: IHeaders;
initialHeaders?: CosmosHeaders;
/** Indicates a change feed request. Must be set to "Incremental feed", or omitted otherwise. */
a_im?: string;
useIncrementalFeed?: boolean;
/** Conditions Associated with the request. */
accessCondition?: {
/** Conditional HTTP method header type (IfMatch or IfNoneMatch). */

@ -0,0 +1,19 @@
import { Constants } from "../common";
import { CosmosHeaders } from "../queryExecutionContext";

export class FeedResponse<TResource> {
constructor(
public readonly resources: TResource[],
private readonly headers: CosmosHeaders,
public readonly hasMoreResults: boolean
) {}
public get continuation(): string {
return this.headers[Constants.HttpHeaders.Continuation];
}
public get queryMetrics(): string {
return this.headers[Constants.HttpHeaders.QueryMetrics];
}
public get requestCharge(): number {
return this.headers[Constants.HttpHeaders.RequestCharge];
}
}
@ -1,11 +1,11 @@
|
|||
import { IHeaders } from "../index";
|
||||
import { CosmosHeaders } from "../index";
|
||||
|
||||
/**
|
||||
* Options associated with upload media.
|
||||
*/
|
||||
export interface MediaOptions {
|
||||
/** (Advanced use case) Initial headers to start with when sending requests to Cosmos */
|
||||
initialHeaders?: IHeaders;
|
||||
initialHeaders?: CosmosHeaders;
|
||||
/** HTTP Slug header value. */
|
||||
slug?: string;
|
||||
/** HTTP ContentType header value. */
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
import { Agent, OutgoingHttpHeaders } from "http";
|
||||
import { RequestOptions } from "https"; // TYPES ONLY
|
||||
import * as querystring from "querystring";
|
||||
import { Constants } from "../common/constants";
|
||||
import { ConnectionPolicy } from "../documents";
|
||||
import { GlobalEndpointManager } from "../globalEndpointManager";
|
||||
import { Constants, IHeaders } from "../index";
|
||||
import { CosmosHeaders } from "../queryExecutionContext/CosmosHeaders";
|
||||
import * as RetryUtility from "../retry/retryUtility";
|
||||
import { bodyFromData, createRequestObject, parse, Response } from "./request";
|
||||
import { RequestContext } from "./RequestContext";
|
||||
|
@ -45,7 +46,7 @@ export class RequestHandler {
|
|||
request: RequestContext,
|
||||
data: string | Buffer,
|
||||
queryParams: any, // TODO: any query params types
|
||||
headers: IHeaders
|
||||
headers: CosmosHeaders
|
||||
): Promise<Response<any>> {
|
||||
// TODO: any
|
||||
const path = (request as { path: string }).path === undefined ? request : (request as { path: string }).path;
|
||||
|
@ -117,7 +118,7 @@ export class RequestHandler {
|
|||
}
|
||||
|
||||
/** @ignore */
|
||||
public get(urlString: string, request: RequestContext, headers: IHeaders) {
|
||||
public get(urlString: string, request: RequestContext, headers: CosmosHeaders) {
|
||||
// TODO: any
|
||||
return RequestHandler.request(
|
||||
this.globalEndpointManager,
|
||||
|
@ -133,7 +134,7 @@ export class RequestHandler {
|
|||
}
|
||||
|
||||
/** @ignore */
|
||||
public post(urlString: string, request: RequestContext, body: any, headers: IHeaders) {
|
||||
public post(urlString: string, request: RequestContext, body: any, headers: CosmosHeaders) {
|
||||
// TODO: any
|
||||
return RequestHandler.request(
|
||||
this.globalEndpointManager,
|
||||
|
@ -149,7 +150,7 @@ export class RequestHandler {
|
|||
}
|
||||
|
||||
/** @ignore */
|
||||
public put(urlString: string, request: RequestContext, body: any, headers: IHeaders) {
|
||||
public put(urlString: string, request: RequestContext, body: any, headers: CosmosHeaders) {
|
||||
// TODO: any
|
||||
return RequestHandler.request(
|
||||
this.globalEndpointManager,
|
||||
|
@ -165,7 +166,7 @@ export class RequestHandler {
|
|||
}
|
||||
|
||||
/** @ignore */
|
||||
public head(urlString: string, request: any, headers: IHeaders) {
|
||||
public head(urlString: string, request: any, headers: CosmosHeaders) {
|
||||
// TODO: any
|
||||
return RequestHandler.request(
|
||||
this.globalEndpointManager,
|
||||
|
@ -181,7 +182,7 @@ export class RequestHandler {
|
|||
}
|
||||
|
||||
/** @ignore */
|
||||
public delete(urlString: string, request: RequestContext, headers: IHeaders) {
|
||||
public delete(urlString: string, request: RequestContext, headers: CosmosHeaders) {
|
||||
return RequestHandler.request(
|
||||
this.globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { PartitionKey } from "../documents";
|
||||
import { IHeaders } from "../index";
|
||||
import { CosmosHeaders } from "../index";
|
||||
|
||||
/**
|
||||
* Options that can be specified for a requested issued to the Azure Cosmos DB servers.=
|
||||
|
@ -46,7 +46,7 @@ export interface RequestOptions {
|
|||
/** Token for use with Session consistency. */
|
||||
sessionToken?: string;
|
||||
/** (Advanced use case) Initial headers to start with when sending requests to Cosmos */
|
||||
initialHeaders?: IHeaders;
|
||||
initialHeaders?: CosmosHeaders;
|
||||
/** (Advanced use case) The url to connect to. */
|
||||
urlConnection?: string;
|
||||
/** (Advanced use case) Skip getting info on the parititon key from the container. */
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
import { Constants } from "../common";
|
||||
import { CosmosHeaders } from "../queryExecutionContext/CosmosHeaders";
|
||||
import { StatusCode } from "./StatusCodes";
|
||||
|
||||
export class ResourceResponse<TResource> {
|
||||
constructor(
|
||||
public readonly resource: TResource,
|
||||
public readonly headers: CosmosHeaders,
|
||||
public readonly statusCode: StatusCode
|
||||
) {}
|
||||
public get requestCharge(): number {
|
||||
return this.headers[Constants.HttpHeaders.RequestCharge] as number;
|
||||
}
|
||||
public get activityId(): string {
|
||||
return this.headers[Constants.HttpHeaders.ActivityId] as string;
|
||||
}
|
||||
public get etag(): string {
|
||||
return this.headers[Constants.HttpHeaders.ETag] as string;
|
||||
}
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
import { IHeaders } from "../index";
|
||||
import { CosmosHeaders } from "../index";
|
||||
|
||||
export interface Response<T> {
|
||||
headers?: IHeaders;
|
||||
headers?: CosmosHeaders;
|
||||
result?: T;
|
||||
statusCode?: number;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
export const StatusCode = {
|
||||
// Success
|
||||
Ok: 200 as 200,
|
||||
Created: 201 as 201,
|
||||
Accepted: 202 as 202,
|
||||
NoContent: 204 as 204,
|
||||
NotModified: 304 as 304,
|
||||
|
||||
// Client error
|
||||
BadRequest: 400 as 400,
|
||||
Unauthorized: 401 as 401,
|
||||
Forbidden: 403 as 403,
|
||||
NotFound: 404 as 404,
|
||||
MethodNotAllowed: 405 as 405,
|
||||
RequestTimeout: 408 as 408,
|
||||
Conflict: 409 as 409,
|
||||
Gone: 410 as 410,
|
||||
PreconditionFailed: 412 as 412,
|
||||
RequestEntityTooLarge: 413 as 413,
|
||||
TooManyRequests: 429 as 429,
|
||||
RetryWith: 449 as 449,
|
||||
|
||||
// Server Error
|
||||
InternalServerError: 500 as 500,
|
||||
ServiceUnavailable: 503 as 503,
|
||||
|
||||
// Operation pause and cancel. These are FAKE status codes for QOS logging purpose only.
|
||||
OperationPaused: 1200 as 1200,
|
||||
OperationCancelled: 1201
|
||||
};
|
||||
|
||||
export type StatusCode = (typeof StatusCode)[keyof typeof StatusCode];
|
||||
|
||||
export const SubStatusCode = {
|
||||
Unknown: 0 as 0,
|
||||
|
||||
// 400: Bad Request Substatus
|
||||
CrossPartitionQueryNotServable: 1004 as 1004,
|
||||
|
||||
// 410: StatusCodeType_Gone: substatus
|
||||
PartitionKeyRangeGone: 1002 as 1002,
|
||||
|
||||
// 404: NotFound Substatus
|
||||
ReadSessionNotAvailable: 1002 as 1002,
|
||||
|
||||
// 403: Forbidden Substatus
|
||||
WriteForbidden: 3
|
||||
};
|
||||
|
||||
export type SubStatusCode = (typeof SubStatusCode)[keyof typeof SubStatusCode];
|
|
@ -4,4 +4,4 @@ export { MediaOptions } from "./MediaOptions";
|
|||
export { RequestHandler } from "./RequestHandler";
|
||||
export { RequestOptions } from "./RequestOptions";
|
||||
export { Response } from "./Response";
|
||||
export { CosmosResponse } from "./CosmosResponse";
|
||||
export { ResourceResponse } from "./ResourceResponse";
|
||||
|
|
|
@ -6,7 +6,7 @@ import * as url from "url";
|
|||
|
||||
import { Constants, HTTPMethod, jsonStringifyAndEscapeNonASCII, ResourceType } from "../common";
|
||||
import { ConnectionPolicy, MediaReadMode } from "../documents";
|
||||
import { IHeaders } from "../queryExecutionContext";
|
||||
import { CosmosHeaders } from "../queryExecutionContext";
|
||||
|
||||
import { ErrorResponse } from "./ErrorResponse";
|
||||
export { ErrorResponse }; // Should refactor this out
|
||||
|
@ -71,7 +71,7 @@ export function createRequestObject(
|
|||
});
|
||||
response.on("end", () => {
|
||||
if (response.statusCode >= 400) {
|
||||
return reject(getErrorBody(response, data, response.headers as IHeaders));
|
||||
return reject(getErrorBody(response, data, response.headers as CosmosHeaders));
|
||||
}
|
||||
|
||||
let result;
|
||||
|
@ -81,7 +81,7 @@ export function createRequestObject(
|
|||
return reject(exception);
|
||||
}
|
||||
|
||||
resolve({ result, headers: response.headers as IHeaders, statusCode: response.statusCode });
|
||||
resolve({ result, headers: response.headers as CosmosHeaders, statusCode: response.statusCode });
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -111,7 +111,7 @@ export function createRequestObject(
|
|||
* @param {object} data - the data body returned from the executon of a request.
|
||||
* @hidden
|
||||
*/
|
||||
function getErrorBody(response: http.IncomingMessage, data: string, headers: IHeaders): ErrorResponse {
|
||||
function getErrorBody(response: http.IncomingMessage, data: string, headers: CosmosHeaders): ErrorResponse {
|
||||
const errorBody: ErrorResponse = {
|
||||
code: response.statusCode,
|
||||
body: data,
|
||||
|
@ -138,7 +138,7 @@ function getErrorBody(response: http.IncomingMessage, data: string, headers: IHe
|
|||
|
||||
export async function getHeaders(
|
||||
authOptions: AuthOptions,
|
||||
defaultHeaders: IHeaders,
|
||||
defaultHeaders: CosmosHeaders,
|
||||
verb: HTTPMethod,
|
||||
path: string,
|
||||
resourceId: string,
|
||||
|
@ -146,8 +146,8 @@ export async function getHeaders(
|
|||
options: RequestOptions | FeedOptions | MediaOptions,
|
||||
partitionKeyRangeId?: string,
|
||||
useMultipleWriteLocations?: boolean
|
||||
): Promise<IHeaders> {
|
||||
const headers: IHeaders = { ...defaultHeaders };
|
||||
): Promise<CosmosHeaders> {
|
||||
const headers: CosmosHeaders = { ...defaultHeaders };
|
||||
const opts: RequestOptions & FeedOptions & MediaOptions = (options || {}) as any; // TODO: this is dirty
|
||||
|
||||
if (useMultipleWriteLocations) {
|
||||
|
@ -192,8 +192,8 @@ export async function getHeaders(
|
|||
}
|
||||
}
|
||||
|
||||
if (opts.a_im) {
|
||||
headers[Constants.HttpHeaders.A_IM] = opts.a_im;
|
||||
if (opts.useIncrementalFeed) {
|
||||
headers[Constants.HttpHeaders.A_IM] = "Incremental Feed";
|
||||
}
|
||||
|
||||
if (opts.indexingDirective) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { isReadRequest } from "../common";
|
||||
import { isReadRequest } from "../common/helper";
|
||||
import { GlobalEndpointManager } from "../globalEndpointManager";
|
||||
import { ErrorResponse } from "../request/request";
|
||||
import { RequestContext } from "../request/RequestContext";
|
||||
|
|
|
@ -1,15 +1,19 @@
|
|||
import { RequestOptions } from "https";
|
||||
import * as url from "url";
|
||||
import { Constants, sleep, StatusCodes, SubStatusCodes } from "../common";
|
||||
import { ConnectionPolicy } from "../documents";
|
||||
import { Constants } from "../common/constants";
|
||||
import { sleep } from "../common/helper";
|
||||
import { StatusCodes, SubStatusCodes } from "../common/statusCodes";
|
||||
import { ConnectionPolicy } from "../documents/ConnectionPolicy";
|
||||
import { GlobalEndpointManager } from "../globalEndpointManager";
|
||||
import { Response } from "../request";
|
||||
import { LocationRouting } from "../request/LocationRouting";
|
||||
import { RequestContext } from "../request/RequestContext";
|
||||
import { DefaultRetryPolicy } from "./defaultRetryPolicy";
|
||||
import { EndpointDiscoveryRetryPolicy, ResourceThrottleRetryPolicy, SessionRetryPolicy } from "./index";
|
||||
import { EndpointDiscoveryRetryPolicy } from "./endpointDiscoveryRetryPolicy";
|
||||
import { IRetryPolicy } from "./IRetryPolicy";
|
||||
import { ResourceThrottleRetryPolicy } from "./resourceThrottleRetryPolicy";
|
||||
import { RetryContext } from "./RetryContext";
|
||||
import { SessionRetryPolicy } from "./sessionRetryPolicy";
|
||||
|
||||
/** @hidden */
|
||||
export type CreateRequestObjectStubFunction = (
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { Constants } from "../common";
|
||||
import { InMemoryCollectionRoutingMap } from "./index";
|
||||
import { Constants } from "../common/constants";
|
||||
import { InMemoryCollectionRoutingMap } from "./inMemoryCollectionRoutingMap";
|
||||
|
||||
function compareRanges(a: any, b: any) {
|
||||
const aVal = a[0][Constants.PartitionKeyRange.MinInclusive];
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
import semaphore from "semaphore";
|
||||
import { ClientContext } from "../ClientContext";
|
||||
import { getIdFromLink } from "../common";
|
||||
import { getIdFromLink } from "../common/helper";
|
||||
import { createCompleteRoutingMap } from "./CollectionRoutingMapFactory";
|
||||
import { InMemoryCollectionRoutingMap, QueryRange } from "./index";
|
||||
import { InMemoryCollectionRoutingMap } from "./inMemoryCollectionRoutingMap";
|
||||
import { QueryRange } from "./QueryRange";
|
||||
|
||||
/** @hidden */
|
||||
export class PartitionKeyRangeCache {
|
||||
|
@ -33,7 +34,7 @@ export class PartitionKeyRangeCache {
|
|||
let crm: InMemoryCollectionRoutingMap = this.collectionRoutingMapByCollectionId[collectionId];
|
||||
if (crm === undefined) {
|
||||
try {
|
||||
const { result: resources } = await this.clientContext.queryPartitionKeyRanges(collectionLink).toArray();
|
||||
const { resources } = await this.clientContext.queryPartitionKeyRanges(collectionLink).fetchAll();
|
||||
|
||||
crm = createCompleteRoutingMap(resources.map(r => [r, true]), collectionId);
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import { ClientContext } from "../ClientContext";
|
||||
import { Constants } from "../common";
|
||||
import { PartitionKeyRangeCache, QueryRange } from "./index";
|
||||
import { Constants } from "../common/constants";
|
||||
import { PartitionKeyRangeCache } from "./partitionKeyRangeCache";
|
||||
import { QueryRange } from "./QueryRange";
|
||||
|
||||
/** @hidden */
|
||||
export const PARITIONKEYRANGE = Constants.PartitionKeyRange;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { Constants, getContainerLink, trimSlashes } from "../common";
|
||||
import { IHeaders } from "../queryExecutionContext";
|
||||
import { CosmosHeaders } from "../queryExecutionContext";
|
||||
import { SessionContext } from "./SessionContext";
|
||||
import { VectorSessionToken } from "./VectorSessionToken";
|
||||
|
||||
|
@ -35,7 +35,7 @@ export class SessionContainer {
|
|||
}
|
||||
}
|
||||
|
||||
public set(request: SessionContext, resHeaders: IHeaders) {
|
||||
public set(request: SessionContext, resHeaders: CosmosHeaders) {
|
||||
// TODO: we check the master logic a few different places. Might not need it.
|
||||
if (!resHeaders || SessionContainer.isReadingFromMaster(request.resourceType, request.operationType)) {
|
||||
return;
|
||||
|
@ -146,7 +146,7 @@ export class SessionContainer {
|
|||
return false;
|
||||
}
|
||||
|
||||
private getContainerName(request: SessionContext, headers: IHeaders) {
|
||||
private getContainerName(request: SessionContext, headers: CosmosHeaders) {
|
||||
let ownerFullName = headers[Constants.HttpHeaders.OwnerFullName];
|
||||
if (!ownerFullName) {
|
||||
ownerFullName = trimSlashes(request.resourceAddress);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/** @hidden */
|
||||
export class MockedQueryIterator {
|
||||
constructor(private results: any) {}
|
||||
public async toArray() {
|
||||
return { result: this.results };
|
||||
public async fetchAll() {
|
||||
return { resources: this.results };
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
import { Container, Database, Trigger, Triggers, UserDefinedFunction, UserDefinedFunctions } from "../../client";
|
||||
import { ClientContext } from "../../ClientContext";
|
||||
|
||||
/**
|
||||
* Used only for accessing Trigger and UserDefinedFunctions which we've hidden on the new OM while it's being reworked
|
||||
* @hidden
|
||||
* @private
|
||||
*/
|
||||
export class PrivateContainer extends Container {
|
||||
public get triggers(): Triggers {
|
||||
return this.__triggers;
|
||||
}
|
||||
public get userDefinedFunctions(): UserDefinedFunctions {
|
||||
return this.__userDefinedFunctions;
|
||||
}
|
||||
constructor(public readonly database: Database, public readonly id: string, clientContext: ClientContext) {
|
||||
super(database, id, clientContext);
|
||||
}
|
||||
|
||||
public trigger(id: string): Trigger {
|
||||
return this.__trigger(id);
|
||||
}
|
||||
|
||||
public userDefinedFunction(id: string): UserDefinedFunction {
|
||||
return this.__userDefinedFunction(id);
|
||||
}
|
||||
}
|
|
@ -13,6 +13,7 @@ import {
|
|||
import { StoredProcedureResponse } from "../../client/StoredProcedure/StoredProcedureResponse";
|
||||
import { UserResponse } from "../../client/User/UserResponse";
|
||||
import { endpoint, masterKey } from "./_testConfig";
|
||||
import { PrivateContainer } from "./PrivateContainer";
|
||||
|
||||
const defaultClient = new CosmosClient({ endpoint, auth: { masterKey } });
|
||||
|
||||
|
@ -26,7 +27,7 @@ export function getEntropy(): string {
|
|||
|
||||
export async function removeAllDatabases(client: CosmosClient = defaultClient) {
|
||||
try {
|
||||
const { result: databases } = await client.databases.readAll().toArray();
|
||||
const { resources: databases } = await client.databases.readAll().fetchAll();
|
||||
const length = databases.length;
|
||||
|
||||
if (length === 0) {
|
||||
|
@ -67,6 +68,11 @@ export async function getTestContainer(
|
|||
return db.container(id);
|
||||
}
|
||||
|
||||
export function createPrivateContainer(container: Container) {
|
||||
// This is a hack to access trigger and udf for existing tests while we figure out what we're going to do
|
||||
return new PrivateContainer(container.database, container.id, (container as any).clientContext);
|
||||
}
|
||||
|
||||
export async function bulkInsertItems(
|
||||
container: Container,
|
||||
documents: any[]
|
||||
|
@ -74,7 +80,7 @@ export async function bulkInsertItems(
|
|||
const returnedDocuments = [];
|
||||
for (const doc of documents) {
|
||||
try {
|
||||
const { body: document } = await container.items.create(doc);
|
||||
const { resource: document } = await container.items.create(doc);
|
||||
returnedDocuments.push(document);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
|
@ -92,7 +98,7 @@ export async function bulkReadItems(container: Container, documents: any[], part
|
|||
: { partitionKey: {} };
|
||||
|
||||
// TODO: should we block or do all requests in parallel?
|
||||
const { body: doc } = await container.item(document.id).read(options);
|
||||
const { resource: doc } = await container.item(document.id).read(options);
|
||||
|
||||
assert.equal(JSON.stringify(doc), JSON.stringify(document));
|
||||
} catch (err) {
|
||||
|
@ -105,7 +111,7 @@ export async function bulkReplaceItems(container: Container, documents: any[]):
|
|||
const returnedDocuments: any[] = [];
|
||||
for (const document of documents) {
|
||||
try {
|
||||
const { body: doc } = await container.item(document.id).replace(document);
|
||||
const { resource: doc } = await container.item(document.id).replace(document);
|
||||
const expectedModifiedDocument = JSON.parse(JSON.stringify(document));
|
||||
delete expectedModifiedDocument._etag;
|
||||
delete expectedModifiedDocument._ts;
|
||||
|
@ -161,9 +167,9 @@ export async function bulkQueryItemsWithPartitionKey(
|
|||
]
|
||||
};
|
||||
|
||||
const { result: results } = await container.items.query(querySpec).toArray();
|
||||
assert.equal(results.length, 1, "Expected exactly 1 document");
|
||||
assert.equal(JSON.stringify(results[0]), JSON.stringify(document));
|
||||
const { resources } = await container.items.query(querySpec).fetchAll();
|
||||
assert.equal(resources.length, 1, "Expected exactly 1 document");
|
||||
assert.equal(JSON.stringify(resources[0]), JSON.stringify(document));
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
|
@ -251,7 +257,7 @@ export function replaceOrUpsertPermission(
|
|||
|
||||
// Trigger
|
||||
export function createOrUpsertTrigger(
|
||||
container: Container,
|
||||
container: PrivateContainer,
|
||||
body: any,
|
||||
options: any,
|
||||
isUpsertTest: boolean
|
||||
|
@ -263,7 +269,7 @@ export function createOrUpsertTrigger(
|
|||
}
|
||||
}
|
||||
export function replaceOrUpsertTrigger(
|
||||
container: Container,
|
||||
container: PrivateContainer,
|
||||
body: any,
|
||||
options: any,
|
||||
isUpsertTest: boolean
|
||||
|
@ -277,7 +283,7 @@ export function replaceOrUpsertTrigger(
|
|||
|
||||
// User Defined Function
|
||||
export function createOrUpsertUserDefinedFunction(
|
||||
container: Container,
|
||||
container: PrivateContainer,
|
||||
body: any,
|
||||
options: any,
|
||||
isUpsertTest: boolean
|
||||
|
@ -289,7 +295,7 @@ export function createOrUpsertUserDefinedFunction(
|
|||
}
|
||||
}
|
||||
export function replaceOrUpsertUserDefinedFunction(
|
||||
container: Container,
|
||||
container: PrivateContainer,
|
||||
body: any,
|
||||
options: any,
|
||||
isUpsertTest: boolean
|
||||
|
|
|
@ -14,19 +14,19 @@ describe("NodeJS CRUD Tests", function() {
|
|||
it("should handle all the key options", async function() {
|
||||
const clientOptionsKey = new CosmosClient({ endpoint, key: masterKey });
|
||||
assert(
|
||||
undefined !== (await clientOptionsKey.databases.readAll().toArray()),
|
||||
undefined !== (await clientOptionsKey.databases.readAll().fetchAll()),
|
||||
"Should be able to fetch list of databases"
|
||||
);
|
||||
|
||||
const clientOptionsAuthKey = new CosmosClient({ endpoint, auth: { key: masterKey } });
|
||||
assert(
|
||||
undefined !== (await clientOptionsAuthKey.databases.readAll().toArray()),
|
||||
undefined !== (await clientOptionsAuthKey.databases.readAll().fetchAll()),
|
||||
"Should be able to fetch list of databases"
|
||||
);
|
||||
|
||||
const clientOptionsAuthMasterKey = new CosmosClient({ endpoint, auth: { masterKey } });
|
||||
assert(
|
||||
undefined !== (await clientOptionsAuthMasterKey.databases.readAll().toArray()),
|
||||
undefined !== (await clientOptionsAuthMasterKey.databases.readAll().fetchAll()),
|
||||
"Should be able to fetch list of databases"
|
||||
);
|
||||
});
|
||||
|
@ -36,28 +36,28 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const database = await getTestDatabase("Validate Authorization database");
|
||||
// create container1
|
||||
|
||||
const { body: container1 } = await database.containers.create({ id: "Validate Authorization container" });
|
||||
const { resource: container1 } = await database.containers.create({ id: "Validate Authorization container" });
|
||||
// create document1
|
||||
const { body: document1 } = await database
|
||||
const { resource: document1 } = await database
|
||||
.container(container1.id)
|
||||
.items.create({ id: "coll1doc1", foo: "bar", key: "value" });
|
||||
// create document 2
|
||||
const { body: document2 } = await database
|
||||
const { resource: document2 } = await database
|
||||
.container(container1.id)
|
||||
.items.create({ id: "coll1doc2", foo: "bar2", key: "value2" });
|
||||
|
||||
// create container 2
|
||||
const { body: container2 } = await database.containers.create({ id: "sample container2" });
|
||||
const { resource: container2 } = await database.containers.create({ id: "sample container2" });
|
||||
|
||||
// create user1
|
||||
const { body: user1 } = await database.users.create({ id: "user1" });
|
||||
const { resource: user1 } = await database.users.create({ id: "user1" });
|
||||
let permission = {
|
||||
id: "permission On Coll1",
|
||||
permissionMode: PermissionMode.Read,
|
||||
resource: (container1 as any)._self
|
||||
}; // TODO: any rid stuff
|
||||
// create permission for container1
|
||||
const { body: permissionOnColl1 } = await createOrUpsertPermission(
|
||||
const { resource: permissionOnColl1 } = await createOrUpsertPermission(
|
||||
database.user(user1.id),
|
||||
permission,
|
||||
undefined,
|
||||
|
@ -70,7 +70,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
resource: (document2 as any)._self // TODO: any rid
|
||||
};
|
||||
// create permission for document 2
|
||||
const { body: permissionOnDoc2 } = await createOrUpsertPermission(
|
||||
const { resource: permissionOnDoc2 } = await createOrUpsertPermission(
|
||||
database.user(user1.id),
|
||||
permission,
|
||||
undefined,
|
||||
|
@ -79,14 +79,14 @@ describe("NodeJS CRUD Tests", function() {
|
|||
assert((permissionOnDoc2 as any)._token !== undefined, "permission token is invalid"); // TODO: any rid
|
||||
|
||||
// create user 2
|
||||
const { body: user2 } = await database.users.create({ id: "user2" });
|
||||
const { resource: user2 } = await database.users.create({ id: "user2" });
|
||||
permission = {
|
||||
id: "permission On coll2",
|
||||
permissionMode: PermissionMode.All,
|
||||
resource: (container2 as any)._self // TODO: any rid
|
||||
};
|
||||
// create permission on container 2
|
||||
const { body: permissionOnColl2 } = await createOrUpsertPermission(
|
||||
const { resource: permissionOnColl2 } = await createOrUpsertPermission(
|
||||
database.user(user2.id),
|
||||
permission,
|
||||
undefined,
|
||||
|
@ -111,7 +111,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const authorizationCRUDTest = async function(isUpsertTest: boolean) {
|
||||
try {
|
||||
const badclient = new CosmosClient({ endpoint, auth: undefined });
|
||||
const { result: databases } = await badclient.databases.readAll().toArray();
|
||||
const { resources: databases } = await badclient.databases.readAll().fetchAll();
|
||||
assert.fail("Must fail");
|
||||
} catch (err) {
|
||||
assert(err !== undefined, "error should not be undefined");
|
||||
|
@ -129,7 +129,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const col1Client = new CosmosClient({ endpoint, auth: { resourceTokens } });
|
||||
|
||||
// 1. Success-- Use Col1 Permission to Read
|
||||
const { body: successColl1 } = await col1Client
|
||||
const { resource: successColl1 } = await col1Client
|
||||
.database(entities.database.id)
|
||||
.container(entities.coll1.id)
|
||||
.read();
|
||||
|
@ -148,16 +148,16 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
|
||||
// 3. Success-- Use Col1 Permission to Read All Docs
|
||||
const { result: successDocuments } = await col1Client
|
||||
const { resources: successDocuments } = await col1Client
|
||||
.database(entities.database.id)
|
||||
.container(entities.coll1.id)
|
||||
.items.readAll()
|
||||
.toArray();
|
||||
.fetchAll();
|
||||
assert(successDocuments !== undefined, "error reading documents");
|
||||
assert.equal(successDocuments.length, 2, "Expected 2 Documents to be succesfully read");
|
||||
|
||||
// 4. Success-- Use Col1 Permission to Read Col1Doc1
|
||||
const { body: successDoc } = await col1Client
|
||||
const { resource: successDoc } = await col1Client
|
||||
.database(entities.database.id)
|
||||
.container(entities.coll1.id)
|
||||
.item(entities.doc1.id)
|
||||
|
@ -174,7 +174,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const doc = { id: "new doc", CustomProperty1: "BBBBBB", customProperty2: 1000 };
|
||||
const col2Container = await col2Client.databaseDatabase(entities.db.id)
|
||||
.containerContainer(entities.coll2.id);
|
||||
const { result: successDoc2 } = await createOrUpsertItem(
|
||||
const { resources: successDoc2 } = await createOrUpsertItem(
|
||||
col2Container, doc, undefined, isUpsertTest);
|
||||
assert(successDoc2 !== undefined, "error creating document");
|
||||
assert.equal(successDoc2.CustomProperty1, doc.CustomProperty1,
|
||||
|
@ -192,7 +192,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
};
|
||||
const container = await getTestContainer("authorization CRUD multiple partitons", undefined, containerDefinition);
|
||||
// create user
|
||||
const { body: userDef } = await container.database.users.create({ id: "user1" });
|
||||
const { resource: userDef } = await container.database.users.create({ id: "user1" });
|
||||
const user = container.database.user(userDef.id);
|
||||
|
||||
const key = 1;
|
||||
|
@ -204,7 +204,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
};
|
||||
|
||||
// create permission
|
||||
const { body: permission } = await user.permissions.create(permissionDefinition);
|
||||
const { resource: permission } = await user.permissions.create(permissionDefinition);
|
||||
assert((permission as any)._token !== undefined, "permission token is invalid");
|
||||
const resourceTokens: any = {};
|
||||
resourceTokens[container.id] = (permission as any)._token;
|
||||
|
|
|
@ -2,7 +2,7 @@ import assert from "assert";
|
|||
import { Agent } from "http";
|
||||
import { ConnectionPolicy, CosmosClient } from "../..";
|
||||
import { endpoint, masterKey } from "../common/_testConfig";
|
||||
import { getTestDatabase, removeAllDatabases } from "../common/TestHelpers";
|
||||
import { getTestDatabase } from "../common/TestHelpers";
|
||||
|
||||
describe("NodeJS CRUD Tests", function() {
|
||||
this.timeout(process.env.MOCHA_TIMEOUT || 20000);
|
||||
|
|
|
@ -28,7 +28,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
containerDefinition.partitionKey = { paths: ["/id"], kind: PartitionKind.Hash };
|
||||
}
|
||||
|
||||
const { body: containerDef } = await database.containers.create(containerDefinition);
|
||||
const { resource: containerDef } = await database.containers.create(containerDefinition);
|
||||
const container = database.container(containerDef.id);
|
||||
assert.equal(containerDefinition.id, containerDef.id);
|
||||
assert.equal("consistent", containerDef.indexingPolicy.indexingMode);
|
||||
|
@ -37,7 +37,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
assert.deepEqual(containerDef.partitionKey.paths, containerDefinition.partitionKey.paths);
|
||||
}
|
||||
// read containers after creation
|
||||
const { result: containers } = await database.containers.readAll().toArray();
|
||||
const { resources: containers } = await database.containers.readAll().fetchAll();
|
||||
|
||||
assert.equal(containers.length, 1, "create should increase the number of containers");
|
||||
// query containers
|
||||
|
@ -50,15 +50,15 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await database.containers.query(querySpec).toArray();
|
||||
const { resources: results } = await database.containers.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
const { result: ranges } = await container.readPartitionKeyRanges().toArray();
|
||||
const { resources: ranges } = await container.readPartitionKeyRanges().fetchAll();
|
||||
assert(ranges.length > 0, "container should have at least 1 partition");
|
||||
|
||||
// Replacing indexing policy is allowed.
|
||||
containerDef.indexingPolicy.indexingMode = IndexingMode.lazy;
|
||||
const { body: replacedContainer } = await container.replace(containerDef);
|
||||
const { resource: replacedContainer } = await container.replace(containerDef);
|
||||
assert.equal("lazy", replacedContainer.indexingPolicy.indexingMode);
|
||||
|
||||
// Replacing partition key is not allowed.
|
||||
|
@ -84,7 +84,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
// read container
|
||||
containerDef.id = containerDefinition.id; // Resume Id.
|
||||
const { body: readcontainer } = await container.read();
|
||||
const { resource: readcontainer } = await container.read();
|
||||
assert.equal(containerDefinition.id, readcontainer.id);
|
||||
|
||||
// delete container
|
||||
|
@ -169,7 +169,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const database = await getTestDatabase("container test database");
|
||||
|
||||
// create container
|
||||
const { body: containerDef } = await database.containers.create({ id: "container test container" });
|
||||
const { resource: containerDef } = await database.containers.create({ id: "container test container" });
|
||||
const container = database.container(containerDef.id);
|
||||
|
||||
assert.equal(
|
||||
|
@ -184,7 +184,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
indexingPolicy: { indexingMode: IndexingMode.lazy }
|
||||
};
|
||||
|
||||
const { body: lazyContainerDef } = await database.containers.create(lazyContainerDefinition);
|
||||
const { resource: lazyContainerDef } = await database.containers.create(lazyContainerDefinition);
|
||||
const lazyContainer = database.container(lazyContainerDef.id);
|
||||
|
||||
assert.equal(lazyContainerDef.indexingPolicy.indexingMode, IndexingMode.lazy, "indexing mode should be lazy");
|
||||
|
@ -196,7 +196,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
uniqueKeyPolicy: { uniqueKeys: [{ paths: ["/foo"] }] }
|
||||
};
|
||||
|
||||
const { body: uniqueKeysContainerDef } = await database.containers.create(uniqueKeysContainerDefinition);
|
||||
const { resource: uniqueKeysContainerDef } = await database.containers.create(uniqueKeysContainerDefinition);
|
||||
const uniqueKeysContainer = database.container(uniqueKeysContainerDef.id);
|
||||
|
||||
assert.equal(uniqueKeysContainerDef.uniqueKeyPolicy.uniqueKeys[0].paths, "/foo");
|
||||
|
@ -207,7 +207,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
id: "lazy container",
|
||||
indexingPolicy: { indexingMode: "consistent" } // tests the type flexibility
|
||||
};
|
||||
const { body: consistentContainerDef } = await database.containers.create(consistentcontainerDefinition);
|
||||
const { resource: consistentContainerDef } = await database.containers.create(consistentcontainerDefinition);
|
||||
const consistentContainer = database.container(consistentContainerDef.id);
|
||||
assert.equal(
|
||||
containerDef.indexingPolicy.indexingMode,
|
||||
|
@ -241,7 +241,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
};
|
||||
|
||||
const { body: containerWithIndexingPolicyDef } = await database.containers.create(containerDefinition);
|
||||
const { resource: containerWithIndexingPolicyDef } = await database.containers.create(containerDefinition);
|
||||
|
||||
// Two included paths.
|
||||
assert.equal(
|
||||
|
@ -287,7 +287,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
// create container with no indexing policy specified.
|
||||
const containerDefinition01: ContainerDefinition = { id: "TestCreateDefaultPolicy01" };
|
||||
const { body: containerNoIndexPolicyDef } = await database.containers.create(containerDefinition01);
|
||||
const { resource: containerNoIndexPolicyDef } = await database.containers.create(containerDefinition01);
|
||||
checkDefaultIndexingPolicyPaths(containerNoIndexPolicyDef["indexingPolicy"]);
|
||||
|
||||
// create container with partial policy specified.
|
||||
|
@ -299,7 +299,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
};
|
||||
|
||||
const { body: containerWithPartialPolicyDef } = await database.containers.create(containerDefinition02);
|
||||
const { resource: containerWithPartialPolicyDef } = await database.containers.create(containerDefinition02);
|
||||
checkDefaultIndexingPolicyPaths((containerWithPartialPolicyDef as any)["indexingPolicy"]);
|
||||
|
||||
// create container with default policy.
|
||||
|
@ -307,7 +307,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
id: "TestCreateDefaultPolicy03",
|
||||
indexingPolicy: {}
|
||||
};
|
||||
const { body: containerDefaultPolicy } = await database.containers.create(containerDefinition03);
|
||||
const { resource: containerDefaultPolicy } = await database.containers.create(containerDefinition03);
|
||||
checkDefaultIndexingPolicyPaths((containerDefaultPolicy as any)["indexingPolicy"]);
|
||||
|
||||
// create container with indexing policy missing indexes.
|
||||
|
@ -321,7 +321,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
]
|
||||
}
|
||||
};
|
||||
const { body: containerMissingIndexes } = await database.containers.create(containerDefinition04);
|
||||
const { resource: containerMissingIndexes } = await database.containers.create(containerDefinition04);
|
||||
checkDefaultIndexingPolicyPaths((containerMissingIndexes as any)["indexingPolicy"]);
|
||||
|
||||
// create container with indexing policy missing precision.
|
||||
|
@ -345,7 +345,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
]
|
||||
}
|
||||
};
|
||||
const { body: containerMissingPrecision } = await database.containers.create(containerDefinition05);
|
||||
const { resource: containerMissingPrecision } = await database.containers.create(containerDefinition05);
|
||||
checkDefaultIndexingPolicyPaths((containerMissingPrecision as any)["indexingPolicy"]);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
|
@ -362,9 +362,9 @@ describe("NodeJS CRUD Tests", function() {
|
|||
});
|
||||
|
||||
describe("Validate response headers", function() {
|
||||
const createThenReadcontainer = async function(database: Database, body: ContainerDefinition) {
|
||||
const createThenReadcontainer = async function(database: Database, definition: ContainerDefinition) {
|
||||
try {
|
||||
const { body: createdcontainer, headers } = await database.containers.create(body);
|
||||
const { container: createdcontainer, headers } = await database.containers.create(definition);
|
||||
const response = await database.container(createdcontainer.id).read();
|
||||
return response;
|
||||
} catch (err) {
|
||||
|
@ -419,7 +419,7 @@ describe("containers.createIfNotExists", function() {
|
|||
it("should handle container does not exist", async function() {
|
||||
const def: ContainerDefinition = { id: "does not exist" };
|
||||
const { container } = await database.containers.createIfNotExists(def);
|
||||
const { body: readDef } = await container.read();
|
||||
const { resource: readDef } = await container.read();
|
||||
assert.equal(def.id, readDef.id);
|
||||
});
|
||||
|
||||
|
@ -428,7 +428,7 @@ describe("containers.createIfNotExists", function() {
|
|||
await database.containers.create(def);
|
||||
|
||||
const { container } = await database.containers.createIfNotExists(def);
|
||||
const { body: readDef } = await container.read();
|
||||
const { resource: readDef } = await container.read();
|
||||
assert.equal(def.id, readDef.id);
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -14,17 +14,17 @@ describe("NodeJS CRUD Tests", function() {
describe("Validate Database CRUD", async function() {
const databaseCRUDTest = async function() {
// read databases
const { result: databases } = await client.databases.readAll().toArray();
const { resources: databases } = await client.databases.readAll().fetchAll();
assert.equal(databases.constructor, Array, "Value should be an array");

// create a database
const beforeCreateDatabasesCount = databases.length;
const databaseDefinition = { id: "database test database", throughput: 400 };
const { body: db } = await client.databases.create(databaseDefinition);
const { resource: db } = await client.databases.create(databaseDefinition);
assert.equal(db.id, databaseDefinition.id);

// read databases after creation
const { result: databases2 } = await client.databases.readAll().toArray();
const { resources: databases2 } = await client.databases.readAll().fetchAll();
assert.equal(databases2.length, beforeCreateDatabasesCount + 1, "create should increase the number of databases");
// query databases
const querySpec = {
@@ -36,7 +36,7 @@ describe("NodeJS CRUD Tests", function() {
}
]
};
const { result: results } = await client.databases.query(querySpec).toArray();
const { resources: results } = await client.databases.query(querySpec).fetchAll();
assert(results.length > 0, "number of results for the query should be > 0");

// delete database
@@ -59,7 +59,7 @@ describe("NodeJS CRUD Tests", function() {
it("should handle does not exist", async function() {
const def: DatabaseDefinition = { id: addEntropy("does not exist") };
const { database } = await client.databases.createIfNotExists(def);
const { body: readDef } = await database.read();
const { resource: readDef } = await database.read();
assert.equal(def.id, readDef.id);
});

@@ -70,7 +70,7 @@ describe("NodeJS CRUD Tests", function() {

// Now call createIfNotExists on existing db
const { database } = await client.databases.createIfNotExists(def);
const { body: readDef } = await database.read();
const { resource: readDef } = await database.read();
assert.equal(def.id, readDef.id);
});
});
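
// A minimal sketch of the v3 feed pattern shown in this hunk, assuming `client` is a constructed CosmosClient:
// query() and readAll() now return their pages via fetchAll(), with the array exposed as `resources`
// (previously `result` from toArray()).
const dbQuery = "SELECT * FROM root r WHERE r.id = 'database test database'";
const { resources: matchingDatabases } = await client.databases.query(dbQuery).fetchAll();
const { resources: allDatabases } = await client.databases.readAll().fetchAll();
assert(matchingDatabases.length <= allDatabases.length);
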
@ -14,7 +14,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
describe("validate database account functionality", function() {
|
||||
const databaseAccountTest = async function() {
|
||||
const { body: databaseAccount, headers } = await client.getDatabaseAccount();
|
||||
const { resource: databaseAccount, headers } = await client.getDatabaseAccount();
|
||||
assert.equal(databaseAccount.DatabasesLink, "/dbs/");
|
||||
assert.equal(databaseAccount.MediaLink, "/media/");
|
||||
assert.equal(databaseAccount.MaxMediaStorageUsageInMB, headers["x-ms-max-media-storage-usage-mb"]); // TODO: should use constants here
|
||||
|
|
|
@ -36,11 +36,11 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// create database
|
||||
const database = await getTestDatabase("sample 中文 database");
|
||||
// create container
|
||||
const { body: containerdef } = await database.containers.create({ id: "sample container" });
|
||||
const { resource: containerdef } = await database.containers.create({ id: "sample container" });
|
||||
const container: Container = database.container(containerdef.id);
|
||||
|
||||
// read items
|
||||
const { result: items } = await container.items.readAll().toArray();
|
||||
const { resources: items } = await container.items.readAll().fetchAll();
|
||||
assert(Array.isArray(items), "Value should be an array");
|
||||
|
||||
// create an item
|
||||
|
@ -57,11 +57,11 @@ describe("NodeJS CRUD Tests", function() {
|
|||
} catch (err) {
|
||||
assert(err !== undefined, "should throw an error because automatic id generation is disabled");
|
||||
}
|
||||
const { body: document } = await createOrUpsertItem(container, itemDefinition, undefined, isUpsertTest);
|
||||
const { resource: document } = await createOrUpsertItem(container, itemDefinition, undefined, isUpsertTest);
|
||||
assert.equal(document.name, itemDefinition.name);
|
||||
assert(document.id !== undefined);
|
||||
// read documents after creation
|
||||
const { result: documents2 } = await container.items.readAll().toArray();
|
||||
const { resources: documents2 } = await container.items.readAll().fetchAll();
|
||||
assert.equal(documents2.length, beforeCreateDocumentsCount + 1, "create should increase the number of documents");
|
||||
// query documents
|
||||
const querySpec = {
|
||||
|
@ -73,27 +73,27 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await container.items.query(querySpec).toArray();
|
||||
const { resources: results } = await container.items.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
const { result: results2 } = await container.items.query(querySpec, { enableScanInQuery: true }).toArray();
|
||||
const { resources: results2 } = await container.items.query(querySpec, { enableScanInQuery: true }).fetchAll();
|
||||
assert(results2.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
// replace document
|
||||
document.name = "replaced document";
|
||||
document.foo = "not bar";
|
||||
const { body: replacedDocument } = await replaceOrUpsertItem(container, document, undefined, isUpsertTest);
|
||||
const { resource: replacedDocument } = await replaceOrUpsertItem(container, document, undefined, isUpsertTest);
|
||||
assert.equal(replacedDocument.name, "replaced document", "document name property should change");
|
||||
assert.equal(replacedDocument.foo, "not bar", "property should have changed");
|
||||
assert.equal(document.id, replacedDocument.id, "document id should stay the same");
|
||||
// read document
|
||||
const { body: document2 } = await container.item(replacedDocument.id).read<TestItem>();
|
||||
const { resource: document2 } = await container.item(replacedDocument.id).read<TestItem>();
|
||||
assert.equal(replacedDocument.id, document2.id);
|
||||
// delete document
|
||||
const { body: res } = await container.item(replacedDocument.id).delete();
|
||||
const { resource: res } = await container.item(replacedDocument.id).delete();
|
||||
|
||||
// read documents after deletion
|
||||
try {
|
||||
const { body: document3 } = await container.item(replacedDocument.id).read();
|
||||
const { resource: document3 } = await container.item(replacedDocument.id).read();
|
||||
assert.fail("must throw if document doesn't exist");
|
||||
} catch (err) {
|
||||
const notFoundErrorCode = 404;
|
||||
|
@ -112,7 +112,9 @@ describe("NodeJS CRUD Tests", function() {
|
|||
partitionKey: { paths: ["/" + partitionKey], kind: PartitionKind.Hash }
|
||||
};
|
||||
|
||||
const { body: containerdef } = await database.containers.create(containerDefinition, { offerThroughput: 12000 });
|
||||
const { resource: containerdef } = await database.containers.create(containerDefinition, {
|
||||
offerThroughput: 12000
|
||||
});
|
||||
const container = database.container(containerdef.id);
|
||||
|
||||
const documents = [
|
||||
|
@ -131,7 +133,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
return doc1.id.localeCompare(doc2.id);
|
||||
});
|
||||
await bulkReadItems(container, returnedDocuments, partitionKey);
|
||||
const { result: successDocuments } = await container.items.readAll().toArray();
|
||||
const { resources: successDocuments } = await container.items.readAll().fetchAll();
|
||||
assert(successDocuments !== undefined, "error reading documents");
|
||||
assert.equal(
|
||||
successDocuments.length,
|
||||
|
@ -157,15 +159,15 @@ describe("NodeJS CRUD Tests", function() {
|
|||
query: "SELECT * FROM Root"
|
||||
};
|
||||
try {
|
||||
const { result: badUpdate } = await container.items.query(querySpec, { enableScanInQuery: true }).toArray();
|
||||
const { resources: badUpdate } = await container.items.query(querySpec, { enableScanInQuery: true }).fetchAll();
|
||||
assert.fail("Must fail");
|
||||
} catch (err) {
|
||||
const badRequestErrorCode = 400;
|
||||
assert.equal(err.code, badRequestErrorCode, "response should return error code " + badRequestErrorCode);
|
||||
}
|
||||
const { result: results } = await container.items
|
||||
const { resources: results } = await container.items
|
||||
.query<ItemDefinition>(querySpec, { enableScanInQuery: true, enableCrossPartitionQuery: true })
|
||||
.toArray();
|
||||
.fetchAll();
|
||||
assert(results !== undefined, "error querying documents");
|
||||
results.sort(function(doc1, doc2) {
|
||||
return doc1.id.localeCompare(doc2.id);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import assert from "assert";
|
||||
import { Constants, CosmosClient } from "../..";
|
||||
import { endpoint, masterKey } from "../common/_testConfig";
|
||||
import { getEntropy, getTestContainer, removeAllDatabases } from "../common/TestHelpers";
|
||||
import { getTestContainer, removeAllDatabases } from "../common/TestHelpers";
|
||||
|
||||
const client = new CosmosClient({ endpoint, auth: { masterKey } });
|
||||
|
||||
|
@ -41,7 +41,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
);
|
||||
assert.equal(collectionSize, 10 * mbInBytes, "Collection size is unexpected");
|
||||
|
||||
const { result: offers } = await client.offers.readAll().toArray();
|
||||
const { resources: offers } = await client.offers.readAll().fetchAll();
|
||||
assert.equal(offers.length, 1);
|
||||
const expectedOffer = offers[0];
|
||||
assert.equal(
|
||||
|
@ -52,7 +52,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
validateOfferResponseBody(expectedOffer);
|
||||
|
||||
// Read the offer
|
||||
const { body: readOffer } = await client.offer(expectedOffer.id).read();
|
||||
const { resource: readOffer } = await client.offer(expectedOffer.id).read();
|
||||
validateOfferResponseBody(readOffer);
|
||||
// Check if the read offer is what we expected.
|
||||
assert.equal(expectedOffer.id, readOffer.id);
|
||||
|
@ -70,7 +70,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: offers2 } = await client.offers.query(querySpec).toArray();
|
||||
const { resources: offers2 } = await client.offers.query(querySpec).fetchAll();
|
||||
assert.equal(offers2.length, 1);
|
||||
const oneOffer = offers2[0];
|
||||
validateOfferResponseBody(oneOffer);
|
||||
|
@ -88,7 +88,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
it("nativeApi Should do offer replace operations successfully name based", async function() {
|
||||
const container = await getTestContainer("Validate Offer CRUD");
|
||||
const { result: offers } = await client.offers.readAll().toArray();
|
||||
const { resources: offers } = await client.offers.readAll().fetchAll();
|
||||
assert.equal(offers.length, 1);
|
||||
const expectedOffer = offers[0];
|
||||
validateOfferResponseBody(expectedOffer);
|
||||
|
@ -96,7 +96,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const offerToReplace = Object.assign({}, expectedOffer);
|
||||
const oldThroughput = offerToReplace.content.offerThroughput;
|
||||
offerToReplace.content.offerThroughput = oldThroughput + 100;
|
||||
const { body: replacedOffer } = await client.offer(offerToReplace.id).replace(offerToReplace);
|
||||
const { resource: replacedOffer } = await client.offer(offerToReplace.id).replace(offerToReplace);
|
||||
validateOfferResponseBody(replacedOffer);
|
||||
// Check if the replaced offer is what we expect.
|
||||
assert.equal(replacedOffer.id, offerToReplace.id);
|
||||
|
|
|
@ -20,10 +20,10 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const container = await getTestContainer("Validate Permission Crud");
|
||||
|
||||
// create user
|
||||
const { body: userDef } = await container.database.users.create({ id: "new user" });
|
||||
const { resource: userDef } = await container.database.users.create({ id: "new user" });
|
||||
const user = container.database.user(userDef.id);
|
||||
// list permissions
|
||||
const { result: permissions } = await user.permissions.readAll().toArray();
|
||||
const { resources: permissions } = await user.permissions.readAll().fetchAll();
|
||||
assert.equal(permissions.constructor, Array, "Value should be an array");
|
||||
const beforeCreateCount = permissions.length;
|
||||
const permissionDef: PermissionDefinition = {
|
||||
|
@ -33,7 +33,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
};
|
||||
|
||||
// create permission
|
||||
const { body: createdPermission } = await createOrUpsertPermission(
|
||||
const { resource: createdPermission } = await createOrUpsertPermission(
|
||||
user,
|
||||
permissionDef,
|
||||
undefined,
|
||||
|
@ -43,7 +43,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
assert.equal(createdPermission.id, "new permission", "permission name error");
|
||||
|
||||
// list permissions after creation
|
||||
const { result: permissionsAfterCreation } = await user.permissions.readAll().toArray();
|
||||
const { resources: permissionsAfterCreation } = await user.permissions.readAll().fetchAll();
|
||||
assert.equal(permissionsAfterCreation.length, beforeCreateCount + 1);
|
||||
|
||||
// query permissions
|
||||
|
@ -56,11 +56,11 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await user.permissions.query(querySpec).toArray();
|
||||
const { resources: results } = await user.permissions.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
permissionDef.permissionMode = PermissionMode.All;
|
||||
const { body: replacedPermission } = await replaceOrUpsertPermission(
|
||||
const { resource: replacedPermission } = await replaceOrUpsertPermission(
|
||||
user,
|
||||
permissionDef,
|
||||
undefined,
|
||||
|
@ -71,17 +71,17 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
// to change the id of an existing resourcewe have to use replace
|
||||
permissionDef.id = "replaced permission";
|
||||
const { body: replacedPermission2 } = await permission.replace(permissionDef);
|
||||
const { resource: replacedPermission2 } = await permission.replace(permissionDef);
|
||||
assert.equal(replacedPermission2.id, "replaced permission", "permission name should change");
|
||||
assert.equal(permissionDef.id, replacedPermission2.id, "permission id should stay the same");
|
||||
permission = user.permission(replacedPermission2.id);
|
||||
|
||||
// read permission
|
||||
const { body: permissionAfterReplace } = await permission.read();
|
||||
const { resource: permissionAfterReplace } = await permission.read();
|
||||
assert.equal(permissionAfterReplace.id, permissionDef.id);
|
||||
|
||||
// delete permission
|
||||
const { body: res } = await permission.delete();
|
||||
const { resource: res } = await permission.delete();
|
||||
|
||||
// read permission after deletion
|
||||
try {
|
||||
|
@ -112,11 +112,11 @@ describe("NodeJS CRUD Tests", function() {
|
|||
);
|
||||
|
||||
// create user
|
||||
const { body: userDef } = await container.database.users.create({ id: "new user" });
|
||||
const { resource: userDef } = await container.database.users.create({ id: "new user" });
|
||||
const user = container.database.user(userDef.id);
|
||||
|
||||
// list permissions
|
||||
const { result: permissions } = await user.permissions.readAll().toArray();
|
||||
const { resources: permissions } = await user.permissions.readAll().fetchAll();
|
||||
assert(Array.isArray(permissions), "Value should be an array");
|
||||
const beforeCreateCount = permissions.length;
|
||||
const permissionDefinition = {
|
||||
|
@ -128,7 +128,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
// create permission
|
||||
const response = await createOrUpsertPermission(user, permissionDefinition, undefined, isUpsertTest);
|
||||
const permissionDef = response.body;
|
||||
const permissionDef = response.resource;
|
||||
let permission = user.permission(permissionDef.id);
|
||||
assert.equal(permissionDef.id, permissionDefinition.id, "permission name error");
|
||||
assert.equal(
|
||||
|
@ -138,7 +138,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
);
|
||||
|
||||
// list permissions after creation
|
||||
const { result: permissionsAfterCreation } = await user.permissions.readAll().toArray();
|
||||
const { resources: permissionsAfterCreation } = await user.permissions.readAll().fetchAll();
|
||||
assert.equal(permissionsAfterCreation.length, beforeCreateCount + 1);
|
||||
|
||||
// query permissions
|
||||
|
@ -151,12 +151,12 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await user.permissions.query(querySpec).toArray();
|
||||
const { resources: results } = await user.permissions.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
// Replace permission
|
||||
permissionDef.permissionMode = PermissionMode.All;
|
||||
const { body: replacedPermission } = await replaceOrUpsertPermission(
|
||||
const { resource: replacedPermission } = await replaceOrUpsertPermission(
|
||||
user,
|
||||
permissionDef,
|
||||
undefined,
|
||||
|
@ -172,16 +172,16 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
// to change the id of an existing resourcewe have to use replace
|
||||
permissionDef.id = "replaced permission";
|
||||
const { body: replacedPermission2 } = await permission.replace(permissionDef);
|
||||
const { resource: replacedPermission2 } = await permission.replace(permissionDef);
|
||||
assert.equal(replacedPermission2.id, permissionDef.id);
|
||||
permission = user.permission(replacedPermission2.id);
|
||||
|
||||
// read permission
|
||||
const { body: permissionAfterReplace } = await permission.read();
|
||||
const { resource: permissionAfterReplace } = await permission.read();
|
||||
assert.equal(permissionAfterReplace.id, replacedPermission2.id);
|
||||
|
||||
// delete permission
|
||||
const { body: res } = await permission.delete();
|
||||
const { resource: res } = await permission.delete();
|
||||
|
||||
// read permission after deletion
|
||||
try {
|
||||
|
|
|
@ -33,15 +33,15 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await client.databases.query(querySpec0).toArray();
|
||||
const { resources: results } = await client.databases.query(querySpec0).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
const querySpec1 = {
|
||||
query: "SELECT * FROM root r WHERE r.id='" + database.id + "'"
|
||||
};
|
||||
const { result: results2 } = await client.databases.query(querySpec1).toArray();
|
||||
const { resources: results2 } = await client.databases.query(querySpec1).fetchAll();
|
||||
assert(results2.length > 0, "number of results for the query should be > 0");
|
||||
const querySpec2 = "SELECT * FROM root r WHERE r.id='" + database.id + "'";
|
||||
const { result: results3 } = await client.databases.query(querySpec2).toArray();
|
||||
const { resources: results3 } = await client.databases.query(querySpec2).fetchAll();
|
||||
assert(results3.length > 0, "number of results for the query should be > 0");
|
||||
} catch (err) {
|
||||
throw err;
|
||||
|
@ -84,20 +84,6 @@ describe("NodeJS CRUD Tests", function() {
|
|||
container = await getTestContainer("query CRUD database 中文", client, containerDefinition, containerOptions);
|
||||
await bulkInsertItems(container, documentDefinitions);
|
||||
});
|
||||
|
||||
it("nativeApi validate QueryIterator nextItem on Multiple Partition Colleciton", async function() {
|
||||
// obtain an instance of queryIterator
|
||||
const queryIterator = container.items.readAll();
|
||||
let cnt = 0;
|
||||
while (queryIterator.hasMoreResults()) {
|
||||
const { result } = await queryIterator.nextItem();
|
||||
if (result === undefined) {
|
||||
break;
|
||||
}
|
||||
cnt++;
|
||||
}
|
||||
assert.equal(cnt, documentDefinitions.length);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Validate QueryIterator Functionality", function() {
|
||||
|
@ -105,15 +91,15 @@ describe("NodeJS CRUD Tests", function() {
|
|||
let resources: { container: Container; doc1: any; doc2: any; doc3: any };
|
||||
beforeEach(async function() {
|
||||
const container = await getTestContainer("Validate QueryIterator Functionality", client);
|
||||
const { body: doc1 } = await container.items.create({ id: "doc1", prop1: "value1" });
|
||||
const { body: doc2 } = await container.items.create({ id: "doc2", prop1: "value2" });
|
||||
const { body: doc3 } = await container.items.create({ id: "doc3", prop1: "value3" });
|
||||
const { resource: doc1 } = await container.items.create({ id: "doc1", prop1: "value1" });
|
||||
const { resource: doc2 } = await container.items.create({ id: "doc2", prop1: "value2" });
|
||||
const { resource: doc3 } = await container.items.create({ id: "doc3", prop1: "value3" });
|
||||
resources = { container, doc1, doc2, doc3 };
|
||||
});
|
||||
|
||||
const queryIteratorToArrayTest = async function() {
|
||||
const queryIterator = resources.container.items.readAll({ maxItemCount: 2 });
|
||||
const { result: docs } = await queryIterator.toArray();
|
||||
const { resources: docs } = await queryIterator.fetchAll();
|
||||
assert.equal(docs.length, 3, "queryIterator should return all documents using continuation");
|
||||
assert.equal(docs[0].id, resources.doc1.id);
|
||||
assert.equal(docs[1].id, resources.doc2.id);
|
||||
|
@ -123,17 +109,16 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const queryIteratorAsyncIteratorTest = async function() {
|
||||
const queryIterator = resources.container.items.readAll({ maxItemCount: 2 });
|
||||
let counter = 0;
|
||||
for await (const { result: doc } of queryIterator.getAsyncIterator()) {
|
||||
counter++;
|
||||
if (counter === 1) {
|
||||
assert.equal(doc.id, resources.doc1.id, "first document should be doc1");
|
||||
} else if (counter === 2) {
|
||||
assert.equal(doc.id, resources.doc2.id, "second document should be doc2");
|
||||
} else if (counter === 3) {
|
||||
assert.equal(doc.id, resources.doc3.id, "third document should be doc3");
|
||||
for await (const { resources: docs } of queryIterator.getAsyncIterator()) {
|
||||
if (counter === 0) {
|
||||
assert.equal(docs[0].id, resources.doc1.id, "first document should be doc1");
|
||||
assert.equal(docs[1].id, resources.doc2.id, "second document should be doc2");
|
||||
} else {
|
||||
assert.equal(docs[0].id, resources.doc3.id, "third document should be doc3");
|
||||
}
|
||||
counter++;
|
||||
}
|
||||
assert(counter === 3, "iterator should have run 3 times");
|
||||
assert(counter === 2, "iterator should have run 3 times");
|
||||
};
|
||||
|
||||
const queryIteratorForEachTest = async function() {
|
||||
|
@ -152,53 +137,27 @@ describe("NodeJS CRUD Tests", function() {
|
|||
assert(counter === 3, "iterator should have run 3 times");
|
||||
};
|
||||
|
||||
const queryIteratorNextAndMoreTest = async function() {
|
||||
const queryIterator = resources.container.items.readAll({ maxItemCount: 2 });
|
||||
assert.equal(queryIterator.hasMoreResults(), true);
|
||||
const { result: doc2 } = await queryIterator.nextItem();
|
||||
assert.equal(doc2.id, resources.doc1.id, "call queryIterator.nextItem after reset should return first document");
|
||||
const { result: doc1 } = await queryIterator.current();
|
||||
assert.equal(doc1.id, resources.doc1.id, "call queryIterator.current after reset should return first document");
|
||||
assert.equal(queryIterator.hasMoreResults(), true);
|
||||
const { result: doc4 } = await queryIterator.nextItem();
|
||||
assert.equal(doc4.id, resources.doc2.id, "call queryIterator.nextItem again should return second document");
|
||||
const { result: doc3 } = await queryIterator.current();
|
||||
assert.equal(doc3.id, resources.doc2.id, "call queryIterator.current should return second document");
|
||||
assert.equal(queryIterator.hasMoreResults(), true);
|
||||
const { result: doc6 } = await queryIterator.nextItem();
|
||||
assert.equal(doc6.id, resources.doc3.id, "call queryIterator.nextItem again should return third document");
|
||||
const { result: doc5 } = await queryIterator.current();
|
||||
assert.equal(doc5.id, resources.doc3.id, "call queryIterator.current should return third document");
|
||||
const { result: doc7 } = await queryIterator.nextItem();
|
||||
assert.equal(doc7, undefined, "queryIterator should return undefined if there is no elements");
|
||||
};
|
||||
|
||||
const queryIteratorExecuteNextTest = async function() {
let queryIterator = resources.container.items.readAll({ maxItemCount: 2 });
const { result: docs, headers } = await queryIterator.executeNext();
const firstResponse = await queryIterator.fetchNext();

assert(headers !== undefined, "executeNext should pass headers as the third parameter to the callback");
assert(headers[Constants.HttpHeaders.RequestCharge] > 0, "RequestCharge has to be non-zero");
assert.equal(docs.length, 2, "first batch size should be 2");
assert.equal(docs[0].id, resources.doc1.id, "first batch first document should be doc1");
assert.equal(docs[1].id, resources.doc2.id, "batch first second document should be doc2");
const { result: docs2 } = await queryIterator.executeNext();
assert(firstResponse.requestCharge > 0, "RequestCharge has to be non-zero");
assert.equal(firstResponse.resources.length, 2, "first batch size should be 2");
assert.equal(firstResponse.resources[0].id, resources.doc1.id, "first batch first document should be doc1");
assert.equal(firstResponse.resources[1].id, resources.doc2.id, "batch first second document should be doc2");
const { resources: docs2 } = await queryIterator.fetchNext();
assert.equal(docs2.length, 1, "second batch size is unexpected");
assert.equal(docs2[0].id, resources.doc3.id, "second batch element should be doc3");

// validate Iterator.executeNext with continuation token
queryIterator = resources.container.items.readAll({
maxItemCount: 2,
continuation: headers[Constants.HttpHeaders.Continuation] as string
continuation: firstResponse.continuation as string
});
const { result: docsWithContinuation, headers: headersWithContinuation } = await queryIterator.executeNext();
assert(
headersWithContinuation !== undefined,
"executeNext should pass headers as the third parameter to the callback"
);
assert(headersWithContinuation[Constants.HttpHeaders.RequestCharge] > 0, "RequestCharge has to be non-zero");
assert.equal(docsWithContinuation.length, 1, "second batch size with continuation token is unexpected");
assert.equal(docsWithContinuation[0].id, resources.doc3.id, "second batch element should be doc3");
const secondResponse = await queryIterator.fetchNext();
assert(secondResponse.requestCharge > 0, "RequestCharge has to be non-zero");
assert.equal(secondResponse.resources.length, 1, "second batch size with continuation token is unexpected");
assert.equal(secondResponse.resources[0].id, resources.doc3.id, "second batch element should be doc3");
};

it("nativeApi validate QueryIterator iterator toArray name based", async function() {
@@ -213,10 +172,6 @@ describe("NodeJS CRUD Tests", function() {
await queryIteratorForEachTest();
});

it("nativeApi validate queryIterator nextItem and hasMoreResults name based", async function() {
await queryIteratorNextAndMoreTest();
});

it("nativeApi validate queryIterator iterator executeNext name based", async function() {
await queryIteratorExecuteNextTest();
});
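
// A minimal sketch of the v3 paging surface validated above, assuming `container` holds a few items:
// fetchNext() replaces executeNext()/nextItem(), and the response itself carries `resources`,
// `requestCharge`, and `continuation` rather than a separate headers bag.
const pagedIterator = container.items.readAll({ maxItemCount: 2 });
const firstPage = await pagedIterator.fetchNext();
assert(firstPage.requestCharge > 0);
const resumedIterator = container.items.readAll({ maxItemCount: 2, continuation: firstPage.continuation });
// getAsyncIterator() now yields whole pages, each exposing its documents on `resources`.
for await (const page of resumedIterator.getAsyncIterator()) {
  assert(Array.isArray(page.resources));
}
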
@ -32,7 +32,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
]
|
||||
};
|
||||
const entropy = Math.floor(Math.random() * 10000);
|
||||
const { body: containerDef } = await database.containers.create({
|
||||
const { resource: containerDef } = await database.containers.create({
|
||||
id: `sample container${entropy}`,
|
||||
indexingPolicy
|
||||
});
|
||||
|
@ -56,7 +56,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
await createOrUpsertItem(container, location2, undefined, isUpsertTest);
|
||||
const query =
|
||||
"SELECT * FROM root WHERE (ST_DISTANCE(root.Location, {type: 'Point', coordinates: [20.1, 20]}) < 20000) ";
|
||||
const { result: results } = await container.items.query(query).toArray();
|
||||
const { resources: results } = await container.items.query(query).fetchAll();
|
||||
assert.equal(1, results.length);
|
||||
assert.equal("location1", results[0].id);
|
||||
} catch (err) {
|
||||
|
|
|
@ -19,7 +19,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
it("nativeApi Should do sproc CRUD operations successfully with create/replace", async function() {
|
||||
// read sprocs
|
||||
const { result: sprocs } = await container.storedProcedures.readAll().toArray();
|
||||
const { resources: sprocs } = await container.storedProcedures.readAll().fetchAll();
|
||||
assert.equal(sprocs.constructor, Array, "Value should be an array");
|
||||
|
||||
// create a sproc
|
||||
|
@ -29,13 +29,13 @@ describe("NodeJS CRUD Tests", function() {
|
|||
body: "function () { const x = 10; }"
|
||||
};
|
||||
|
||||
const { body: sproc } = await container.storedProcedures.create(sprocDefinition);
|
||||
const { resource: sproc } = await container.storedProcedures.create(sprocDefinition);
|
||||
|
||||
assert.equal(sproc.id, sprocDefinition.id);
|
||||
assert.equal(sproc.body, "function () { const x = 10; }");
|
||||
|
||||
// read sprocs after creation
|
||||
const { result: sprocsAfterCreation } = await container.storedProcedures.readAll().toArray();
|
||||
const { resources: sprocsAfterCreation } = await container.storedProcedures.readAll().fetchAll();
|
||||
assert.equal(
|
||||
sprocsAfterCreation.length,
|
||||
beforeCreateSprocsCount + 1,
|
||||
|
@ -46,19 +46,19 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const querySpec = {
|
||||
query: "SELECT * FROM root r"
|
||||
};
|
||||
const { result: queriedSprocs } = await container.storedProcedures.query(querySpec).toArray();
|
||||
const { resources: queriedSprocs } = await container.storedProcedures.query(querySpec).fetchAll();
|
||||
assert(queriedSprocs.length > 0, "number of sprocs for the query should be > 0");
|
||||
|
||||
// replace sproc
|
||||
// prettier-ignore
|
||||
sproc.body = function() { const x = 20; };
|
||||
const { body: replacedSproc } = await container.storedProcedure(sproc.id).replace(sproc);
|
||||
sproc.body = function () { const x = 20; };
|
||||
const { resource: replacedSproc } = await container.storedProcedure(sproc.id).replace(sproc);
|
||||
|
||||
assert.equal(replacedSproc.id, sproc.id);
|
||||
assert.equal(replacedSproc.body, "function () { const x = 20; }");
|
||||
|
||||
// read sproc
|
||||
const { body: sprocAfterReplace } = await container.storedProcedure(replacedSproc.id).read();
|
||||
const { resource: sprocAfterReplace } = await container.storedProcedure(replacedSproc.id).read();
|
||||
assert.equal(replacedSproc.id, sprocAfterReplace.id);
|
||||
|
||||
// delete sproc
|
||||
|
@ -76,7 +76,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
it("nativeApi Should do sproc CRUD operations successfully name based with upsert", async function() {
|
||||
// read sprocs
|
||||
const { result: sprocs } = await container.storedProcedures.readAll().toArray();
|
||||
const { resources: sprocs } = await container.storedProcedures.readAll().fetchAll();
|
||||
assert.equal(sprocs.constructor, Array, "Value should be an array");
|
||||
|
||||
// create a sproc
|
||||
|
@ -84,16 +84,16 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const sprocDefinition: StoredProcedureDefinition = {
|
||||
id: "sample sproc",
|
||||
// prettier-ignore
|
||||
body: function() { const x = 10; } // tslint:disable-line:object-literal-shorthand
|
||||
body: function () { const x = 10; } // tslint:disable-line:object-literal-shorthand
|
||||
};
|
||||
|
||||
const { body: sproc } = await container.storedProcedures.upsert(sprocDefinition);
|
||||
const { resource: sproc } = await container.storedProcedures.upsert(sprocDefinition);
|
||||
|
||||
assert.equal(sproc.id, sprocDefinition.id);
|
||||
assert.equal(sproc.body, "function () { const x = 10; }");
|
||||
|
||||
// read sprocs after creation
|
||||
const { result: sprocsAfterCreation } = await container.storedProcedures.readAll().toArray();
|
||||
const { resources: sprocsAfterCreation } = await container.storedProcedures.readAll().fetchAll();
|
||||
assert.equal(
|
||||
sprocsAfterCreation.length,
|
||||
beforeCreateSprocsCount + 1,
|
||||
|
@ -104,19 +104,19 @@ describe("NodeJS CRUD Tests", function() {
|
|||
const querySpec = {
|
||||
query: "SELECT * FROM root r"
|
||||
};
|
||||
const { result: queriedSprocs } = await container.storedProcedures.query(querySpec).toArray();
|
||||
const { resources: queriedSprocs } = await container.storedProcedures.query(querySpec).fetchAll();
|
||||
assert(queriedSprocs.length > 0, "number of sprocs for the query should be > 0");
|
||||
|
||||
// replace sproc
|
||||
// prettier-ignore
|
||||
sproc.body = function() { const x = 20; };
|
||||
const { body: replacedSproc } = await container.storedProcedures.upsert(sproc);
|
||||
sproc.body = function () { const x = 20; };
|
||||
const { resource: replacedSproc } = await container.storedProcedures.upsert(sproc);
|
||||
|
||||
assert.equal(replacedSproc.id, sproc.id);
|
||||
assert.equal(replacedSproc.body, "function () { const x = 20; }");
|
||||
|
||||
// read sproc
|
||||
const { body: sprocAfterReplace } = await container.storedProcedure(replacedSproc.id).read();
|
||||
const { resource: sprocAfterReplace } = await container.storedProcedure(replacedSproc.id).read();
|
||||
assert.equal(replacedSproc.id, sprocAfterReplace.id);
|
||||
|
||||
// delete sproc
|
||||
|
@ -186,15 +186,15 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// tslint:enable:no-string-throw
|
||||
// tslint:enable:object-literal-shorthand
|
||||
|
||||
const { body: retrievedSproc } = await container.storedProcedures.create(sproc1);
|
||||
const { body: result } = await container.storedProcedure(retrievedSproc.id).execute();
|
||||
const { resource: retrievedSproc } = await container.storedProcedures.create(sproc1);
|
||||
const { resource: result } = await container.storedProcedure(retrievedSproc.id).execute();
|
||||
assert.equal(result, 999);
|
||||
|
||||
const { body: retrievedSproc2 } = await container.storedProcedures.create(sproc2);
|
||||
const { body: result2 } = await container.storedProcedure(retrievedSproc2.id).execute();
|
||||
const { resource: retrievedSproc2 } = await container.storedProcedures.create(sproc2);
|
||||
const { resource: result2 } = await container.storedProcedure(retrievedSproc2.id).execute();
|
||||
assert.equal(result2, 123456789);
|
||||
const { body: retrievedSproc3 } = await container.storedProcedures.create(sproc3);
|
||||
const { body: result3 } = await container.storedProcedure(retrievedSproc3.id).execute([{ temp: "so" }]);
|
||||
const { resource: retrievedSproc3 } = await container.storedProcedures.create(sproc3);
|
||||
const { resource: result3 } = await container.storedProcedure(retrievedSproc3.id).execute([{ temp: "so" }]);
|
||||
assert.equal(result3, "aso");
|
||||
});
|
||||
|
||||
|
@@ -245,15 +245,15 @@ describe("NodeJS CRUD Tests", function() {
// tslint:enable:no-string-throw
// tslint:enable:object-literal-shorthand

const { body: retrievedSproc } = await container.storedProcedures.upsert(sproc1);
const { body: result } = await container.storedProcedure(retrievedSproc.id).execute();
const { resource: retrievedSproc } = await container.storedProcedures.upsert(sproc1);
const { resource: result } = await container.storedProcedure(retrievedSproc.id).execute();
assert.equal(result, 999);

const { body: retrievedSproc2 } = await container.storedProcedures.upsert(sproc2);
const { body: result2 } = await container.storedProcedure(retrievedSproc2.id).execute();
const { resource: retrievedSproc2 } = await container.storedProcedures.upsert(sproc2);
const { resource: result2 } = await container.storedProcedure(retrievedSproc2.id).execute();
assert.equal(result2, 123456789);
const { body: retrievedSproc3 } = await container.storedProcedures.upsert(sproc3);
const { body: result3 } = await container.storedProcedure(retrievedSproc3.id).execute([{ temp: "so" }]);
const { resource: retrievedSproc3 } = await container.storedProcedures.upsert(sproc3);
const { resource: result3 } = await container.storedProcedure(retrievedSproc3.id).execute([{ temp: "so" }]);
assert.equal(result3, "aso");
});
});
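
// A minimal sketch of the v3 stored-procedure surface used above, assuming `container` already exists;
// the sproc id and body below are placeholders. Both create/upsert and execute now return their value on `resource`.
const { resource: exampleSproc } = await container.storedProcedures.create({
  id: "example sproc",
  body: "function () { getContext().getResponse().setBody(999); }"
});
const { resource: exampleResult } = await container.storedProcedure(exampleSproc.id).execute();
assert.equal(exampleResult, 999);
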
@ -268,7 +268,9 @@ describe("NodeJS CRUD Tests", function() {
|
|||
partitionKey: { paths: ["/" + partitionKey], kind: PartitionKind.Hash }
|
||||
};
|
||||
|
||||
const { body: containerResult } = await database.containers.create(containerDefinition, { offerThroughput: 12000 });
|
||||
const { resource: containerResult } = await database.containers.create(containerDefinition, {
|
||||
offerThroughput: 12000
|
||||
});
|
||||
const container = await database.container(containerResult.id);
|
||||
|
||||
// tslint:disable:no-var-keyword
|
||||
|
@ -315,13 +317,13 @@ describe("NodeJS CRUD Tests", function() {
|
|||
];
|
||||
|
||||
const returnedDocuments = await bulkInsertItems(container, documents);
|
||||
const { body: sproc } = await container.storedProcedures.create(querySproc);
|
||||
const { body: result } = await container.storedProcedure(sproc.id).execute([], { partitionKey: null });
|
||||
const { resource: sproc } = await container.storedProcedures.create(querySproc);
|
||||
const { resource: result } = await container.storedProcedure(sproc.id).execute([], { partitionKey: null });
|
||||
assert(result !== undefined);
|
||||
assert.equal(result.length, 1);
|
||||
assert.equal(JSON.stringify(result[0]), JSON.stringify(documents[1]));
|
||||
|
||||
const { body: result2 } = await container.storedProcedure(sproc.id).execute(null, { partitionKey: 1 });
|
||||
const { resource: result2 } = await container.storedProcedure(sproc.id).execute(null, { partitionKey: 1 });
|
||||
assert(result2 !== undefined);
|
||||
assert.equal(result2.length, 1);
|
||||
assert.equal(JSON.stringify(result2[0]), JSON.stringify(documents[4]));
|
||||
|
@ -331,7 +333,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// create database
|
||||
const database = await getTestDatabase("sproc test database");
|
||||
// create container
|
||||
const { body: containerResult } = await database.containers.create({ id: "sample container" });
|
||||
const { resource: containerResult } = await database.containers.create({ id: "sample container" });
|
||||
|
||||
const container = await database.container(containerResult.id);
|
||||
|
||||
|
@ -364,20 +366,20 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// tslint:enable:one-line
|
||||
// tslint:enable:object-literal-shorthand
|
||||
|
||||
const { body: retrievedSproc } = await container.storedProcedures.create(sproc1);
|
||||
const { body: result1, headers: headers1 } = await container.storedProcedure(retrievedSproc.id).execute();
|
||||
const { resource: retrievedSproc } = await container.storedProcedures.create(sproc1);
|
||||
const { resource: result1, headers: headers1 } = await container.storedProcedure(retrievedSproc.id).execute();
|
||||
assert.equal(result1, "Success!");
|
||||
assert.equal(headers1[Constants.HttpHeaders.ScriptLogResults], undefined);
|
||||
|
||||
let requestOptions = { enableScriptLogging: true };
|
||||
const { body: result2, headers: headers2 } = await container
|
||||
const { resource: result2, headers: headers2 } = await container
|
||||
.storedProcedure(retrievedSproc.id)
|
||||
.execute([], requestOptions);
|
||||
assert.equal(result2, "Success!");
|
||||
assert.equal(headers2[Constants.HttpHeaders.ScriptLogResults], encodeURIComponent("The value of x is 1."));
|
||||
|
||||
requestOptions = { enableScriptLogging: false };
|
||||
const { body: result3, headers: headers3 } = await container
|
||||
const { resource: result3, headers: headers3 } = await container
|
||||
.storedProcedure(retrievedSproc.id)
|
||||
.execute([], requestOptions);
|
||||
assert.equal(result3, "Success!");
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
import assert from "assert";
|
||||
import { TriggerOperation, TriggerType } from "../..";
|
||||
import { Container, TriggerDefinition } from "../../client";
|
||||
import { getTestContainer, removeAllDatabases } from "../common/TestHelpers";
|
||||
import { TriggerDefinition } from "../../client";
|
||||
import { PrivateContainer } from "../common/PrivateContainer";
|
||||
import { createPrivateContainer, getTestContainer, removeAllDatabases } from "../common/TestHelpers";
|
||||
|
||||
const notFoundErrorCode = 404;
|
||||
|
||||
|
@ -10,17 +11,17 @@ declare var getContext: any;
|
|||
|
||||
describe("NodeJS CRUD Tests", function() {
|
||||
this.timeout(process.env.MOCHA_TIMEOUT || 10000);
|
||||
let container: Container;
|
||||
let container: PrivateContainer;
|
||||
|
||||
beforeEach(async function() {
|
||||
await removeAllDatabases();
|
||||
container = await getTestContainer("trigger container");
|
||||
container = createPrivateContainer(await getTestContainer("trigger container"));
|
||||
});
|
||||
|
||||
describe("Validate Trigger CRUD", function() {
|
||||
it("nativeApi Should do trigger CRUD operations successfully name based", async function() {
|
||||
// read triggers
|
||||
const { result: triggers } = await container.triggers.readAll().toArray();
|
||||
const { resources: triggers } = await container.triggers.readAll().fetchAll();
|
||||
assert.equal(Array.isArray(triggers), true);
|
||||
|
||||
// create a trigger
|
||||
|
@ -36,13 +37,13 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// tslint:enable:no-var-keyword
|
||||
// tslint:enable:prefer-const
|
||||
|
||||
const { body: trigger } = await container.triggers.create(triggerDefinition);
|
||||
const { resource: trigger } = await container.triggers.create(triggerDefinition);
|
||||
|
||||
assert.equal(trigger.id, triggerDefinition.id);
|
||||
assert.equal(trigger.body, "serverScript() { var x = 10; }");
|
||||
|
||||
// read triggers after creation
|
||||
const { result: triggersAfterCreation } = await container.triggers.readAll().toArray();
|
||||
const { resources: triggersAfterCreation } = await container.triggers.readAll().fetchAll();
|
||||
assert.equal(
|
||||
triggersAfterCreation.length,
|
||||
beforeCreateTriggersCount + 1,
|
||||
|
@ -59,19 +60,19 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await container.triggers.query(querySpec).toArray();
|
||||
const { resources: results } = await container.triggers.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
// replace trigger
|
||||
// prettier-ignore
|
||||
trigger.body = function() { const x = 20; };
|
||||
const { body: replacedTrigger } = await container.trigger(trigger.id).replace(trigger);
|
||||
trigger.body = function () { const x = 20; };
|
||||
const { resource: replacedTrigger } = await container.trigger(trigger.id).replace(trigger);
|
||||
|
||||
assert.equal(replacedTrigger.id, trigger.id);
|
||||
assert.equal(replacedTrigger.body, "function () { const x = 20; }");
|
||||
|
||||
// read trigger
|
||||
const { body: triggerAfterReplace } = await container.trigger(replacedTrigger.id).read();
|
||||
const { resource: triggerAfterReplace } = await container.trigger(replacedTrigger.id).read();
|
||||
assert.equal(replacedTrigger.id, triggerAfterReplace.id);
|
||||
|
||||
// delete trigger
|
||||
|
@ -88,7 +89,7 @@ describe("NodeJS CRUD Tests", function() {
|
|||
|
||||
it("nativeApi Should do trigger CRUD operations successfully name based with upsert", async function() {
|
||||
// read triggers
|
||||
const { result: triggers } = await container.triggers.readAll().toArray();
|
||||
const { resources: triggers } = await container.triggers.readAll().fetchAll();
|
||||
assert.equal(Array.isArray(triggers), true);
|
||||
|
||||
// create a trigger
|
||||
|
@ -104,13 +105,13 @@ describe("NodeJS CRUD Tests", function() {
|
|||
// tslint:enable:no-var-keyword
|
||||
// tslint:enable:prefer-const
|
||||
|
||||
const { body: trigger } = await container.triggers.upsert(triggerDefinition);
|
||||
const { resource: trigger } = await container.triggers.upsert(triggerDefinition);
|
||||
|
||||
assert.equal(trigger.id, triggerDefinition.id);
|
||||
assert.equal(trigger.body, "serverScript() { var x = 10; }");
|
||||
|
||||
// read triggers after creation
|
||||
const { result: triggersAfterCreation } = await container.triggers.readAll().toArray();
|
||||
const { resources: triggersAfterCreation } = await container.triggers.readAll().fetchAll();
|
||||
assert.equal(
|
||||
triggersAfterCreation.length,
|
||||
beforeCreateTriggersCount + 1,
|
||||
|
@ -127,19 +128,19 @@ describe("NodeJS CRUD Tests", function() {
|
|||
}
|
||||
]
|
||||
};
|
||||
const { result: results } = await container.triggers.query(querySpec).toArray();
|
||||
const { resources: results } = await container.triggers.query(querySpec).fetchAll();
|
||||
assert(results.length > 0, "number of results for the query should be > 0");
|
||||
|
||||
// replace trigger
|
||||
// prettier-ignore
|
||||
trigger.body = function() { const x = 20; };
|
||||
const { body: replacedTrigger } = await container.triggers.upsert(trigger);
|
||||
trigger.body = function () { const x = 20; };
|
||||
const { resource: replacedTrigger } = await container.triggers.upsert(trigger);
|
||||
|
||||
assert.equal(replacedTrigger.id, trigger.id);
|
||||
assert.equal(replacedTrigger.body, "function () { const x = 20; }");
|
||||
|
||||
// read trigger
|
||||
const { body: triggerAfterReplace } = await container.trigger(replacedTrigger.id).read();
|
||||
const { resource: triggerAfterReplace } = await container.trigger(replacedTrigger.id).read();
|
||||
assert.equal(replacedTrigger.id, triggerAfterReplace.id);
|
||||
|
||||
// delete trigger
|
||||
|
@ -229,27 +230,27 @@ describe("NodeJS CRUD Tests", function() {
|
|||
await container.triggers.create(trigger);
|
||||
}
|
||||
// create document
|
||||
const { body: document } = await container.items.create(
|
||||
const { resource: document } = await container.items.create(
|
||||
{ id: "doc1", key: "value" },
|
||||
{ preTriggerInclude: "t1" }
|
||||
);
|
||||
assert.equal(document.id, "DOC1t1", "name should be capitalized");
|
||||
const { body: document2 } = await container.items.create(
|
||||
const { resource: document2 } = await container.items.create(
|
||||
{ id: "doc2", key2: "value2" },
|
||||
{ preTriggerInclude: "t2" }
|
||||
);
|
||||
assert.equal(document2.id, "doc2", "name shouldn't change");
|
||||
const { body: document3 } = await container.items.create(
|
||||
const { resource: document3 } = await container.items.create(
|
||||
{ id: "Doc3", prop: "empty" },
|
||||
{ preTriggerInclude: "t3" }
|
||||
);
|
||||
assert.equal(document3.id, "doc3t3");
|
||||
const { body: document4 } = await container.items.create(
|
||||
const { resource: document4 } = await container.items.create(
|
||||
{ id: "testing post trigger" },
|
||||
{ postTriggerInclude: "response1", preTriggerInclude: "t1" }
|
||||
);
|
||||
assert.equal(document4.id, "TESTING POST TRIGGERt1");
|
||||
const { body: document5, headers } = await container.items.create(
|
||||
const { resource: document5, headers } = await container.items.create(
|
||||
{ id: "responseheaders" },
|
||||
{ preTriggerInclude: "t1" }
|
||||
);
|
||||
|
@@ -267,27 +268,27 @@ describe("NodeJS CRUD Tests", function() {
await container.triggers.upsert(trigger);
}
// create document
const { body: document } = await container.items.upsert(
const { resource: document } = await container.items.upsert(
{ id: "doc1", key: "value" },
{ preTriggerInclude: "t1" }
);
assert.equal(document.id, "DOC1t1", "name should be capitalized");
const { body: document2 } = await container.items.upsert(
const { resource: document2 } = await container.items.upsert(
{ id: "doc2", key2: "value2" },
{ preTriggerInclude: "t2" }
);
assert.equal(document2.id, "doc2", "name shouldn't change");
const { body: document3 } = await container.items.upsert(
const { resource: document3 } = await container.items.upsert(
{ id: "Doc3", prop: "empty" },
{ preTriggerInclude: "t3" }
);
assert.equal(document3.id, "doc3t3");
const { body: document4 } = await container.items.upsert(
const { resource: document4 } = await container.items.upsert(
{ id: "testing post trigger" },
{ postTriggerInclude: "response1", preTriggerInclude: "t1" }
);
assert.equal(document4.id, "TESTING POST TRIGGERt1");
const { body: document5, headers } = await container.items.upsert(
const { resource: document5, headers } = await container.items.upsert(
{ id: "responseheaders" },
{ preTriggerInclude: "t1" }
);
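
// A minimal sketch of the pattern above, assuming triggers named "t1" and "response1" are already registered
// on `container`: item writes still accept trigger options, and the response now pairs `resource` with `headers`.
const { resource: triggeredDoc, headers: triggeredHeaders } = await container.items.upsert(
  { id: "example doc" },
  { preTriggerInclude: "t1", postTriggerInclude: "response1" }
);
assert(triggeredDoc.id !== undefined);
assert(triggeredHeaders !== undefined);
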
@@ -52,7 +52,7 @@ describe("NodeJS CRUD Tests", function() {
 id: "sample container1",
 defaultTtl: 5
 };
-const { body: containerResult } = await database.containers.create(containerDefinition);
+const { resource: containerResult } = await database.containers.create(containerDefinition);

 assert.equal(containerDefinition.defaultTtl, containerResult.defaultTtl);
 const container = database.container(containerResult.id);
@@ -89,7 +89,7 @@ describe("NodeJS CRUD Tests", function() {
 }

 async function checkItemExists(container: Container, createdItem: any) {
-const { body: readItem } = await container.item(createdItem.id).read();
+const { resource: readItem } = await container.item(createdItem.id).read();
 assert.equal(readItem.ttl, createdItem.ttl);
 }
@@ -106,7 +106,7 @@ describe("NodeJS CRUD Tests", function() {
 itemDefinition.id = "doc4";
 itemDefinition.ttl = 8;

-const { body: doc } = await container.items.create(itemDefinition);
+const { resource: doc } = await container.items.create(itemDefinition);
 await sleep(6000);
 await positiveDefaultTtlStep4(container, doc);
 }
@@ -117,7 +117,7 @@ describe("NodeJS CRUD Tests", function() {
 itemDefinition.id = "doc3";
 itemDefinition.ttl = 2;

-const { body: doc } = await container.items.create(itemDefinition);
+const { resource: doc } = await container.items.create(itemDefinition);
 await sleep(4000);
 await positiveDefaultTtlStep3(container, doc, itemDefinition);
 }
@@ -128,7 +128,7 @@ describe("NodeJS CRUD Tests", function() {
 itemDefinition.id = "doc2";
 itemDefinition.ttl = -1;

-const { body: doc } = await container.items.create(itemDefinition);
+const { resource: doc } = await container.items.create(itemDefinition);
 await sleep(5000);
 await positiveDefaultTtlStep2(container, doc, itemDefinition);
 }
@@ -141,7 +141,7 @@ describe("NodeJS CRUD Tests", function() {
 defaultTtl: 5
 };

-const { body: containerResult } = await database.containers.create(containerDefinition);
+const { resource: containerResult } = await database.containers.create(containerDefinition);

 const container = await database.container(containerResult.id);
@@ -151,7 +151,7 @@ describe("NodeJS CRUD Tests", function() {
 key: "value"
 };

-const { body: createdItem } = await container.items.create(itemDefinition);
+const { resource: createdItem } = await container.items.create(itemDefinition);
 await sleep(7000);
 await positiveDefaultTtlStep1(container, createdItem, itemDefinition);
 });
@@ -166,10 +166,10 @@ describe("NodeJS CRUD Tests", function() {
 await checkItemGone(container, createdItem3);

 // The Items with id doc1 and doc2 will never expire
-const { body: readItem1 } = await container.item(createdItem1.id).read();
+const { resource: readItem1 } = await container.item(createdItem1.id).read();
 assert.equal(readItem1.id, createdItem1.id);

-const { body: readItem2 } = await container.item(createdItem2.id).read();
+const { resource: readItem2 } = await container.item(createdItem2.id).read();
 assert.equal(readItem2.id, createdItem2.id);
 }
@@ -181,7 +181,7 @@ describe("NodeJS CRUD Tests", function() {
 defaultTtl: -1
 };

-const { body: createdContainer } = await database.containers.create(containerDefinition);
+const { resource: createdContainer } = await database.containers.create(containerDefinition);

 const container = await database.container(createdContainer.id);
@@ -192,18 +192,18 @@ describe("NodeJS CRUD Tests", function() {
 };

 // the created Item 's ttl value would be -1 inherited from the container' s defaultTtl and this Item will never expire
-const { body: createdItem1 } = await container.items.create(itemDefinition);
+const { resource: createdItem1 } = await container.items.create(itemDefinition);

 // This Item is also set to never expire explicitly
 itemDefinition.id = "doc2";
 itemDefinition.ttl = -1;

-const { body: createdItem2 } = await container.items.create(itemDefinition);
+const { resource: createdItem2 } = await container.items.create(itemDefinition);

 itemDefinition.id = "doc3";
 itemDefinition.ttl = 2;

-const { body: createdItem3 } = await container.items.create(itemDefinition);
+const { resource: createdItem3 } = await container.items.create(itemDefinition);
 await sleep(4000);
 await minusOneDefaultTtlStep1(container, createdItem1, createdItem2, createdItem3);
 });
@@ -213,7 +213,7 @@ describe("NodeJS CRUD Tests", function() {

 const containerDefinition = { id: "sample container" };

-const { body: createdContainer } = await database.containers.create(containerDefinition);
+const { resource: createdContainer } = await database.containers.create(containerDefinition);

 const container = await database.container(createdContainer.id);
@@ -224,7 +224,7 @@ describe("NodeJS CRUD Tests", function() {
 ttl: 5
 };

-const { body: createdItem } = await container.items.create(itemDefinition);
+const { resource: createdItem } = await container.items.create(itemDefinition);

 // Created Item still exists even after ttl time has passed since the TTL is disabled at container level(no defaultTtl property defined)
 await sleep(7000);
@@ -240,7 +240,7 @@ describe("NodeJS CRUD Tests", function() {
 // the upserted Item should be gone now after 10 secs from the last write(upsert) of the Item
 await checkItemGone(container, upsertedItem);
 const query = "SELECT * FROM root r";
-const { result: results } = await container.items.query(query).toArray();
+const { resources: results } = await container.items.query(query).fetchAll();
 assert.equal(results.length, 0);

 // Use a container definition without defaultTtl to disable ttl at container level
@@ -250,7 +250,7 @@ describe("NodeJS CRUD Tests", function() {

 itemDefinition.id = "doc2";

-const { body: createdItem } = await container.items.create(itemDefinition);
+const { resource: createdItem } = await container.items.create(itemDefinition);
 await sleep(5000);
 await miscCasesStep4(container, createdItem, itemDefinition);
 }
@@ -258,7 +258,7 @@ describe("NodeJS CRUD Tests", function() {
 async function miscCasesStep2(container: Container, itemDefinition: any) {
 // Upsert the Item after 3 secs to reset the Item 's ttl
 itemDefinition.key = "value2";
-const { body: upsertedItem } = await container.items.upsert(itemDefinition);
+const { resource: upsertedItem } = await container.items.upsert(itemDefinition);
 await sleep(7000);
 // Upserted Item still exists after (3+7)10 secs from Item creation time( with container 's defaultTtl set to 8) since it' s ttl was reset after 3 secs by upserting it
 await checkItemExists(container, upsertedItem);
@@ -270,7 +270,7 @@ describe("NodeJS CRUD Tests", function() {
 // the created Item should be gone now as the ttl time expired
 await checkItemGone(container, createdItem);
 // We can create a Item with the same id after the ttl time has expired
-const { body: doc } = await container.items.create(itemDefinition);
+const { resource: doc } = await container.items.create(itemDefinition);
 assert.equal(itemDefinition.id, doc.id);
 await sleep(3000);
 await miscCasesStep2(container, itemDefinition);
@@ -284,7 +284,7 @@ describe("NodeJS CRUD Tests", function() {
 defaultTtl: 8
 };

-const { body: containerResult } = await database.containers.create(containerDefinition);
+const { resource: containerResult } = await database.containers.create(containerDefinition);

 const container = await database.container(containerResult.id);
@@ -294,7 +294,7 @@ describe("NodeJS CRUD Tests", function() {
 key: "value"
 };

-const { body: createdItem } = await container.items.create(itemDefinition);
+const { resource: createdItem } = await container.items.create(itemDefinition);

 await sleep(10000);
 await miscCasesStep1(container, createdItem, itemDefinition);
@@ -1,7 +1,7 @@
 import assert from "assert";
-import { Container } from "../..";
 import { UserDefinedFunctionDefinition } from "../../client";
-import { getTestDatabase, removeAllDatabases } from "../common/TestHelpers";
+import { PrivateContainer } from "../common/PrivateContainer";
+import { createPrivateContainer, getTestDatabase, removeAllDatabases } from "../common/TestHelpers";

 const containerId = "sample container";
@@ -13,7 +13,7 @@ describe("NodeJS CRUD Tests", function() {
 });

 describe("User Defined Function", function() {
-let container: Container;
+let container: PrivateContainer;

 beforeEach(async function() {
 // create database
@@ -22,10 +22,10 @@ describe("NodeJS CRUD Tests", function() {
 // create container
 await database.containers.create({ id: containerId });

-container = await database.container(containerId);
+container = createPrivateContainer(await database.container(containerId));
 });
 it("nativeApi Should do UDF CRUD operations successfully", async function() {
-const { result: udfs } = await container.userDefinedFunctions.readAll().toArray();
+const { resources: udfs } = await container.userDefinedFunctions.readAll().fetchAll();

 // create a udf
 const beforeCreateUdfsCount = udfs.length;
@@ -35,13 +35,13 @@ describe("NodeJS CRUD Tests", function() {
 };

 // TODO also handle upsert case
-const { body: udf } = await container.userDefinedFunctions.create(udfDefinition);
+const { resource: udf } = await container.userDefinedFunctions.create(udfDefinition);

 assert.equal(udf.id, udfDefinition.id);
 assert.equal(udf.body, "function () { const x = 10; }");

 // read udfs after creation
-const { result: udfsAfterCreate } = await container.userDefinedFunctions.readAll().toArray();
+const { resources: udfsAfterCreate } = await container.userDefinedFunctions.readAll().fetchAll();
 assert.equal(udfsAfterCreate.length, beforeCreateUdfsCount + 1, "create should increase the number of udfs");

 // query udfs
@@ -54,27 +54,27 @@ describe("NodeJS CRUD Tests", function() {
 }
 ]
 };
-const { result: results } = await container.userDefinedFunctions.query(querySpec).toArray();
+const { resources: results } = await container.userDefinedFunctions.query(querySpec).fetchAll();
 assert(results.length > 0, "number of results for the query should be > 0");

 // replace udf
 udfDefinition.body = "function () { const x = 10; }";
-const { body: replacedUdf } = await container.userDefinedFunction(udfDefinition.id).replace(udfDefinition);
+const { resource: replacedUdf } = await container.userDefinedFunction(udfDefinition.id).replace(udfDefinition);

 assert.equal(replacedUdf.id, udfDefinition.id);
 assert.equal(replacedUdf.body, "function () { const x = 10; }");

 // read udf
-const { body: udfAfterReplace } = await container.userDefinedFunction(replacedUdf.id).read();
+const { resource: udfAfterReplace } = await container.userDefinedFunction(replacedUdf.id).read();

 assert.equal(replacedUdf.id, udfAfterReplace.id);

 // delete udf
-const { body: res } = await container.userDefinedFunction(replacedUdf.id).delete();
+const { resource: res } = await container.userDefinedFunction(replacedUdf.id).delete();

 // read udfs after deletion
 try {
-const { body: badudf } = await container.userDefinedFunction(replacedUdf.id).read();
+const { resource: badudf } = await container.userDefinedFunction(replacedUdf.id).read();
 assert.fail("Must fail to read after delete");
 } catch (err) {
 const notFoundErrorCode = 404;
@@ -83,7 +83,7 @@ describe("NodeJS CRUD Tests", function() {
 });

 it("nativeApi Should do UDF CRUD operations successfully", async function() {
-const { result: udfs } = await container.userDefinedFunctions.readAll().toArray();
+const { resources: udfs } = await container.userDefinedFunctions.readAll().fetchAll();

 // create a udf
 const beforeCreateUdfsCount = udfs.length;
@@ -92,13 +92,13 @@ describe("NodeJS CRUD Tests", function() {
 body: "function () { const x = 10; }"
 };

-const { body: udf } = await container.userDefinedFunctions.upsert(udfDefinition);
+const { resource: udf } = await container.userDefinedFunctions.upsert(udfDefinition);

 assert.equal(udf.id, udfDefinition.id);
 assert.equal(udf.body, "function () { const x = 10; }");

 // read udfs after creation
-const { result: udfsAfterCreate } = await container.userDefinedFunctions.readAll().toArray();
+const { resources: udfsAfterCreate } = await container.userDefinedFunctions.readAll().fetchAll();
 assert.equal(udfsAfterCreate.length, beforeCreateUdfsCount + 1, "create should increase the number of udfs");

 // query udfs
@@ -111,27 +111,27 @@ describe("NodeJS CRUD Tests", function() {
 }
 ]
 };
-const { result: results } = await container.userDefinedFunctions.query(querySpec).toArray();
+const { resources: results } = await container.userDefinedFunctions.query(querySpec).fetchAll();
 assert(results.length > 0, "number of results for the query should be > 0");

 // replace udf
 udfDefinition.body = "function () { const x = 10; }";
-const { body: replacedUdf } = await container.userDefinedFunctions.upsert(udfDefinition);
+const { resource: replacedUdf } = await container.userDefinedFunctions.upsert(udfDefinition);

 assert.equal(replacedUdf.id, udfDefinition.id);
 assert.equal(replacedUdf.body, "function () { const x = 10; }");

 // read udf
-const { body: udfAfterReplace } = await container.userDefinedFunction(replacedUdf.id).read();
+const { resource: udfAfterReplace } = await container.userDefinedFunction(replacedUdf.id).read();

 assert.equal(replacedUdf.id, udfAfterReplace.id);

 // delete udf
-const { body: res } = await container.userDefinedFunction(replacedUdf.id).delete();
+const { resource: res } = await container.userDefinedFunction(replacedUdf.id).delete();

 // read udfs after deletion
 try {
-const { body: badudf } = await container.userDefinedFunction(replacedUdf.id).read();
+const { resource: badudf } = await container.userDefinedFunction(replacedUdf.id).read();
 assert.fail("Must fail to read after delete");
 } catch (err) {
 const notFoundErrorCode = 404;
@@ -13,17 +13,17 @@ describe("NodeJS CRUD Tests", function() {
 const database = await getTestDatabase("Validate user CRUD");

 // list users
-const { result: users } = await database.users.readAll().toArray();
+const { resources: users } = await database.users.readAll().fetchAll();
 assert.equal(users.constructor, Array, "Value should be an array");
 const beforeCreateCount = users.length;

 // create user
-const { body: userDef } = await createOrUpsertUser(database, { id: "new user" }, undefined, isUpsertTest);
+const { resource: userDef } = await createOrUpsertUser(database, { id: "new user" }, undefined, isUpsertTest);
 assert.equal(userDef.id, "new user", "user name error");
 let user = database.user(userDef.id);

 // list users after creation
-const { result: usersAfterCreation } = await database.users.readAll().toArray();
+const { resources: usersAfterCreation } = await database.users.readAll().fetchAll();
 assert.equal(usersAfterCreation.length, beforeCreateCount + 1);

 // query users
@@ -36,7 +36,7 @@ describe("NodeJS CRUD Tests", function() {
 }
 ]
 };
-const { result: results } = await database.users.query(querySpec).toArray();
+const { resources: results } = await database.users.query(querySpec).fetchAll();
 assert(results.length > 0, "number of results for the query should be > 0");

 // replace user
@@ -44,21 +44,21 @@ describe("NodeJS CRUD Tests", function() {
 let replacedUser: UserDefinition;
 if (isUpsertTest) {
 const r = await database.users.upsert(userDef);
-replacedUser = r.body;
+replacedUser = r.resource;
 } else {
 const r = await user.replace(userDef);
-replacedUser = r.body;
+replacedUser = r.resource;
 }
 assert.equal(replacedUser.id, "replaced user", "user name should change");
 assert.equal(userDef.id, replacedUser.id, "user id should stay the same");
 user = database.user(replacedUser.id);

 // read user
-const { body: userAfterReplace } = await user.read();
+const { resource: userAfterReplace } = await user.read();
 assert.equal(replacedUser.id, userAfterReplace.id);

 // delete user
-const { body: res } = await user.delete();
+const { resource: res } = await user.delete();

 // read user after deletion
 try {
@@ -69,7 +69,7 @@ describe("NodeJS Aggregate Query Tests", async function() {

 const validateToArray = async function(queryIterator: QueryIterator<any>, expectedResults: any) {
 try {
-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, expectedResults.length, "invalid number of results");
 assert.equal(queryIterator.hasMoreResults(), false, "hasMoreResults: no more results is left");
 } catch (err) {
@@ -77,59 +77,6 @@ describe("NodeJS Aggregate Query Tests", async function() {
 }
 };

-const validateNextItem = async function(queryIterator: QueryIterator<any>, expectedResults: any) {
-let results: any = [];
-
-try {
-while (results.length < expectedResults.length) {
-const { result: item } = await queryIterator.nextItem();
-if (item === undefined) {
-assert(!queryIterator.hasMoreResults(), "hasMoreResults must signal results exhausted");
-validateResult(results, expectedResults);
-return;
-}
-results = results.concat(item);
-
-if (results.length < expectedResults.length) {
-assert(queryIterator.hasMoreResults(), "hasMoreResults must indicate more results");
-}
-}
-} catch (err) {
-throw err;
-}
-};
-
-const validateNextItemAndCurrentAndHasMoreResults = async function(
-queryIterator: QueryIterator<any>,
-expectedResults: any[]
-) {
-// curent and nextItem recursively invoke each other till queryIterator is exhausted
-////////////////////////////////
-// validate nextItem()
-////////////////////////////////
-
-const results: any[] = [];
-try {
-while (results.length <= expectedResults.length) {
-const { result: item } = await queryIterator.nextItem();
-const { result: currentItem } = await queryIterator.current();
-if (item === undefined) {
-break;
-}
-results.push(item);
-if (results.length < expectedResults.length) {
-assert(queryIterator.hasMoreResults(), "hasMoreResults must indicate more results");
-}
-assert.equal(item, currentItem, "current must give the previously item returned by nextItem");
-}
-
-assert(!queryIterator.hasMoreResults(), "hasMoreResults must signal results exhausted");
-validateResult(results, expectedResults);
-} catch (err) {
-throw err;
-}
-};
-
 const validateExecuteNextAndHasMoreResults = async function(
 queryIterator: QueryIterator<any>,
 options: any,
@@ -140,15 +87,12 @@ describe("NodeJS Aggregate Query Tests", async function() {
 ////////////////////////////////
 const pageSize = options["maxItemCount"];
 const listOfResultPages: any[] = [];
-const listOfHeaders: any[] = [];

 let totalFetchedResults: any[] = [];

 try {
 while (totalFetchedResults.length <= expectedResults.length) {
-const { result: results, headers } = await queryIterator.executeNext();
+const { resources: results } = await queryIterator.fetchNext();
 listOfResultPages.push(results);
-listOfHeaders.push(headers);

 if (results === undefined || totalFetchedResults.length === expectedResults.length) {
 break;
@@ -180,22 +124,23 @@ describe("NodeJS Aggregate Query Tests", async function() {
 }
 };

-const validateForEach = async function(queryIterator: QueryIterator<any>, expectedResults: any[]) {
+const ValidateAsyncIterator = async function(queryIterator: QueryIterator<any>, expectedResults: any[]) {
 ////////////////////////////////
-// validate forEach()
+// validate AsyncIterator()
 ////////////////////////////////

 const results: any[] = [];
-let callbackSingnalledEnd = false;
+let completed = false;
 // forEach uses callbacks still, so just wrap in a promise
-for await (const { result: item } of queryIterator.getAsyncIterator()) {
+for await (const { resources: items } of queryIterator.getAsyncIterator()) {
 // if the previous invocation returned false, forEach must avoid invoking the callback again!
-assert.equal(callbackSingnalledEnd, false, "forEach called callback after the first false returned");
-results.push(item);
+assert.equal(completed, false, "forEach called callback after the first false returned");
+results.push(...items);
 if (results.length === expectedResults.length) {
-callbackSingnalledEnd = true;
+completed = true;
 }
 }
+assert.equal(completed, true, "AsyncIterator should fetch expected number of results");
 validateResult(results, expectedResults);
 };
@@ -207,8 +152,7 @@ describe("NodeJS Aggregate Query Tests", async function() {
 queryIterator.reset();
 await validateExecuteNextAndHasMoreResults(queryIterator, options, expectedResults);
 queryIterator.reset();
-await validateNextItemAndCurrentAndHasMoreResults(queryIterator, expectedResults);
-await validateForEach(queryIterator, expectedResults);
+await ValidateAsyncIterator(queryIterator, expectedResults);
 };

 const generateTestConfigs = function() {
@@ -221,7 +165,8 @@ describe("NodeJS Aggregate Query Tests", async function() {
 expected: testdata.sum / testdata.numberOfDocumentsWithNumbericId,
 condition: util.format("IS_NUMBER(r.%s)", partitionKey)
 },
-{ operator: "AVG", expected: undefined, condition: "true" },
+// TODO: Remove this test since query team says its invalid now
+// { operator: "AVG", expected: undefined, condition: "true" },
 {
 operator: "COUNT",
 expected: testdata.numberOfDocuments,
@@ -233,8 +178,9 @@ describe("NodeJS Aggregate Query Tests", async function() {
 operator: "SUM",
 expected: testdata.sum,
 condition: util.format("IS_NUMBER(r.%s)", partitionKey)
-},
-{ operator: "SUM", expected: undefined, condition: "true" }
+}
+// TODO: Remove this test since query team says its invalid now
+// { operator: "SUM", expected: undefined, condition: "true" }
 ];

 aggregateConfigs.forEach(function(config) {
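Reviewer note: the iterator hunks above capture the query-side surface change — `toArray()` returning `{ result }` becomes `fetchAll()` returning `{ resources }`, `executeNext()` returning `{ result, headers }` becomes `fetchNext()` returning a page with `resources`, and the callback-style `forEach`/`nextItem`/`current` validators are replaced by `getAsyncIterator()`, which yields pages whose `resources` arrays are spread into the accumulated results. A hedged sketch of the post-change surface, assuming the published `@azure/cosmos` package and placeholder connection, database, and container names:

```ts
import { CosmosClient } from "@azure/cosmos";

// Placeholder connection values; in the repo's own tests the client comes from test helpers.
const client = new CosmosClient({ endpoint: "https://localhost:8081", auth: { masterKey: "<key>" } });
const container = client.database("myDb").container("myContainer");

async function queryThreeWays() {
  // 1. Drain everything at once: v2 toArray() -> { result } becomes v3 fetchAll() -> { resources }.
  const { resources: all } = await container.items.query("SELECT * FROM root r").fetchAll();
  console.log(all.length);

  // 2. Page manually: v2 executeNext() -> { result, headers } becomes v3 fetchNext() -> { resources, ... }.
  const pagedIterator = container.items.query("SELECT * FROM root r", { maxItemCount: 10 });
  while (pagedIterator.hasMoreResults()) {
    const { resources: page } = await pagedIterator.fetchNext();
    if (page === undefined) {
      break; // the tests above guard for an undefined page the same way
    }
    console.log(page.length);
  }

  // 3. Async iteration replaces callback-based forEach: each yielded value is a page with `resources`.
  const collected: any[] = [];
  for await (const { resources: items } of container.items.query("SELECT * FROM root r").getAsyncIterator()) {
    collected.push(...items);
  }
  console.log(collected.length);
}

queryThreeWays().catch(console.error);
```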
@@ -31,26 +31,26 @@ describe("Authorization", function() {
 database = container.database;

 // create userReadPermission
-const { body: userDef } = await container.database.users.create(userReadDefinition);
+const { resource: userDef } = await container.database.users.create(userReadDefinition);
 assert.equal(userReadDefinition.id, userDef.id, "userReadPermission is not created properly");
 userReadDefinition = userDef;
 const userRead = container.database.user(userDef.id);

 // give permission to read container, to userReadPermission
 collReadPermission.resource = container.url;
-const { body: readPermission } = await userRead.permissions.create(collReadPermission);
+const { resource: readPermission } = await userRead.permissions.create(collReadPermission);
 assert.equal(readPermission.id, collReadPermission.id, "permission to read coll1 is not created properly");
 collReadPermission = readPermission;

 // create userAllPermission
-const { body: userAllDef } = await container.database.users.create(userAllDefinition);
+const { resource: userAllDef } = await container.database.users.create(userAllDefinition);
 assert.equal(userAllDefinition.id, userAllDef.id, "userAllPermission is not created properly");
 userAllDefinition = userAllDef;
 const userAll = container.database.user(userAllDef.id);

 // create collAllPermission
 collAllPermission.resource = container.url;
-const { body: allPermission } = await userAll.permissions.create(collAllPermission);
+const { resource: allPermission } = await userAll.permissions.create(collAllPermission);
 assert.equal(collAllPermission.id, allPermission.id, "permission to read coll2 is not created properly");
 collAllPermission = allPermission;
 });
@@ -68,7 +68,7 @@ describe("Authorization", function() {
 auth: { resourceTokens: rTokens }
 });

-const { body: coll } = await clientReadPermission
+const { resource: coll } = await clientReadPermission
 .database(database.id)
 .container(container.id)
 .read();
@@ -82,7 +82,7 @@ describe("Authorization", function() {
 });

 // self link must be used to access a resource using permissionFeed
-const { body: coll } = await clientReadPermission
+const { resource: coll } = await clientReadPermission
 .database(database.id)
 .container(container.id)
 .read();
@@ -104,7 +104,7 @@ describe("Authorization", function() {
 });

 it("Accessing document by permissionFeed of parent container", async function() {
-const { body: createdDoc } = await container.items.create({
+const { resource: createdDoc } = await container.items.create({
 id: "document1"
 });
 const clientReadPermission = new CosmosClient({
@@ -113,7 +113,7 @@ describe("Authorization", function() {
 });
 assert.equal("document1", createdDoc.id, "invalid documnet create");

-const { body: readDoc } = await clientReadPermission
+const { resource: readDoc } = await clientReadPermission
 .database(database.id)
 .container(container.id)
 .item(createdDoc.id)
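Reviewer note: the Cross Partition and query-metrics hunks below make the same move for per-page diagnostics — instead of digging the request charge and query metrics out of raw headers (`Constants.HttpHeaders.RequestCharge` / `QueryMetrics`), v3 hands them back on the page returned by `fetchNext()`. A hedged sketch under the same placeholder-connection assumptions; the `populateQueryMetrics` flag is an assumption on my part and is not something this diff shows:

```ts
import { CosmosClient } from "@azure/cosmos";

// Placeholder connection values for illustration.
const client = new CosmosClient({ endpoint: "https://localhost:8081", auth: { masterKey: "<key>" } });
const container = client.database("myDb").container("myContainer");

async function inspectPageDiagnostics() {
  const iterator = container.items.query("SELECT * FROM root r", {
    maxItemCount: 10,
    populateQueryMetrics: true // assumed option; metrics generally need to be requested explicitly
  });

  let totalRequestCharge = 0;
  while (iterator.hasMoreResults()) {
    // v2 read these values out of response headers; v3 surfaces them on the page response itself.
    const { resources, requestCharge, queryMetrics } = await iterator.fetchNext();
    if (resources === undefined) {
      break;
    }
    totalRequestCharge += requestCharge;
    console.log(resources.length, requestCharge, queryMetrics);
  }
  console.log("total RU consumed:", totalRequestCharge);
}

inspectPageDiagnostics().catch(console.error);
```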
@@ -109,7 +109,7 @@ describe("Cross Partition", function() {
 ////////////////////////////////
 options.continuation = undefined;
 try {
-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, expectedOrderIds.length, "invalid number of results");
 assert.equal(queryIterator.hasMoreResults(), false, "hasMoreResults: no more results is left");

@@ -119,58 +119,6 @@ describe("Cross Partition", function() {
 }
 };

-const validateNextItem = async function(queryIterator: QueryIterator<any>, expectedOrderIds: string[]) {
-////////////////////////////////
-// validate nextItem()
-////////////////////////////////
-const results: any[] = [];
-try {
-while (results.length < expectedOrderIds.length) {
-assert(queryIterator.hasMoreResults(), "hasMoreResults must indicate more results");
-const { result: item } = await queryIterator.nextItem();
-if (item === undefined) {
-break;
-}
-results.push(item);
-}
-
-assert(!queryIterator.hasMoreResults(), "hasMoreResults must signal results exhausted");
-validateResults(results, expectedOrderIds);
-} catch (err) {
-throw err;
-}
-};
-
-const validateNextItemAndCurrentAndHasMoreResults = async function(
-queryIterator: QueryIterator<any>,
-expectedOrderIds: string[]
-) {
-// curent and nextItem recursively invoke each other till queryIterator is exhausted
-////////////////////////////////
-// validate nextItem()
-////////////////////////////////
-const results: any[] = [];
-try {
-while (results.length <= expectedOrderIds.length) {
-const { result: currentItem } = await queryIterator.current();
-const { result: item } = await queryIterator.nextItem();
-if (!item) {
-break;
-}
-results.push(item);
-if (results.length < expectedOrderIds.length) {
-assert(queryIterator.hasMoreResults(), "hasMoreResults must indicate more results");
-}
-assert.equal(item, currentItem, "current must give the previously item returned by nextItem");
-}
-
-assert(!queryIterator.hasMoreResults(), "hasMoreResults must signal results exhausted");
-validateResults(results, expectedOrderIds);
-} catch (err) {
-throw err;
-}
-};
-
 const validateExecuteNextAndHasMoreResults = async function(
 options: any,
 queryIterator: QueryIterator<any>,
@@ -184,15 +132,13 @@ describe("Cross Partition", function() {
 ////////////////////////////////

 const listOfResultPages: any[] = [];
-const listOfHeaders: any[] = [];

 let totalFetchedResults: any[] = [];

 try {
 while (totalFetchedResults.length <= expectedOrderIds.length) {
-const { result: results, headers } = await queryIterator.executeNext();
+const { resources: results } = await queryIterator.fetchNext();
 listOfResultPages.push(results);
-listOfHeaders.push(headers);

 if (results === undefined || totalFetchedResults.length === expectedOrderIds.length) {
 break;
@@ -240,28 +186,29 @@ describe("Cross Partition", function() {
 // validate forEach()
 ////////////////////////////////
 const results: any[] = [];
-let callbackSingnalledEnd = false;
+let completed = false;
 // forEach uses callbacks still, so just wrap in a promise
-for await (const { result: item } of queryIterator.getAsyncIterator()) {
+for await (const { resources: items } of queryIterator.getAsyncIterator()) {
 // if the previous invocation returned false, forEach must avoid invoking the callback again!
-assert.equal(callbackSingnalledEnd, false, "forEach called callback after the first false returned");
-results.push(item);
+assert.equal(completed, false, "forEach called callback after the first false returned");
+results.push(...items);
 if (results.length === expectedOrderIds.length) {
-callbackSingnalledEnd = true;
+completed = true;
 }
 }
+assert.equal(completed, true, "AsyncIterator should see all expected results");
 validateResults(results, expectedOrderIds);
 };

 const validateQueryMetrics = async function(queryIterator: QueryIterator<any>) {
 try {
 while (queryIterator.hasMoreResults()) {
-const { result: results, headers } = await queryIterator.executeNext();
+const { resources: results, queryMetrics } = await queryIterator.fetchNext();
 if (results === undefined) {
 break;
 }

-assert.notEqual(headers[Constants.HttpHeaders.QueryMetrics], null);
+assert.notEqual(queryMetrics, null);
 }
 } catch (err) {
 throw err;
@@ -287,7 +234,6 @@ describe("Cross Partition", function() {
 validateExecuteNextWithContinuationToken
 );
 queryIterator.reset();
-await validateNextItemAndCurrentAndHasMoreResults(queryIterator, expectedOrderIds);
 await validateForEach(queryIterator, expectedOrderIds);
 await validateQueryMetrics(queryIterator);
 };
@@ -297,11 +243,10 @@ describe("Cross Partition", function() {
 let totalRequestCharge = 0;

 while (queryIterator.hasMoreResults()) {
-const { result: results, headers } = await queryIterator.executeNext();
-const rc: number = (headers || {})[Constants.HttpHeaders.RequestCharge] as number;
+const { resources: results, requestCharge } = await queryIterator.fetchNext();

 if (counter === 0) {
-assert(rc > 0);
+assert(requestCharge > 0);
 counter += 1;
 }

@@ -309,8 +254,8 @@ describe("Cross Partition", function() {
 assert(totalRequestCharge > 0);
 return;
 } else {
-totalRequestCharge += rc;
-assert(rc >= 0);
+totalRequestCharge += requestCharge;
+assert(requestCharge >= 0);
 }
 }
@@ -660,7 +605,7 @@ describe("Cross Partition", function() {
 // prepare expected behaviour verifier
 const queryIterator = container.items.query(query, options);

-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, topCount);

 // select unique ids
@@ -687,7 +632,7 @@ describe("Cross Partition", function() {
 // prepare expected behaviour verifier
 const queryIterator = container.items.query(query, options);

-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, topCount);

 // select unique ids
@@ -714,7 +659,7 @@ describe("Cross Partition", function() {
 // prepare expected behaviour verifier
 const queryIterator = container.items.query(query, options);

-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, topCount);

 // select unique ids
@@ -745,7 +690,7 @@ describe("Cross Partition", function() {
 // prepare expected behaviour verifier
 const queryIterator = container.items.query(querySpec, options);

-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, topCount);

 // select unique ids
@@ -820,7 +765,7 @@ describe("Cross Partition", function() {
 // prepare expected behaviour verifier
 try {
 const queryIterator = container.items.query(query, options);
-await queryIterator.toArray();
+await queryIterator.fetchAll();
 } catch (err) {
 assert.notEqual(err, undefined);
 }
@@ -867,7 +812,7 @@ describe("Cross Partition", function() {
 };

 const queryIterator = container.items.query(query, options);
-const { result: results } = await queryIterator.toArray();
+const { resources: results } = await queryIterator.fetchAll();
 assert.equal(results.length, documentDefinitions.length);

 let index = 0;
@@ -903,7 +848,7 @@ describe("Cross Partition", function() {

 let firstTime = true;

-const { result } = await queryIterator.current();
+await queryIterator.fetchNext();

 if (firstTime) {
 firstTime = false;
@@ -31,17 +31,17 @@ describe("Create And Read Validation", function() {
 };

 // Create a container inside the database
-const { body: containerDef } = await database.containers.create(containerBody);
+const { resource: containerDef } = await database.containers.create(containerBody);
 const container = database.container(containerDef.id);

 assert.equal(containerDef.id, containerBody.id, "invalid container Id");

 // Add the document in the container
-const { body: doc } = await container.items.create(testDoc);
+const { resource: doc } = await container.items.create(testDoc);
 assert.equal(doc.id, testDoc.id, "invalid document Id");

 // Read the container and see if it matches to the initial document
-const { body: resultDoc } = await container.item(doc.id).read<{ id: string; content: string }>();
+const { resource: resultDoc } = await container.item(doc.id).read<{ id: string; content: string }>();
 assert.equal(testDoc.content, resultDoc.content, "read document result is different from initial document");
 } catch (err) {
 throw err;
@@ -45,7 +45,7 @@ describe("Change Feed Iterator", function() {
 assert.equal(items[0].id, "item2", "should find the newest item, but not the old");
 const item = { id: "item2", name: "xyz" };

-const { body: replaced } = await container.item(item.id).replace(item);
+const { resource: replaced } = await container.item(item.id).replace(item);
 assert.deepEqual(replaced.name, "xyz", "replaced item should be valid");

 // Should continue from last etag
@@ -89,7 +89,7 @@ describe("Change Feed Iterator", function() {
 const item = items[1];
 item.name = "xyz";

-const { body: replaced } = await container.item(item.id).replace(item);
+const { resource: replaced } = await container.item(item.id).replace(item);
 assert.deepEqual(replaced.name, "xyz", "replaced item should be valid");

 // Should continue from last etag
@@ -137,7 +137,7 @@ describe("Change Feed Iterator", function() {
 const item = items[1];
 item.name = "xyz";

-const { body: replaced } = await container.item(item.id).replace(item);
+const { resource: replaced } = await container.item(item.id).replace(item);
 assert.deepEqual(replaced.name, "xyz", "replaced item should be valid");

 // Should continue from last etag
@@ -174,7 +174,7 @@ describe("Change Feed Iterator", function() {
 assert(headers.etag, "change feed response should have etag header");
 assert.equal(items.length, 0, "change feed response should have no items on it initially");

-const { body: itemThatWasCreated } = await container.items.create({
+const { resource: itemThatWasCreated } = await container.items.create({
 id: "item2",
 prop: 1
 });
@@ -259,7 +259,7 @@ describe("Change Feed Iterator", function() {
 const item = items[1];
 item.name = "xyz";

-const { body: replaced } = await container.item(item.id).replace(item);
+const { resource: replaced } = await container.item(item.id).replace(item);
 assert.deepEqual(replaced.name, "xyz", "replaced item should be valid");

 const { result: itemsAfterUpdate } = await iterator.executeNext();
@@ -302,7 +302,7 @@ describe("Change Feed Iterator", function() {
 assert(headers.etag, "change feed response should have etag header");
 assert.equal(items.length, 0, "change feed response should have no items on it initially");

-const { body: itemThatWasCreated, headers: createHeaders } = await container.items.create({
+const { resource: itemThatWasCreated, headers: createHeaders } = await container.items.create({
 id: "item2",
 prop: 1,
 key: "0"
@@ -13,7 +13,7 @@ import { endpoint, masterKey } from "../common/_testConfig";

 before(async function() {
 const client = new CosmosClient({ endpoint, auth: { masterKey } });
-({ body: dbAccount } = await client.getDatabaseAccount());
+({ resource: dbAccount } = await client.getDatabaseAccount());
 // We reverse the order of the preferred locations list to make sure
 // we don't just follow the order we got back from the server
 preferredLocations = dbAccount.readableLocations.map(v => v.name).reverse();
@@ -35,8 +35,8 @@ describe("ResourceLink Trimming of leading and trailing slashes", function() {
 const queryOptions = { partitionKey: "pk" };
 const queryIterator = container.items.query(query, queryOptions);

-const { result } = await queryIterator.toArray();
-assert.equal(result[0]["id"], "myId");
+const { resources } = await queryIterator.fetchAll();
+assert.equal(resources[0]["id"], "myId");
 });
 });

@@ -51,7 +51,10 @@ describe("Test Query Metrics On Single Partition Collection", function() {
 const collectionDefinition = { id: collectionId };
 const collectionOptions = { offerThroughput: 4000 };

-const { body: createdCollectionDef } = await database.containers.create(collectionDefinition, collectionOptions);
+const { resource: createdCollectionDef } = await database.containers.create(
+collectionDefinition,
+collectionOptions
+);
 const createdContainer = database.container(createdCollectionDef.id);

 await createdContainer.items.create(document);
@@ -61,14 +64,14 @@ describe("Test Query Metrics On Single Partition Collection", function() {
 const queryIterator = createdContainer.items.query(query, queryOptions);

 while (queryIterator.hasMoreResults()) {
-const { result: results, headers } = await queryIterator.executeNext();
+const { resources: results, queryMetrics } = await queryIterator.fetchNext();

 if (results === undefined) {
 // no more results
 break;
 }

-assert.notEqual(headers[Constants.HttpHeaders.QueryMetrics]["0"], null);
+assert.notEqual(queryMetrics, null);
 }
 } catch (err) {
 throw err;
@@ -1,85 +0,0 @@
-import assert from "assert";
-import { Constants, CosmosClient, Database } from "../..";
-import { endpoint, masterKey } from "../common/_testConfig";
-import { getTestDatabase, removeAllDatabases } from "../common/TestHelpers";
-
-const client = new CosmosClient({ endpoint, auth: { masterKey } });
-
-// TODO: these tests are all disabled
-
-describe("RU Per Minute", function() {
-let database: Database;
-
-// - removes all the databases,
-// - creates a new database,
-beforeEach(async () => {
-await removeAllDatabases();
-database = await getTestDatabase("RU Per minute");
-});
-
-// - removes all the databases,
-afterEach(async () => {
-await removeAllDatabases();
-});
-
-xit("Create container with RU Per Minute Offer", async function() {
-const containerDefinition = {
-id: "sample col"
-};
-
-const options = {
-offerEnableRUPerMinuteThroughput: true,
-offerVersion: "V2",
-offerThroughput: 400
-};
-
-await database.containers.create(containerDefinition, options);
-const { result: offers } = await client.offers.readAll().toArray();
-assert.equal(offers.length, 1);
-const offer = offers[0];
-
-assert.equal(offer.offerType, "Invalid");
-assert.notEqual(offer.content, undefined);
-assert.equal(offer.content.offerIsRUPerMinuteThroughputEnabled, true);
-});
-
-xit("Create container without RU Per Minute Offer", async function() {
-const containerDefinition = {
-id: "sample col"
-};
-
-const options = {
-offerVersion: "V2",
-offerThroughput: 400
-};
-
-await database.containers.create(containerDefinition, options);
-const { result: offers } = await client.offers.readAll().toArray();
-assert.equal(offers.length, 1);
-const offer = offers[0];
-
-assert.equal(offer.offerType, "Invalid");
-assert.notEqual(offer.content, undefined);
-assert.equal(offer.content.offerIsRUPerMinuteThroughputEnabled, false);
-});
-
-xit("Create container with RU Per Minute Offer and insert Document with disableRUPerMinuteUsage options", async function() {
-const containerDefinition = {
-id: "sample col"
-};
-
-const options = {
-offerEnableRUPerMinuteThroughput: true,
-offerVersion: "V2",
-offerThroughput: 400
-};
-
-await database.containers.create(containerDefinition, options);
-const container = database.container(containerDefinition.id);
-const options2: any = {
-disableRUPerMinuteUsage: true
-};
-const { headers } = await container.items.create({ id: "sample document" }, options2);
-assert(headers[Constants.HttpHeaders.IsRUPerMinuteUsed] !== true);
-});
-});