Merge pull request #370 from Azure/v3

Version 3.0.x Release
This commit is contained in:
Steve Faulkner 2019-07-17 14:37:54 -05:00 коммит произвёл GitHub
Родитель 5cb1628114 0c35292b6a
Коммит 5861d011ad
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
276 изменённых файлов: 9955 добавлений и 12699 удалений

18
.gitignore поставляемый
Просмотреть файл

@ -261,9 +261,15 @@ lib/**
*.tgz
ts-test/package-lock.json
ts-test/package.json
ts-test/*.js
dist-esm
dist-test
dist-esm/
dist-test/
dist-commonjs/
temp/
consumer-test/package-lock.json
consumer-test/package.json
consumer-test/*.js
test/**/*.js
test/**/*.js.map
test/**/*.d.ts
tsconfig.tsbuildinfo
.env

Просмотреть файл

@ -5,4 +5,8 @@ samples
.gitignore
*.code-workspace
*.tgz
ts-test/
tsdoc-metadata.json
consumer-test/
test/
src/
lib/

1
.prettierignore Normal file
Просмотреть файл

@ -0,0 +1 @@
*.d.ts

Просмотреть файл

@ -1,4 +1,4 @@
{
"printWidth": 120,
"tabWidth": 2
}
"printWidth": 120,
"tabWidth": 2
}

16
.vscode/launch.json поставляемый
Просмотреть файл

@ -9,25 +9,27 @@
"request": "launch",
"name": "Mocha Tests",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": ["-u", "tdd", "--colors", "${workspaceFolder}/lib/test/**/*.js", "-g", ".*Location Cache.*"],
"outFiles": ["${workspaceFolder}/dist-esm/**/*.js"],
"preLaunchTask": "npm: compile",
"args": ["-r", "esm", "-r", "test/common/setup.js", "${workspaceFolder}/test/**/*.spec.js"],
"internalConsoleOptions": "openOnSessionStart",
"sourceMaps": true,
"outFiles": ["${workspaceFolder}/lib/**"],
"env": {
"MOCHA_TIMEOUT": "999999"
}
},
"protocol": "inspector"
},
{
"type": "node",
"request": "attach",
"request": "launch",
"name": "Attach by Process ID",
"processId": "${command:PickProcess}"
},
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/samples/TodoApp/bin/www"
"name": "build",
"program": "${workspaceFolder}/node_modules/typescript/bin/tsc",
"args": ["-b --verbose"]
},
{
"type": "node",

3
.vscode/settings.json поставляемый
Просмотреть файл

@ -1,4 +1,5 @@
{
"mocha.files.glob":"test/legacy/**/*.js",
"editor.formatOnSave": true
"editor.formatOnSave": true,
"typescript.tsdk": "node_modules/typescript/lib"
}

Просмотреть файл

@ -12,7 +12,7 @@ const CosmosClient = cosmos.CosmosClient;
const endpoint = "[hostendpoint]"; // Add your endpoint
const masterKey = "[database account masterkey]"; // Add the masterkey of the endpoint
const client = new CosmosClient({ endpoint, auth: { masterKey } });
const client = new CosmosClient({ endpoint, key: masterKey });
const databaseDefinition = { id: "sample database" };
const collectionDefinition = { id: "sample collection" };

24
api-extractor.json Normal file
Просмотреть файл

@ -0,0 +1,24 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/api-extractor.schema.json",
"compiler": {
"configType": "tsconfig",
"rootFolder": "./src"
},
"project": {
"entryPointSourceFile": "../dist-esm/index.d.ts"
},
"validationRules": {
"missingReleaseTags": "allow"
},
"dtsRollup": {
"enabled": true,
"publishFolder": "",
"mainDtsRollupPath": "./dist/index.d.ts"
},
"apiReviewFile": {
"enabled": false
},
"apiJsonFile": {
"enabled": false
}
}

Просмотреть файл

@ -7,13 +7,90 @@ trigger:
- master
jobs:
# - job: Staging
- job: Emulator
pool:
vmImage: windows-2019
steps:
- powershell: |
$localEtag = ""
try {
$localEtag = Get-Content "$env:ProgramFiles\Azure Cosmos DB Emulator\etag.txt"
}
catch {}
Write-Host "Local emulator etag $localEtag"
$headResponse = Invoke-WebRequest 'https://aka.ms/cosmosdb-emulator' -Method 'HEAD'
Write-Host $headResponse.headers
$remoteEtag = $headResponse.headers.ETag
Write-Host "Remote emulator etag $remoteEtag"
if($localEtag -ne $remoteEtag){
Write-Host "Emulator is out of date"
Write-Host "Downloading Cosmos Emulator"
wget "https://aka.ms/cosmosdb-emulator" -outfile "$env:temp\azure-cosmosdb-emulator.msi"
Write-Host "Finished Downloading Cosmos Emulator - $env:temp\azure-cosmosdb-emulator.msi"
dir "$env:temp"
Write-Host "Deleting Azure Cosmos DB Emulator directory"
dir "$env:ProgramFiles\"
rm "$env:ProgramFiles\Azure Cosmos DB Emulator" -Recurse -Force
Write-Host "Directory after deleting"
dir "$env:ProgramFiles\"
choco install lessmsi
choco upgrade lessmsi
Write-Host "Checking directory"
dir "$env:ProgramFiles"
mkdir "$env:ProgramFiles\Azure Cosmos DB Emulator"
lessmsi x "$env:temp\azure-cosmosdb-emulator.msi" "$env:ProgramFiles\Azure Cosmos DB Emulator\"
Set-Content -Path "$env:ProgramFiles\Azure Cosmos DB Emulator\etag.txt" -Value $remoteEtag
dir "$env:ProgramFiles\Azure Cosmos DB Emulator"
Get-Content "$env:ProgramFiles\Azure Cosmos DB Emulator\etag.txt" | Write-Host
} else {
Write-Host "Emulator is already up to date"
}
Write-Host "Starting Comsos DB Emulator"
Start-Process "$env:ProgramFiles\Azure Cosmos DB Emulator\SourceDir\Azure Cosmos DB Emulator\CosmosDB.Emulator.exe" "/NoExplorer /NoUI" -Verb RunAs
displayName: "Refresh and Run Public Cosmos DB Emulator"
- task: NodeTool@0
inputs:
versionSpec: "8.x"
displayName: "Install Node.js"
- script: npm install
displayName: "npm install"
- script: npm run build
displayName: "npm run build"
- bash: npm run test -- --forbid-only
displayName: "npm test"
env:
MOCHA_TIMEOUT: 100000
- bash: npm run test-consumer
displayName: "TypeScript consumer tests"
- bash: npm run compile:samples
displayName: "Typecheck Samples"
- bash: |
npm run UserManagement --prefix ./samples
npm run ItemManagement --prefix ./samples
npm run DatabaseManagement --prefix ./samples
npm run ContainerManagement --prefix ./samples
npm run ServerSideScripts --prefix ./samples
npm run ChangeFeed --prefix ./samples
displayName: "Run Samples"
# - job: NightlyEmulator
# pool:
# vmImage: ubuntu-16.04
# vmImage: vs2017-win2016
# steps:
# - task: azure-cosmosdb.emulator-internal-preview.run-cosmosdbemulatorcontainer.CosmosDbEmulator@2
# displayName: "Run Azure Cosmos DB Emulator container"
# inputs:
# username: "$(cosmosdb.azurecr.io.Username)"
# password: "$(cosmosdb.azurecr.io.Password)"
# defaultPartitionCount: 25
# - task: NodeTool@0
# inputs:
# versionSpec: "6.x"
# versionSpec: "8.x"
# displayName: "Install Node.js"
# - script: npm install
@ -22,61 +99,9 @@ jobs:
# - script: npm run build
# displayName: "npm run build"
# - script: npm run test
# displayName: "npm run test"
# - bash: ACCOUNT_HOST=$COSMOSDBEMULATOR_ENDPOINT NODE_TLS_REJECT_UNAUTHORIZED="0" npm run test
# failOnStderr: true
# continueOnError: true
# displayName: "npm test"
# env:
# ACCOUNT_HOST: $(stagingEndpoint)
# ACCOUNT_KEY: $(stagingKey)
# MOCHA_TIMEOUT: 100000
# TESTS_MULTIREGION: true
- job: Emulator
pool:
vmImage: vs2017-win2016
steps:
- task: azure-cosmosdb.emulator-public-preview.run-cosmosdbemulatorcontainer.CosmosDbEmulator@2
displayName: "Run Azure Cosmos DB Emulator container"
inputs:
defaultPartitionCount: 25
- task: NodeTool@0
inputs:
versionSpec: "6.x"
displayName: "Install Node.js"
- script: npm install
displayName: "npm install"
- script: npm run build
displayName: "npm run build"
- bash: ACCOUNT_HOST=$COSMOSDBEMULATOR_ENDPOINT NODE_TLS_REJECT_UNAUTHORIZED="0" npm run test
failOnStderr: true
displayName: "npm test"
env:
MOCHA_TIMEOUT: 100000
- job: NightlyEmulator
pool:
vmImage: vs2017-win2016
steps:
- task: azure-cosmosdb.emulator-internal-preview.run-cosmosdbemulatorcontainer.CosmosDbEmulator@2
displayName: "Run Azure Cosmos DB Emulator container"
inputs:
username: "$(cosmosdb.azurecr.io.Username)"
password: "$(cosmosdb.azurecr.io.Password)"
defaultPartitionCount: 25
- task: NodeTool@0
inputs:
versionSpec: "6.x"
displayName: "Install Node.js"
- script: npm install
displayName: "npm install"
- script: npm run build
displayName: "npm run build"
- bash: ACCOUNT_HOST=$COSMOSDBEMULATOR_ENDPOINT NODE_TLS_REJECT_UNAUTHORIZED="0" npm run test
failOnStderr: true
continueOnError: true
displayName: "npm test"
env:
MOCHA_TIMEOUT: 100000

14
bundle-types.js Normal file
Просмотреть файл

@ -0,0 +1,14 @@
// TODO. The api-extractor CLI command forces us into their docs generation and will error.
// By invoking the node API we avoid this.
// But we also swallow errors.
// See https://github.com/Microsoft/web-build-tools/issues/920
const ApiExtractor = require("@microsoft/api-extractor");
const NodeCoreLib = require("@microsoft/node-core-library");
const config = NodeCoreLib.JsonFile.loadAndValidate("api-extractor.json", ApiExtractor.Extractor.jsonSchema);
// This interface provides additional runtime state that is NOT part of the config file
const options = {
localBuild: process.argv.indexOf("--ship") < 0
};
const extractor = new ApiExtractor.Extractor(config, options);
extractor.processProject();

Просмотреть файл

@ -1,3 +1,183 @@
## 3.0.2
Fixes a long outstanding bug where RUs were always being reported as 0 for aggregate queries (#366)
## 3.0.1
Fixes broken session tokens in the browser. Cosmos uses file system friendly base64 to represent resources internally but does not work with the builtin browser atob function (#363)
## 3.0.0
🎉 v3 release! 🎉 Many new features, bug fixes, and a few breaking changes. Primary goals of this release:
- Implement major new features:
- DISTINCT queries
- LIMIT/OFFSET queries
- User cancelable requests
- Update to the latest Cosmos REST API version where [all containers have unlimited scale](https://docs.microsoft.com/en-us/azure/cosmos-db/migrate-containers-partitioned-to-nonpartitioned)
- Make it easier to use Cosmos from the browser
- Better align with the new [Azure JS SDK guidelines](https://azuresdkspecs.z5.web.core.windows.net/TypeScriptSpec.html)
### Migration Guide for Breaking Changes
#### Improved Client Constructor Options (#246)
Constructor options have been simplified:
- `masterKey` was renamed `key` and moved to the top-level
- Properties previously under `options.auth` have moved to the top-level
```js
// v2
const client = new CosmosClient({
endpoint: "https://your-database.cosmos.azure.com",
auth: {
masterKey: "your-primary-key"
}
})
// v3
const client = new CosmosClient({
endpoint: "https://your-database.cosmos.azure.com",
key: "your-primary-key"
})
```
#### Simplified QueryIterator API (#238 #316)
In v2 there were many different ways to iterate or retrieve results from a query. We have attempted to simplify the v3 API and remove similar or duplicate APIs:
- Remove iterator.next() and iterator.current(). Use fetchNext() to get pages of results.
- Remove iterator.forEach(). Use async iterators instead.
- iterator.executeNext() renamed to iterator.fetchNext()
- iterator.toArray() renamed to iterator.fetchAll()
- Pages are now proper `Response` objects instead of plain JS objects
``` js
const container = client.database(dbId).container(containerId)
// v2
container.items.query('SELECT * from c').toArray()
container.items.query('SELECT * from c').executeNext()
container.items.query('SELECT * from c').forEach(({ body: item }) => { console.log(item.id) })
// v3
container.items.query('SELECT * from c').fetchAll()
container.items.query('SELECT * from c').fetchNext()
for await(const { result: item } in client.databases.readAll().getAsyncIterator()) {
console.log(item.id)
}
```
#### Fixed Containers are now Partitioned (#308)
[The Cosmos service now supports partition keys on all containers, including those that were previously created as fixed containers](https://docs.microsoft.com/en-us/azure/cosmos-db/migrate-containers-partitioned-to-nonpartitioned). The v3 SDK updates to the latest API version that implements this change, but it is not breaking. If you do not supply a partition key for operations, we will default to a system key that works with all your existing containers and documents.
#### `upsert` removed for Stored Procedures (#356)
Previously `upsert` was allowed for non-partitioned collections, but with the API version update, all collections are partitioned so we removed it entirely.
#### Item reads will not throw on 404 (#343, Community Request)
``` js
const container = client.database(dbId).container(containerId)
// v2
try {
container.items.read(id, undefined)
} catch (e) {
if (e.code === 404) { console.log('item not found') }
}
// v3
const { result: item } = container.items.read(id, undefined)
if (item === undefined) { console.log('item not found') }
```
#### Default Multi Region Write (#335)
The SDK will now write to multiple regions by default if your database configuration supports it. This was previously opt-in behavior.
#### Proper Error Objects (#334, Community Request)
Failed requests now throw proper `Error` or subclasses of `Error`. Previously they threw plain JS objects.
### New Features
#### User Cancellable Requests (#263, Community Request)
The move to `fetch` internally allows us to use the browser `AbortController` API to support user cancellable operations. In the case of operations where multiple requests are potentially in progress (like cross partition queries), all requests for the operation will be canceled. Modern browser users will already have `AbortController`. Node.js users will need to use a [polyfill library](https://www.npmjs.com/package/node-abort-controller)
``` js
const controller = new AbortController()
const {result: item} = await items.query('SELECT * from c', { abortSignal: controller.signal});
controller.abort()
```
#### Set throughput as part of db/container create operation (#220)
``` js
const { database } = client.databases.create({ id: 'my-database', throughput: 10000 })
database.containers.create({ id: 'my-container', throughput: 10000 })
```
#### @azure/cosmos-sign (#213)
Header token generation was split out into a new library, @azure/cosmos-sign. Anyone calling the Cosmos REST API directly can use this to sign headers using the same code we call inside @azure/cosmos.
#### UUID for generated IDs (#355)
v2 had custom code to generate item IDs. We have switched to the well known and maintained community library `uuid`.
#### Connection Strings (#350, Community Request)
It is now possible to pass a connection string copied from the Azure portal:
``` js
const client = new CosmosClient("AccountEndpoint=https://test-account.documents.azure.com:443/;AccountKey=c213asdasdefgdfgrtweaYPpgoeCsHbpRTHhxuMsTaw==;")
```
#### Add DISTINCT and LIMIT/OFFSET queries (#306)
``` js
const { results } = await items.query('SELECT DISTINCT VALUE r.name FROM ROOT').fetchAll()
const { results } = await items.query('SELECT * FROM root r OFFSET 1 LIMIT 2').fetchAll()
```
### Improved Browser Experience
While it was possible to use the v2 SDK in the browser it was not an ideal experience. You needed to polyfill several node.js built-in libraries and use a bundler like Webpack or Parcel. The v3 SDK makes the out of the box experience much better for browser users.
- Replace request internals with `fetch` (#245)
- Remove usage of Buffer (#330)
- Remove node builtin usage in favor of universal packages/APIs (#328)
- Switch to node-abort-controller (#294)
### Bug Fixes
- Fix offer read and bring back offer tests (#224)
- Fix EnableEndpointDiscovery (#207)
- Fix missing RUs on paginated results (#360)
- Expand SQL query parameter type (#346)
- Add ttl to ItemDefinition (#341)
- Fix CP query metrics (#311)
- Add activityId to FeedResponse (#293)
- Switch _ts type from string to number (#252)(#295)
- Fix Request Charge Aggregation (#289)
- Allow blank string partition keys (#277)
- Add string to conflict query type (#237)
- Add uniqueKeyPolicy to container (#234)
### Engineering Systems
Not always the most visible changes, but they help our team ship better code, faster.
- Use rollup for production builds (#104)
- Update to Typescript 3.5 (#327)
- Convert to TS project references. Extract test folder (#270)
- Enable noUnusedLocals and noUnusedParameters (#275)
- Azure Pipelines YAML for CI builds (#298)
## Changes in 2.0.1
- Fix type issue (See #141)

Просмотреть файл

@ -1,13 +1,13 @@
const execa = require("execa");
let versions = ["3.0", "3.1"];
let versions = ["3.0", "3.1", "3.2", "3.3", "3.4"];
if (!process.env.SKIP_LATEST) {
versions.push("latest");
}
async function exec(cmd) {
const command = execa.shell(cmd, { cwd: "./ts-test" });
const command = execa.shell(cmd, { cwd: "./consumer-test" });
command.stderr.pipe(process.stderr);
command.stdout.pipe(process.stdout);
return command;

Просмотреть файл

4260
package-lock.json сгенерированный

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -11,65 +11,94 @@
"database",
"cloud"
],
"version": "2.1.7",
"version": "3.0.2",
"author": "Microsoft Corporation",
"main": "./lib/src/index.js",
"types": "./lib/src/index.d.ts",
"main": "./dist/index.js",
"module": "./dist-esm/src/index.js",
"types": "./dist/index.d.ts",
"engine": {
"node": ">=6.0.0"
},
"scripts": {
"clean": "rimraf lib",
"lint": "tslint --project tsconfig.json",
"format": "prettier --write --config .prettierrc.json \"src/**/*.ts\"",
"check-format": "prettier --list-different --config .prettierrc.json \"src/**/*.ts\"",
"compile": "echo Using TypeScript && tsc --version && tsc --pretty",
"compile-prod": "echo Using TypeScript && tsc --version && tsc -p tsconfig.prod.json --pretty",
"docs": "typedoc --excludePrivate --exclude \"**/test/**\" --mode file --out ./lib/docs ./src",
"clean": "rimraf dist && rimraf dist-esm",
"lint": "tslint --project ./src/tsconfig.json",
"format": "prettier --write --config .prettierrc.json \"src/**/*.ts\" \"test/**/*.ts\"",
"check-format": "prettier --list-different --config .prettierrc.json \"src/**/*.ts\" \"test/**/*.ts\"",
"compile": "echo Using TypeScript && tsc --version && tsc -b --pretty",
"compile:samples": "tsc -b samples",
"docs": "typedoc --tsconfig ./src/tsconfig.json --excludePrivate --mode file --out ./dist/docs ./src",
"bundle": "rollup -c",
"bundle-types": "node bundle-types.js",
"build": "npm run clean && npm run check-format && npm run lint && npm run compile && node writeSDKVersion.js && npm run docs && npm run bundle && npm run bundle-types",
"test": "tsc -b src test --verbose && mocha -r esm -r dotenv/config -r ./test/common/setup.js \"./test/**/*.spec.js\" --timeout 100000",
"prepack": "npm install && npm run build",
"webpack": "webpack -d",
"webpack-prod": "webpack -p",
"build": "npm run clean && npm run check-format && npm run lint && npm run compile && npm run docs && npm run webpack",
"build-prod": "npm run clean && npm run check-format && npm run lint && npm run compile-prod && npm run docs && npm run webpack-prod",
"test": "mocha -r ./src/test/common/setup.ts ./lib/src/test/ --recursive --timeout 100000 -i -g .*ignore.js",
"test-ts": "mocha -r ts-node/register -r ./src/test/common/setup.ts ./src/test/**/*.spec.ts --recursive --timeout 100000 -i -g .*ignore.js",
"ci": "npm run build && npm run test && node ts-test.js"
"test-browser": "karma start ./karma.config.js --single-run",
"test-consumer": "node consumer-test.js"
},
"devDependencies": {
"@microsoft/api-extractor": "6.3.0",
"@types/fast-json-stable-stringify": "2.0.0",
"@types/mocha": "^5.2.5",
"@types/node": "^8.10.22",
"@types/node": "^10.5.8",
"@types/node-fetch": "2.3.7",
"@types/priorityqueuejs": "^1.0.1",
"@types/semaphore": "^1.1.0",
"@types/sinon": "^4.3.3",
"@types/sinon": "7.0.10",
"@types/tunnel": "^0.0.0",
"@types/underscore": "^1.8.8",
"@types/uuid": "3.4.4",
"abort-controller": "3.0.0",
"cross-env": "5.2.0",
"dotenv": "8.0.0",
"esm": "3.2.18",
"execa": "1.0.0",
"karma": "^4.0.1",
"karma-chrome-launcher": "^2.2.0",
"karma-cli": "^1.0.1",
"karma-firefox-launcher": "^1.1.0",
"karma-mocha": "^1.3.0",
"karma-mocha-reporter": "^2.2.5",
"karma-requirejs": "^1.1.0",
"karma-sourcemap-loader": "^0.3.7",
"karma-webpack": "^3.0.5",
"mocha": "^5.2.0",
"mocha-junit-reporter": "^1.15.0",
"mocha-multi-reporters": "^1.1.6",
"prettier": "1.14.3",
"proxy-agent": "3.0.3",
"requirejs": "^2.3.5",
"sinon": "^5.1.1",
"ts-node": "^6.2.0",
"tslint": "5.11.0",
"rollup": "^0.64.1",
"rollup-plugin-json": "3.1.0",
"rollup-plugin-local-resolve": "^1.0.7",
"rollup-plugin-multi-entry": "2.0.2",
"sinon": "7.2.7",
"source-map-support": "0.5.11",
"ts-node": "^8.0.2",
"tslint": "5.16.0",
"tslint-config-prettier": "^1.14.0",
"typedoc": "0.13.0",
"typescript": "3.1.4",
"webpack": "^4.16.3",
"webpack-cli": "^3.2.3"
"typedoc": "0.14.2",
"typescript": "3.5.1"
},
"dependencies": {
"@azure/cosmos-sign": "1.0.2",
"atob": "2.1.2",
"binary-search-bounds": "2.0.3",
"create-hmac": "^1.1.7",
"crypto-hash": "1.1.0",
"fast-json-stable-stringify": "2.0.0",
"node-abort-controller": "1.0.3",
"node-fetch": "2.6.0",
"priorityqueuejs": "1.0.0",
"semaphore": "1.0.5",
"stream-http": "^2.8.3",
"tslib": "^1.9.3",
"tunnel": "0.0.5"
"universal-user-agent": "2.1.0",
"uuid": "3.3.2"
},
"repository": {
"type": "git",
"url": "https://github.com/Azure/azure-cosmos-js"
},
"license": "MIT"
"license": "MIT",
"tsdoc": {
"tsdocFlavor": "AEDoc"
}
}

13
rollup.config.js Normal file
Просмотреть файл

@ -0,0 +1,13 @@
import resolve from "rollup-plugin-local-resolve";
export default [
{
input: "dist-esm/index.js",
output: {
file: "dist/index.js",
format: "umd",
name: "Microsoft.Azure.Cosmos",
sourcemap: true
},
plugins: [resolve()]
}
];

Просмотреть файл

@ -1,22 +1,15 @@
// @ts-check
"use strict";
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
import { finish, handleError, logSampleHeader } from "./Shared/handleError";
import { CosmosClient } from "../dist";
import { database as databaseId, container as containerId, endpoint, key } from "./Shared/config";
logSampleHeader("Change Feed");
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
const client = new CosmosClient({ endpoint, key });
// We'll use the same pk value for all these samples
const pk = "0";
function doesMatch(actual, expected) {
function doesMatch(actual: any[], expected: any[]) {
for (let i = 0; i < actual.length; i++) {
if (actual[i] !== expected[i]) {
return "❌";
@ -25,13 +18,17 @@ function doesMatch(actual, expected) {
return "✅";
}
function logResult(scenario, actual, expected) {
function logResult(scenario: string, actual: any[], expected: any[]) {
const status = doesMatch(actual, expected);
console.log(` ${status} ${scenario} - expected: [${expected.join(", ")}] - actual: [${actual.join(", ")}]`);
}
async function run() {
const container = await init();
const { database } = await client.databases.createIfNotExists({ id: databaseId });
const { container } = await database.containers.createIfNotExists({
id: containerId,
partitionKey: { paths: ["/pk"] }
});
try {
console.log(`
@ -69,34 +66,34 @@ async function run() {
console.log(` 👉 Inserted id=3`);
const specificContinuationIterator = container.items.readChangeFeed(pk, { continuation: lsn });
const specificContinuationIterator = container.items.readChangeFeed(pk, { continuation: lsn.toString() });
const specificPointInTimeIterator = container.items.readChangeFeed(pk, { startTime: now });
const fromBeginningIterator = container.items.readChangeFeed(pk, { startFromBeginning: true });
const fromNowIterator = container.items.readChangeFeed(pk, {});
const { result: specificContinuationResult } = await specificContinuationIterator.executeNext();
const { result: specificContinuationResult } = await specificContinuationIterator.fetchNext();
logResult("initial specific Continuation scenario", [3], specificContinuationResult.map(v => parseInt(v.id)));
// First page is empty. It is catching up to a valid continuation.
const { result: shouldBeEmpty } = await specificPointInTimeIterator.executeNext();
const { result: shouldBeEmpty } = await specificPointInTimeIterator.fetchNext();
logResult(
"initial specific point in time scenario should be empty while it finds the right continuation",
[],
shouldBeEmpty.map(v => parseInt(v.id))
);
// Second page should have results
const { result: specificPointInTimeResults } = await specificPointInTimeIterator.executeNext();
const { result: specificPointInTimeResults } = await specificPointInTimeIterator.fetchNext();
logResult(
"second specific point in time scenario should have caught up now",
[2, 3],
specificPointInTimeResults.map(v => parseInt(v.id))
);
const { result: fromBeginningResults } = await fromBeginningIterator.executeNext();
const { result: fromBeginningResults } = await fromBeginningIterator.fetchNext();
logResult("initial from beginning scenario", [1, 2, 3], fromBeginningResults.map(v => parseInt(v.id)));
const { result: fromNowResultsShouldBeEmpty } = await fromNowIterator.executeNext();
const { result: fromNowResultsShouldBeEmpty } = await fromNowIterator.fetchNext();
logResult("initial from now scenario should be empty", [], fromNowResultsShouldBeEmpty.map(v => parseInt(v.id)));
// Now they should all be caught up to the point after id=3, so if we insert a id=4, they should all get it.
@ -105,53 +102,29 @@ async function run() {
await container.items.create({ id: "4", pk });
console.log(" 👉 Inserting id=4 - all scenarios should see this");
const { result: specificContinuationResult2 } = await specificContinuationIterator.executeNext();
const { result: specificContinuationResult2 } = await specificContinuationIterator.fetchNext();
logResult(
"after insert, Specific Continuation scenario",
[4],
specificContinuationResult2.map(v => parseInt(v.id))
);
const { result: specificPointInTimeResults2 } = await specificPointInTimeIterator.executeNext();
const { result: specificPointInTimeResults2 } = await specificPointInTimeIterator.fetchNext();
logResult(
"after insert, specific point in time scenario",
[4],
specificPointInTimeResults2.map(v => parseInt(v.id))
);
const { result: fromBeginningResults2 } = await fromBeginningIterator.executeNext();
const { result: fromBeginningResults2 } = await fromBeginningIterator.fetchNext();
logResult("after insert, from beginning scenario", [4], fromBeginningResults2.map(v => parseInt(v.id)));
const { result: fromNowResults2 } = await fromNowIterator.executeNext();
const { result: fromNowResults2 } = await fromNowIterator.fetchNext();
logResult("after insert, from now scenario", [4], fromNowResults2.map(v => parseInt(v.id)));
} catch (err) {
handleError(err);
} finally {
await finish(container);
await finish();
}
}
async function init() {
const { database } = await client.databases.createIfNotExists({ id: databaseId });
const { container } = await database.containers.createIfNotExists({
id: containerId,
partitionKey: { kind: "Hash", paths: ["/pk"] }
});
return container;
}
async function handleError(error) {
console.log(`\nAn error with code '${error.code}' has occurred:`);
console.log(`\t${error}`);
}
async function finish(container) {
try {
await container.database.delete();
console.log("\nEnd of demo.");
} catch (err) {
console.log(`Database[${databaseId}] might not have deleted properly. You might need to delete it manually.`);
}
}
run().catch(handleError);

Просмотреть файл

Просмотреть файл

@ -1,11 +0,0 @@
{
"name": "cosmos-change-feed",
"version": "0.0.0",
"private": true,
"description": "A sample showing usage of the change feed in Cosmos DB",
"main": "app.js",
"dependencies": {},
"scripts": {
"start": "node app.js"
}
}

Просмотреть файл

@ -0,0 +1,33 @@
import { finish, handleError, logStep, logSampleHeader } from "./Shared/handleError";
import { CosmosClient } from "../dist";
import { database as databaseId, container as containerId, endpoint, key } from "./Shared/config";
logSampleHeader("Container Management");
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
//ensuring a database exists for us to work with
async function run() {
const { database } = await client.databases.createIfNotExists({ id: databaseId });
logStep(`Create container with id : ${containerId}`);
await database.containers.createIfNotExists({ id: containerId });
logStep("Read all containers in database");
const iterator = database.containers.readAll();
const { resources: containersList } = await iterator.fetchAll();
console.log(" --- Priting via iterator.fetchAll()");
console.log(containersList);
logStep("Read container definition");
const container = database.container(containerId);
const { resource: containerDef } = await container.read();
console.log(`Container with url "${container.url}" was found its id is "${containerDef.id}`);
logStep(`Delete container ${containerDef.id}`);
await container.delete();
await finish();
}
run().catch(handleError);

Просмотреть файл

@ -1,7 +0,0 @@
Samples for performing basic CRUD operations on an Azure Cosmos DB collection
- createCollection - given an id, create a new Collection with the default indexingPolicy
- listCollections - example of using the QueryIterator to get a list of Collections in a Database
- getOfferType - get the Offer.OfferType for a collection. This is what determines if a Collection is S1, S2, or S3
- modifyOfferType - change the Offer.OfferType for a collection. This is how you scale a Collection up or down
- deleteCollection - given just the collection id, delete the collection

Просмотреть файл

@ -1,80 +0,0 @@
// @ts-check
"use strict";
console.log();
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log();
console.log("container MANAGEMENT");
console.log("=====================");
console.log();
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
//---------------------------------------------------------------------------------
// This demo performs a few steps
// 1. create container - given an id, create a new container with thedefault indexingPolicy
// 2. read all containers - example of using the QueryIterator to get a list of containers in a Database
// 3. read container - Read a container by its _self
// 4. delete container - given just the container id, delete the container
//---------------------------------------------------------------------------------
//ensuring a database exists for us to work with
async function run() {
const database = await init(databaseId);
//1.
console.log(`1. create container with id '${containerId}'`);
await database.containers.createIfNotExists({ id: containerId });
//2.
console.log("\n2. read all containers in database");
const iterator = database.containers.readAll();
const { result: containersList } = await iterator.toArray();
console.log(" --- Priting via iterator.toArray");
console.log(containersList);
//3.
console.log("\n3. read container definition");
const container = database.container(containerId);
const { body: containerDef } = await container.read();
console.log(`container with url '${container.url}' was found its id is '${containerDef.id}'`);
//4.
console.log(`\n4. deletecontainer '${containerId}'`);
await container.delete();
await finish(database);
}
async function init(databaseId) {
const { database } = await client.databases.createIfNotExists({ id: databaseId });
return database;
}
async function handleError(error) {
console.log(`\nAn error with code '${error.code}' has occurred:`);
console.log("\t" + error);
await finish();
}
async function finish(database) {
try {
await database.delete();
console.log("\nEnd of demo.");
} catch (err) {
console.log(`Database[${databaseId}] might not have deleted properly. You might need to delete it manually.`);
}
}
run().catch(handleError);

Просмотреть файл

@ -1,11 +0,0 @@
{
"name": "cosmos-container-management",
"version": "0.0.0",
"private": true,
"description": "A sample showing managing containers in Cosmos DB",
"main": "app.js",
"dependencies": {},
"scripts": {
"start": "node app.js"
}
}

Просмотреть файл

@ -0,0 +1,35 @@
import { handleError, logStep, logSampleHeader, finish } from "./Shared/handleError";
import { CosmosClient } from "../dist";
import { endpoint, key, database as databaseId } from "./Shared/config";
import assert from "assert";
logSampleHeader("Database Management");
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
/**
 * Database CRUD walkthrough: create-if-absent, list all databases, read one
 * by id, then clean up.
 * NOTE(review): the final logStep announces a delete but no delete is issued
 * here — presumably the shared finish() helper deletes the database; confirm
 * against Shared/handleError.
 */
async function run() {
  logStep("Create database, if it doesn't already exist");
  await client.databases.createIfNotExists({ id: databaseId });
  console.log("Database with id " + databaseId + " created.");
  logStep("Read all databases");
  const { resources: dbDefList } = await client.databases.readAll().fetchAll();
  console.log(dbDefList);
  logStep("ReadDatabase with id '" + databaseId + "'");
  const { resource: dbDef } = await client.database(databaseId).read();
  // This uses Object deconstruction to just grab the body of the response,
  // but you can also grab the whole response object to use
  const databaseResponse = await client.database(databaseId).read();
  const alsoDbDef = databaseResponse.resource;
  assert.equal(dbDef.id, alsoDbDef.id); // The bodies will also almost be equal, _ts will defer based on the read time
  // This applies for all response types, not just DatabaseResponse.
  // Fixed quoting: the opening apostrophe around the id was missing.
  console.log("Database with id of '" + dbDef.id + "' was found");
  logStep("delete database with id '" + databaseId + "'");
  await finish();
}
run().catch(handleError);

Просмотреть файл

@ -1,9 +0,0 @@
Samples for performing basic CRUD operations on an Azure Cosmos DB database
- createCollection - given an id, create a new Collection with the default indexingPolicy
- listCollections - example of using the QueryIterator to get a list of Collections in a Database
- readCollection - Read a collection by its _self
- readCollection - Read a collection by its id (using new ID Based Routing)
- getOfferType - get the Offer.OfferType for a collection. This is what determines if a Collection is S1, S2, or S3
- modifyOfferType - change the Offer.OfferType for a collection. This is how you scale a Collection up or down
- deleteCollection - given just the collection id, delete the collection

Просмотреть файл

@ -1,77 +0,0 @@
// @ts-check
"use strict";
console.log();
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log();
console.log("DATABASE MANAGEMENT");
console.log("===================");
console.log();
const assert = require("assert");
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const databaseId = config.names.database;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
//---------------------------------------------------------------------------------------------------
// This demo performs the following CRUD operations on a Database
//
// 1. create Database - If the database was not found, try create it
// 2. read all Databases - Once the database was created, list all the databases on the account
// 3. read Database - Read a database by its id
// 4. delete Database - Delete a database given its id
//
//---------------------------------------------------------------------------------------------------
// Database CRUD walkthrough: create-if-absent, list all databases, read one
// by id, then delete it and print the demo footer.
async function run() {
  // 1.
  console.log(`\n1. Create database, if it doesn't already exist '${databaseId}'`);
  await client.databases.createIfNotExists({ id: databaseId });
  console.log("Database with id " + databaseId + " created.");
  // 2.
  console.log("\n2. Read all databases");
  const { result: dbDefList } = await client.databases.readAll().toArray();
  console.log(dbDefList);
  // 3.
  console.log(`\n3. readDatabase - with id '${databaseId}'`);
  const { body: dbDef } = await client.database(databaseId).read();
  // This uses Object deconstruction to just grab the body of the response,
  // but you can also grab the whole response object to use
  const databaseResponse = await client.database(databaseId).read();
  const alsoDbDef = databaseResponse.body;
  assert.equal(dbDef.id, alsoDbDef.id); // The bodies will also almost be equal, _ts will defer based on the read time
  // This applies for all response types, not just DatabaseResponse.
  // Fixed quoting: the opening apostrophe around the id was missing.
  console.log(`Database with id of '${dbDef.id}' was found`);
  // 4.
  console.log(`\n4. delete database with id '${databaseId}'`);
  await client.database(databaseId).delete();
  await finish();
}
// Logs the failure (preferring the service error body when present), framed by
// blank lines, then prints the demo footer via finish().
function handleError(error) {
  const blank = () => console.log();
  blank();
  console.log(`An error with code '${error.code}' has occurred:`);
  console.log(`\t${error.body || error}`);
  blank();
  finish();
}
// Prints a blank spacer line followed by the closing banner.
function finish() {
  for (const line of ["", "End of demo."]) {
    console.log(line);
  }
}
run().catch(handleError);

Просмотреть файл

@ -1,11 +0,0 @@
{
"name": "cosmos-database-management",
"version": "0.0.0",
"private": true,
"description": "A sample showing managing databases in Cosmos DB",
"main": "app.js",
"dependencies": {},
"scripts": {
"start": "node app.js"
}
}

213
samples/IndexManagement.ts Normal file
Просмотреть файл

@ -0,0 +1,213 @@
import { logSampleHeader, handleError, finish, logStep } from "./Shared/handleError";
import { CosmosClient, IndexKind, DataType } from "../dist";
import { endpoint, key, database as databaseId, container as containerId } from "./Shared/config";
logSampleHeader("Index Management");
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
/**
 * Index-management walkthrough on a single container: manually exclude an
 * item from the index, switch automatic indexing off and include one item,
 * add a Range index on string paths, then exclude a path from the index.
 */
async function run() {
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  // We're using the default indexing policy because by default indexingMode == consistent & automatic == true
  // which means that by default all items added to a container are indexed as the item is written
  const { container, resource: containerDef } = await database.containers.createIfNotExists({ id: containerId });
  logStep("Manually exclude an item from being indexed");
  console.log("create container with default index policy");
  // One of items.create() options is indexingDirectives which can be include, or exclude
  // We're using exclude this time to manually exclude this item from being indexed
  console.log("Create item, but exclude from index");
  const { resource: itemDef, item } = await container.items.create(
    { id: "item1", foo: "bar" },
    { indexingDirective: "exclude" }
  );
  console.log("Item with id '" + itemDef.id + "' created");
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.foo=@foo",
    parameters: [
      {
        name: "@foo",
        value: "bar"
      }
    ]
  };
  console.log("Querying all items for the given item should not find any results");
  const { resources: results } = await container.items.query(querySpec).fetchAll();
  if (results.length !== 0) {
    throw new Error("there were not meant to be results");
  }
  console.log("No results found");
  console.log("item.read() should still find the item");
  await item.read();
  console.log("item.read() found item");
  logStep("Switch auto indexing off, and manually index item");
  console.log("Update container indexingPolicy.automatic : false");
  const indexingPolicySpec = { automatic: false };
  // Index transform is an async operation that is performed on a Container
  // You can continue to use the container while this is happening, but depending
  // on the transform and your queries you may get inconsistent results as the index is updated
  // In this case it will be almost instant because we only have one item
  // but this can take some time on larger containers
  await container.replace({
    id: containerId,
    partitionKey: containerDef.partitionKey,
    indexingPolicy: indexingPolicySpec
  });
  // items.create() takes RequestOptions as 2nd parameter.
  // One of these options is indexingDirectives which can be include, or exclude
  // we're using include this time to manually index this particular item
  console.log("Create item, and explicitly include in index");
  const { resource: itemDef2 } = await container.items.create(
    { id: "item2", foo: "bar" },
    { indexingDirective: "include" }
  );
  console.log("Item with id '" + itemDef2.id + "' created");
  console.log("Querying all items for a given item should find a result as it was indexed");
  const { resources: results2 } = await container.items.query(querySpec).fetchAll();
  if (results2.length === 0) {
    throw new Error("There were meant to be results");
  } else {
    const itemDef = results2[0];
    console.log("Item with id '" + itemDef.id + "' found");
  }
  logStep("Create a range index on string path");
  // Azure Cosmos DB index knows about 3 datatypes - numbers, strings and geojson
  // Next we are going to create a custom index policy which enables range index on a string path
  console.log("update container with range index on string paths");
  await container.replace({
    id: containerId,
    partitionKey: containerDef.partitionKey,
    indexingPolicy: {
      includedPaths: [
        {
          path: "/*",
          indexes: [
            {
              kind: IndexKind.Range,
              dataType: DataType.String
            },
            {
              kind: IndexKind.Range,
              dataType: DataType.Number
            }
          ]
        }
      ]
    }
  });
  console.log("Container '" + containerDef.id + "' updated with new index policy");
  // create an item
  console.log("Creating item");
  await container.items.create({ id: "item3", stringField: "a string value" });
  console.log("Item created");
  console.log("Querying for item where stringField > 'a', should return results");
  // notice how we're switching to queryIterator.fetchNext instead of calling .fetchAll() as before
  // reason being, fetchAll will issue multiple requests to the server until it has fetched all results
  // here we can control this using fetchNext.
  // now we can get the per-request metadata, which includes the charge, continuation tokens etc.
  const queryIterator = container.items.query(
    {
      query: "SELECT * FROM root r WHERE r.stringField > @value",
      parameters: [
        {
          name: "@value",
          value: "a"
        }
      ]
    },
    { enableScanInQuery: true }
  );
  const { resources: items, requestCharge } = await queryIterator.fetchNext();
  const itemDef3 = items[0];
  console.log("Item '" + itemDef3.id + "' found, request charge: " + requestCharge);
  logStep("Update index to exclude paths from indexing");
  await container.replace({
    id: containerId,
    partitionKey: containerDef.partitionKey,
    indexingPolicy: {
      // the special "/" must always be included somewhere. in this case we're including root
      // and then excluding specific paths
      includedPaths: [
        {
          path: "/",
          indexes: [
            {
              kind: IndexKind.Range,
              dataType: DataType.Number,
              precision: 2
            }
          ]
        }
      ],
      excludedPaths: [
        {
          path: "/metaData/*"
        }
      ]
    }
  });
  console.log("Container '" + containerDef.id + "' updated with excludedPaths");
  // create an item
  console.log("Creating item");
  const { item: item4 } = await container.items.create({
    id: "item4",
    metaData: "meta",
    subDoc: {
      searchable: "searchable",
      subSubDoc: { someProperty: "value" }
    }
  });
  console.log("Item created");
  try {
    // expecting an exception on this query due to the fact that it includes paths that
    // have been excluded. If you want to force a scan, then enableScanInQuery
    console.log("Querying for item where metaData = 'meta', should throw an exception");
    const result = await container.items
      .query({
        query: "SELECT * FROM root r WHERE r.metaData = @value",
        parameters: [
          {
            name: "@value",
            value: "meta"
          }
        ]
      })
      .fetchAll();
    console.log(result.resources);
    throw new Error("Should've produced an error");
  } catch (err) {
    if (err.code !== undefined) {
      console.log("Threw, as expected");
    } else {
      throw err;
    }
  }
  // You can still read the item by its id.
  // Fixed: read item4 (the item just created and logged above) instead of
  // item, which refers to "item1" from the first step.
  console.log("Can still item.read() using '" + item4.id + "'");
  await item4.read();
  await finish();
}
run().catch(handleError);

Просмотреть файл

@ -1,15 +0,0 @@
While Azure Cosmos DB automatically indexes all paths of all documents in a consistent manner, you have the ability to tweak and customize this
behavior should you need (or want) to.
Samples for working with Azure Cosmos DB IndexPolicy on a Collection
1. explictlyExcludeFromIndex - how to manually exclude a document from being indexed
2. useManualIndexing - switch auto indexing off, and then manually add individual docs
3. useLazyIndexing - create a collection with indexing mode set to Lazy instead of consistent
4. forceScanOnHashIndexPath - use a directive to allow a scan on a string path during a range operation
5. useRangeIndexOnStrings - create a range index on string path
6. excludePathsFromIndex - create a custom indexPolicy that excludes specific path in document
7. performIndexTransforms - create a collection with default indexPolicy, then update this online
8. waitForIndexTransforms - waits for an index transform to complete by repeatedly doing a readCollection and checking the progress headers

Просмотреть файл

@ -1,555 +0,0 @@
// @ts-check
console.log();
console.log("Azure CosmosDB Node.js Samples");
console.log("================================");
console.log();
console.log("INDEX MANAGEMENT");
console.log("================");
console.log();
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const fs = require("fs");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
//IMPORTANT:
//this sample creates and delete containers at least 7 times.
//each time you execute containers.create() you are charged for 1hr (our smallest billing unit)
//even if that container is only alive for a few seconds.
//so please take note of this before running this sample
//TODO: Now that index transforms exist, refactor to create only 1 container and just reuse each time
//NOTE:
//when using the new IDBased Routing URIs, instead of the _self, as we're doing in this sample
//ensure that the URI does not end with a trailing '/' character
//so dbs/databaseId instead of dbs/databaseId/
//also, ensure there is no leading space
//-----------------------------------------------------------------------------------------------------------
// This demo performs a few steps
// 1. explictlyExcludeFromIndex - how to manually exclude an item from being indexed
// 2. useManualIndexing - switch auto indexing off, and then manually add individual items
// 3. useLazyIndexing - create a container with indexing mode set to Lazy instead of consistent
// 4. forceScanOnHashIndexPath - use a directive to allow a scan on a string path during a range operation
// 5. useRangeIndexOnStrings - create a range index on string path
// 6. excludePathsFromIndex - create a custom indexPolicy that excludes specific path in an item
// 7. performIndexTransforms - create a container with default indexPolicy, then update this online
//------------------------------------------------------------------------------------------------------------
// Drives the seven index-management demos in order against one shared database.
async function run() {
  // Gets a database for us to work with
  const { database } = await init();
  //1.
  console.log("\n1.");
  console.log("explictlyExcludeFromIndex - manually exclude an item from being indexed");
  await explictlyExcludeFromIndex(database);
  //2.
  console.log("\n2.");
  console.log("useManualIndexing - switch auto indexing off, and manually index item");
  await useManualIndexing(database);
  //3.
  console.log("\n3.");
  console.log("useLazyIndexing - create container lazy index");
  await useLazyIndexing(database);
  //4.
  console.log("\n4.");
  console.log("forceScanOnHashIndexPath - use index directive to allow range scan on path without range index");
  await forceScanOnHashIndexPath(database);
  //5.
  console.log("\n5.");
  console.log("useRangeIndexOnStrings - create a range index on string path");
  await useRangeIndexOnStrings(database);
  //6.
  console.log("\n6.");
  // Fixed copy/paste: step 6 excludes paths; it does not create a range index.
  console.log("excludePathsFromIndex - create a custom indexPolicy that excludes specific path in an item");
  await excludePathsFromIndex(database);
  //7.
  console.log("\n7.");
  console.log("performIndexTransforms - update an index policy online");
  await performIndexTransforms(database);
  await finish();
}
// Ensures the demo database exists; resolves with the createIfNotExists
// response ({ database, ... }) for the caller to destructure.
// The previous unused `callback` parameter was dropped; no caller passed one.
async function init() {
  return client.databases.createIfNotExists({ id: databaseId });
}
/**
 * Demo 1: creates a container with the default (automatic, consistent) index
 * policy, then uses the `indexingDirective: "exclude"` request option so one
 * item is written but never indexed. Queries cannot find it; point reads can.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function explictlyExcludeFromIndex(database) {
  console.log("create container with default index policy");
  //we're using the default indexing policy because by default indexingMode == consistent & automatic == true
  //which means that by default all items added to a container are indexed as the item is written
  const containerId = "ExplictExcludeDemo";
  // Dropped the unused `containerDef` destructured binding.
  const { container } = await database.containers.create({ id: containerId });
  const itemSpec = { id: "item1", foo: "bar" };
  console.log("Create item, but exclude from index");
  //items.create() takes RequestOptions as 2nd parameter.
  //One of these options is indexingDirectives which can be include, or exclude
  //we're using exclude this time to manually exclude this item from being indexed
  const { body: itemDef, item } = await container.items.create(itemSpec, { indexingDirective: "exclude" });
  console.log(`Item with id '${itemDef.id}' created`);
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.foo=@foo",
    parameters: [
      {
        name: "@foo",
        value: "bar"
      }
    ]
  };
  console.log("Querying all items for the given item should not find any results");
  const { result: results } = await container.items.query(querySpec).toArray();
  if (results.length !== 0) {
    throw new Error("there were not meant to be results");
  }
  console.log("No results found");
  console.log("item.read() should still find the item");
  const { body: readItemDef } = await item.read();
  console.log(`item.read() found item and its _self is '${readItemDef._self}'`);
  await container.delete();
  console.log(`Container '${containerId}' deleted`);
}
/**
 * Demo 2: creates a container with automatic indexing disabled, then opts a
 * single item into the index via the `indexingDirective: "include"` request
 * option and shows it is queryable.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function useManualIndexing(database) {
  console.log("create container with indexingPolicy.automatic : false");
  const containerId = "ManualIndexDemo"; // intentionally shadows the module-level containerId
  const indexingPolicySpec = { automatic: false };
  const { container } = await database.containers.create({
    id: containerId,
    indexingPolicy: indexingPolicySpec
  });
  // items.create() takes RequestOptions as 2nd parameter.
  // One of these options is indexingDirectives which can be include, or exclude
  // we're using include this time to manually index this particular item
  console.log("Create item, and explicitly include in index");
  const itemSpec = { id: "item1", foo: "bar" };
  const { body: itemDef } = await container.items.create(itemSpec, { indexingDirective: "include" });
  console.log("Item with id '" + itemDef.id + "' created");
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.foo=@foo",
    parameters: [
      {
        name: "@foo",
        value: "bar"
      }
    ]
  };
  console.log("Querying all items for a given item should find a result as it was indexed");
  const { result: results } = await container.items.query(querySpec).toArray();
  if (results.length === 0) {
    throw new Error("There were meant to be results");
  } else {
    const itemDef = results[0];
    console.log("Item with id '" + itemDef.id + "' found");
    // NOTE(review): the demo container is only deleted on this success path;
    // when the query finds nothing the throw above leaks "ManualIndexDemo".
    await container.delete();
    console.log("Container '" + containerId + "' deleted");
  }
}
/**
 * Demo 3: builds an indexing policy with indexingMode "lazy" (asynchronous
 * index updates) instead of the default "consistent", creates a container
 * with it, prints the resulting policy, and deletes the container.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function useLazyIndexing(database) {
  // Azure Cosmos DB offers synchronous (consistent) and asynchronous (lazy) index updates.
  // By default, the index is updated synchronously on each insert, replace or delete of a item to the container.
  // There are times when you might want to configure certain containers to update their index asynchronously.
  // Lazy indexing boosts the write performance and lowers RU charge of each insert
  // and is ideal for bulk ingestion scenarios for primarily read-heavy containers
  // It is important to note that you might get inconsistent reads whilst the writes are in progress,
  // However once the write volume tapers off and the index catches up, then the reads continue as normal
  // It is difficult to demonstrate this in a code sample as you only really notice this under sustained
  // heavy-write workloads. So this code sample shows just how to create the custom index policy needed
  console.log("create container with indexingPolicy.indexMode : lazy");
  // allowed values for IndexingMode are consistent (default), lazy and none
  const containerId = "LazyIndexDemo";
  /** @type cosmos.DocumentBase.IndexingPolicy */
  const indexingPolicySpec = { indexingMode: cosmos.DocumentBase.IndexingMode.lazy };
  // You can also set the indexing policy Mode via string
  // (deliberately overwrites the enum value above to show both forms are accepted)
  indexingPolicySpec.indexingMode = "lazy";
  const { body: containerDef, container } = await database.containers.create({
    id: containerId,
    indexingPolicy: indexingPolicySpec
  });
  console.log("Container '" + containerDef.id + "' created with index policy: ");
  console.log(containerDef.indexingPolicy);
  await container.delete();
  console.log("Container '" + containerId + "' deleted");
}
/**
 * Demo 4: with the default index policy (no range index on strings) a range
 * comparison on a string path fails; retrying with the enableScanInQuery
 * feed option forces a full scan and succeeds.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function forceScanOnHashIndexPath(database) {
  // Azure Cosmos DB index knows about 3 datatypes - numbers, strings and geojson
  // By default, the index on a container does not put range indexes on to string paths
  // Therefore, if you try and do a range operation on a string path with a default index policy, you will get an error
  // You can override this by using an request option, that is what this demonstrates
  // NOTE - it is not recommended to do this often due to the high charge associated with a full container scan
  // if you find yourself doing this often on a particular path, create a range index for strings on that path
  console.log("create container with default index policy");
  const containerId = "ForceScanDemo";
  const { body: containerDef, container } = await database.containers.create({ id: containerId });
  console.log("Container '" + containerDef.id + "' created with default index policy (i.e. no range on strings)");
  //create an item
  console.log("Creating item");
  await container.items.create({ id: "item1", stringField: "a string value" });
  console.log("Item created");
  //try a range query on the item, expect an error
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.stringField > @value",
    parameters: [
      {
        name: "@value",
        value: "a"
      }
    ]
  };
  console.log("Querying for item where stringField > 'a', should fail");
  try {
    await container.items.query(querySpec).toArray();
  } catch (err) {
    console.log("Query failed with " + err.code);
  }
  //try same range query again, this time specifying the directive to do a scan,
  //be wary of high RU cost that you could get for even a single item!
  //we won't particularly see a high charge this time because there is only 1 item in the container
  //so a scan on 1 item isn't costly. a few thousand items will be very different
  console.log("Repeating query for item where stringField > 'a', this time with enableScanInQuery: true");
  //notice how we're switching to queryIterator.executeNext instead of calling .toArray() as before
  //reason being, toArray will issue multiple requests to the server until it has fetched all results
  //here we can control this using executeNext.
  //now we can get the headers for each request which includes the charge, continuation tokens etc.
  const queryIterator = container.items.query(querySpec, { enableScanInQuery: true });
  const { result: items, headers } = await queryIterator.executeNext();
  const charge = headers["x-ms-request-charge"];
  const itemDef = items[0];
  console.log("Item '" + itemDef.id + "' found, request charge: " + charge);
  await container.delete();
  console.log("Container '" + containerId + "' deleted");
}
/**
 * Demo 5: creates a container whose index policy adds Range indexes for both
 * String and Number on all paths, so range comparisons on string paths work
 * without forcing a scan.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function useRangeIndexOnStrings(database) {
  // Azure Cosmos DB index knows about 3 datatypes - numbers, strings and geojson
  // By default, the index on a container does not put range indexes on to string paths
  // In this demo we are going to create a custom index policy which enables range index on a string path
  console.log("create container with range index on string paths");
  const containerId = "RangeIndexDemo";
  /**
   * @type cosmos.DocumentBase.IndexingPolicy
   */
  const indexPolicySpec = {
    includedPaths: [
      {
        path: "/*",
        indexes: [
          {
            kind: cosmos.DocumentBase.IndexKind.Range,
            dataType: cosmos.DocumentBase.DataType.String
          },
          {
            kind: cosmos.DocumentBase.IndexKind.Range,
            dataType: cosmos.DocumentBase.DataType.Number
          }
        ]
      }
    ]
  };
  const { body: containerDef, container } = await database.containers.create({
    id: containerId,
    indexingPolicy: indexPolicySpec
  });
  console.log("Container '" + containerDef.id + "' created with custom index policy");
  //create an item
  console.log("Creating item");
  await container.items.create({ id: "item1", stringField: "a string value" });
  console.log("Item created");
  // NOTE(review): no error is expected here (the range index exists); the
  // comment below looks copied from forceScanOnHashIndexPath.
  //try a range query on the item, expect an error
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.stringField > @value",
    parameters: [
      {
        name: "@value",
        value: "a"
      }
    ]
  };
  console.log("Querying for item where stringField > 'a', should return results");
  //notice how we're switching to queryIterator.executeNext instead of calling .toArray() as before
  //reason being, toArray will issue multiple requests to the server until it has fetched all results
  //here we can control this using executeNext.
  //now we can get the headers for each request which includes the charge, continuation tokens etc.
  const queryIterator = container.items.query(querySpec, { enableScanInQuery: true });
  const { result: items, headers } = await queryIterator.executeNext();
  const charge = headers["x-ms-request-charge"];
  const itemDef = items[0];
  console.log("Item '" + itemDef.id + "' found, request charge: " + charge);
  await container.delete();
  console.log("Container '" + containerId + "' deleted");
}
/**
 * Demo 6: index policy that includes the root path but excludes /metaData/*.
 * Queries filtering on the excluded path throw (unless a scan is forced),
 * while point reads of the item still succeed.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function excludePathsFromIndex(database) {
  console.log("create container with an excluded path");
  const containerId = "ExcludePathDemo";
  const indexPolicySpec = {
    //the special "/" must always be included somewhere. in this case we're including root
    //and then excluding specific paths
    includedPaths: [
      {
        path: "/",
        indexes: [
          {
            kind: cosmos.DocumentBase.IndexKind.Hash,
            dataType: cosmos.DocumentBase.DataType.Number,
            precision: 2
          }
        ]
      }
    ],
    excludedPaths: [
      {
        path: "/metaData/*"
      }
    ]
  };
  const { body: containerDef, container } = await database.containers.create({
    id: containerId,
    indexingPolicy: indexPolicySpec
  });
  console.log("Container '" + containerDef.id + "' created with excludedPaths");
  const itemId = "item1";
  const itemSpec = {
    id: itemId,
    metaData: "meta",
    subDoc: {
      searchable: "searchable",
      subSubDoc: { someProperty: "value" }
    }
  };
  //create an item
  console.log("Creating item");
  const { item } = await container.items.create(itemSpec);
  console.log("Item created");
  //try a query on an excluded property, expect no results
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.metaData = @value",
    parameters: [
      {
        name: "@value",
        value: "meta"
      }
    ]
  };
  try {
    //expecting an exception on this query due to the fact that it includes paths that
    //have been excluded. If you want to force a scan, then enableScanInQuery like we did in forceScanOnHashIndexPath()
    console.log("Querying for item where metaData = 'meta', should throw an exception");
    await container.items.query(querySpec).toArray();
    throw new Error("Should've produced an error");
  } catch (err) {
    // Service errors carry a `code`; the Error thrown above does not, so it is rethrown.
    if (err.code !== undefined) {
      console.log("Threw, as expected");
    } else {
      throw err;
    }
  } //show that you can still read the item by its id
  console.log("Can still item.read() using '" + item.id + "'");
  const { body: itemDef } = await item.read();
  console.log("Item '" + item.id + "' read and it's _self is '" + itemDef._self + "'");
  await container.delete();
  console.log("Container '" + containerId + "' deleted");
}
/**
 * Demo 7: creates a container with the default index policy, then uses
 * container.replace() to switch to Range indexes on all paths, waits for the
 * online index transform to finish, and proves a string range query works.
 * @param {cosmos.Database} database database the demo container is created in
 */
async function performIndexTransforms(database) {
  //create container with default index policy
  console.log("Creating container with default index policy (i.e. no range on strings)");
  const containerId = "IndexTransformsDemo"; // fixed local-name typo "containterId"
  const { body: containerDef, container } = await database.containers.create({ id: containerId });
  console.log("Container '" + containerDef.id + "' created");
  //create item
  const itemSpec = {
    id: "item1",
    stringField: "a string"
  };
  console.log("Creating item");
  // Dropped the unused `item` destructured binding.
  const { body: itemDef } = await container.items.create(itemSpec);
  console.log("Item with id '" + itemDef.id + "' created");
  //define a new indexPolicy which includes Range on all string paths (and Hash on all numbers)
  const indexPolicySpec = {
    includedPaths: [
      {
        path: "/*",
        indexes: [
          {
            kind: "Range",
            dataType: "String"
          },
          {
            kind: "Range",
            dataType: "Number"
          }
        ]
      }
    ]
  };
  const containerSpec = { id: containerId, indexingPolicy: indexPolicySpec };
  //container.replace() to update the indexPolicy
  await container.replace(containerSpec);
  console.log("Waiting for index transform to be completed");
  //Index transform is an async operation that is performed on a Container
  //You can continue to use the container while this is happening, but depending
  //on the transform and your queries you may get inconsistent results as the index is updated
  //Here, we'll just wait for index transform to complete.
  //this will be almost instant because we only have one item
  //but this can take some time on larger containers
  await waitForIndexTransformToComplete(container);
  console.log("Index transform completed");
  const querySpec = {
    query: "SELECT * FROM root r WHERE r.stringField > @value",
    parameters: [
      {
        name: "@value",
        value: "a"
      }
    ]
  };
  // Querying all items doing a range operation on a string (this would've failed without the transform)
  const { result: results } = await container.items.query(querySpec).toArray();
  if (results.length == 0) {
    throw new Error("Should've found an item");
  } else {
    const queryDoc = results[0];
    console.log("Item with id '" + queryDoc.id + "' found");
  }
  // Previously this demo leaked its container; delete it like the other demos
  // do, since every containers.create() run is billed (see the file header note).
  await container.delete();
  console.log("Container '" + containerId + "' deleted");
}
/**
 * Resolves after roughly `timeMS` milliseconds; used to poll the index
 * transformation progress without busy-waiting.
 * (The unused `reject` parameter in the executor was removed.)
 * @param {number} timeMS delay in milliseconds
 * @returns {Promise<void>}
 */
async function sleep(timeMS) {
  return new Promise(resolve => setTimeout(resolve, timeMS));
}
/**
 * Polls container.read() until the index transformation reports completion.
 * The response headers include a progress indicator between 0 and 100.
 * (The unused `count` local from the original was removed.)
 * @param {cosmos.Container} container container whose index is being transformed
 */
async function waitForIndexTransformToComplete(container) {
  // NOTE(review): the header value is a string; the >= / < comparisons below
  // rely on JavaScript's implicit numeric coercion of that string.
  let progress = 0;
  while (progress >= 0 && progress < 100) {
    console.log("Reading container");
    const { headers } = await container.read();
    progress = headers["x-ms-documentdb-collection-index-transformation-progress"];
    console.log("Progress is currently " + progress);
    console.log("Waiting for 100ms");
    await sleep(100);
  }
  console.log("Done waiting, progress == 100");
}
// Logs the failure (preferring the service's error body when present), then
// tears down the demo database.
async function handleError(error) {
  console.log(`\nAn error with code '${error.code}' has occurred:`);
  // Parenthesized: '+' binds tighter than '||', so the original
  // `"\t" + error.body || error` printed "\tundefined" and could never
  // fall back to `error`.
  console.log("\t" + (error.body || error));
  await finish();
}
// Demo teardown: removes the database created by init(), then prints the footer.
async function finish() {
  const database = client.database(databaseId);
  await database.delete();
  console.log("\nEnd of demo.");
}
run().catch(handleError);

Просмотреть файл

@ -1,10 +0,0 @@
{
"name": "cosmosdb-index-management",
"version": "0.0.0",
"private": true,
"description": "A Sample to explain the many ways to set indexes on Azure Cosmos DB",
"scripts": {
"start": "node app.js"
},
"dependencies": {}
}

142
samples/ItemManagement.ts Normal file
Просмотреть файл

@ -0,0 +1,142 @@
import { logSampleHeader, handleError, finish, logStep } from "./Shared/handleError";
import { CosmosClient } from "../dist";
import { endpoint, key, database as databaseId, container as containerId } from "./Shared/config";
import { readFileSync } from "fs";
logSampleHeader("Item Management");
const itemDefs = JSON.parse(readFileSync("./Shared/Data/Families.json", "utf8")).Families;
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
/**
 * Item management demo: creates sample items, lists them, performs point reads
 * (including conditional reads with ETags), queries, conditional replaces,
 * upserts, and finally deletes an item before tearing the database down.
 * All failures are reported through handleError.
 */
async function run() {
  // ensuring a database & container exists for us to work with
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  const { container } = await database.containers.createIfNotExists({ id: containerId });
  logStep("Insert items in to database '" + databaseId + "' and container '" + containerId + "'");
  await Promise.all(itemDefs.map((itemDef: any) => container.items.create(itemDef)));
  console.log(itemDefs.length + " items created");
  logStep("List items in container '" + container.id + "'");
  const { resources: itemDefList } = await container.items.readAll().fetchAll();
  for (const itemDef of itemDefList) {
    console.log(itemDef.id);
  }
  const item = container.item(itemDefList[0].id, undefined);
  logStep("Read item '" + item.id + "'");
  const { resource: readDoc } = await item.read();
  console.log("item with id '" + item.id + "' found");
  logStep("Read item with AccessCondition and no change to _etag");
  const { resource: item2, headers } = await item.read({
    accessCondition: { type: "IfNoneMatch", condition: readDoc._etag }
  });
  // No body plus an empty content-length indicates a 304-style "not modified" response.
  if (!item2 && headers["content-length"] == 0) {
    console.log(
      "As expected, no item returned. This is because the etag sent matched the etag on the server. i.e. you have the latest version of the item already"
    );
  }
  // if someone else updates this item, its etag on the server would change.
  // repeating the above read with the old etag would then get a item in the response
  readDoc.foo = "bar";
  await item.replace(readDoc);
  const { resource: item3, headers: headers3 } = await item.read({
    accessCondition: { type: "IfNoneMatch", condition: readDoc._etag }
  });
  if (!item3 && headers3["content-length"] === 0) {
    // Throw real Error objects (not strings) so stack traces are preserved.
    throw new Error("Expected item this time. Something is wrong!");
  } else {
    console.log("This time the read request returned the item because the etag values did not match");
  }
  const querySpec = {
    query: "SELECT * FROM Families f WHERE f.lastName = @lastName",
    parameters: [
      {
        name: "@lastName",
        value: "Andersen"
      }
    ]
  };
  logStep("Query items in container '" + container.id + "'");
  const { resources: results } = await container.items.query(querySpec).fetchAll();
  if (results.length === 0) {
    throw new Error("No items found matching");
  } else if (results.length > 1) {
    throw new Error("More than 1 item found matching");
  }
  const person = results[0];
  console.log("The '" + person.id + "' family has lastName '" + person.lastName + "'");
  console.log("The '" + person.id + "' family has " + person.children.length + " children '");
  // add a new child to this family, and change the family's lastName
  const childDef = {
    firstName: "Newborn",
    gender: "unknown",
    fingers: 10,
    toes: 10
  };
  person.children.push(childDef);
  person.lastName = "Updated Family";
  logStep("Replace item with id '" + item.id + "'");
  const { resource: updatedPerson } = await container.items.upsert(person);
  console.log("The '" + person.id + "' family has lastName '" + updatedPerson.lastName + "'");
  console.log("The '" + person.id + "' family has " + updatedPerson.children.length + " children '");
  logStep("Trying to replace item when item has changed in the database");
  // The replace item above will work even if there's a new version of item on the server from what you originally read
  // If you want to prevent this from happening you can opt-in to a conditional update
  // Using accessCondition and etag you can specify that the replace only occurs if the etag you are sending matches the etag on the server
  // i.e. Only replace if the item hasn't changed
  // let's go update item
  person.foo = "bar";
  await item.replace(person);
  // now let's try another update to item with accessCondition and etag set
  person.foo = "should never get set";
  try {
    await item.replace(person, { accessCondition: { type: "IfMatch", condition: person._etag } });
    throw new Error("This should have failed!");
  } catch (err) {
    // 412 Precondition Failed is the expected outcome of the stale-etag replace.
    if (err.code == 412) {
      console.log("As expected, the replace item failed with a pre-condition failure");
    } else {
      throw err;
    }
  }
  const upsertSource = itemDefList[1];
  logStep(`Upserting person ${upsertSource.id} with id ${upsertSource.id}...`);
  // a non-identity change will cause an update on upsert
  upsertSource.foo = "baz";
  const { resource: upsertedPerson1 } = await container.items.upsert(upsertSource);
  console.log(`Upserted ${upsertedPerson1.id} to id ${upsertedPerson1.id}.`);
  // an identity change will cause an insert on upsert
  upsertSource.id = "HazzardFamily";
  const { resource: upsertedPerson2 } = await container.items.upsert(upsertSource);
  console.log(`Upserted ${upsertedPerson2.id} to id ${upsertedPerson2.id}.`);
  if (upsertedPerson1.id === upsertedPerson2.id) {
    throw new Error("These two upserted records should have different resource IDs.");
  }
  logStep("Delete item '" + item.id + "'");
  await item.delete();
  await finish();
}
run().catch(handleError);

Просмотреть файл

@ -1,8 +0,0 @@
Samples for performing basic CRUD operations on Azure Cosmos DB documents
- createDocuments - Insert some documents into the collection
- listDocuments - Read the document feed for a collection
- readDocument - Read a single document by its id
- queryDocuments - Query for documents by some property
- replaceDocument - Update some properties and replace the document
- deleteDocument - Given a document id, delete it

Просмотреть файл

@ -1,198 +0,0 @@
// @ts-check
console.log();
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log();
console.log("ITEM MANAGEMENT");
console.log("===================");
console.log();
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const fs = require("fs");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
// Loads the sample Families data set from disk and returns the array of family definitions.
const getItemDefinitions = function() {
  const raw = fs.readFileSync("../Shared/Data/Families.json", "utf8");
  const parsed = JSON.parse(raw);
  return parsed.Families;
};
// Establish a new instance of the CosmosClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
//-------------------------------------------------------------------------------------------------------
// This demo performs a few steps
// 1. create items - Insert some items in to container
// 2. list items - Read the item feed for a container
// 3. read item
// 3.1 - Read a single item by its id
// 3.2 - Use ETag and AccessCondition to only return a item if ETag does not match
// 4. query items - Query for items by some property
// 5. replace item
// 5.1 - Update some properties and replace the item
// 5.2 - Use ETag and AccessCondition to only replace item if it has not changed
// 6. upsert item - Update a item if it exists, else create new item
// 7. delete item - Given a item id, delete it
//-------------------------------------------------------------------------------------------------------
/**
 * Runs the item-management demo end to end:
 * 1. create items  2. list items  3. point-read (plain and with ETag/AccessCondition)
 * 4. query  5. replace (plain and conditional)  6. upsert  7. delete.
 * Cleans up via finish(); errors propagate to the caller's catch (handleError).
 */
async function run() {
  // ensuring a database & container exists for us to work with
  const { container, database } = await init();
  //1.
  console.log(`\n1. insert items in to database '${databaseId}' and container '${containerId}'`);
  const promises = getItemDefinitions().map(itemDef => container.items.create(itemDef));
  const items = await Promise.all(promises);
  console.log(`${items.length} items created`);
  //2.
  console.log(`\n2. list items in container '${container.id}'`);
  const { result: itemDefList } = await container.items.readAll().toArray();
  itemDefList.forEach(({ id }) => console.log(id));
  //3.1
  const item = container.item(itemDefList[0].id);
  console.log(`\n3.1 read item '${item.id}'`);
  const { body: readDoc } = await item.read();
  console.log(`item with id '${item.id}' found`);
  //3.2
  console.log("\n3.2 read item with AccessCondition and no change to _etag");
  const { body: item2, headers } = await item.read({
    accessCondition: { type: "IfNoneMatch", condition: readDoc._etag }
  });
  // No body plus an empty content-length indicates a 304-style "not modified" response.
  if (!item2 && headers["content-length"] == 0) {
    console.log(
      "As expected, no item returned. This is because the etag sent matched the etag on the server. i.e. you have the latest version of the item already"
    );
  }
  //if we someone else updates this item, its etag on the server would change.
  //repeating the above read with the old etag would then get a item in the response
  readDoc.foo = "bar";
  await item.replace(readDoc);
  const { body: item3, headers: headers3 } = await item.read({
    accessCondition: { type: "IfNoneMatch", condition: readDoc._etag }
  });
  if (!item3 && headers3["content-length"] === 0) {
    // Throw real Error objects (not strings) so stack traces are preserved.
    throw new Error("Expected item this time. Something is wrong!");
  } else {
    console.log("This time the read request returned the item because the etag values did not match");
  }
  //4.
  const querySpec = {
    query: "SELECT * FROM Families f WHERE f.lastName = @lastName",
    parameters: [
      {
        name: "@lastName",
        value: "Andersen"
      }
    ]
  };
  console.log(`\n4. query items in container '${container.id}'`);
  const { result: results } = await container.items.query(querySpec).toArray();
  if (results.length == 0) {
    throw new Error("No items found matching");
  } else if (results.length > 1) {
    throw new Error("More than 1 item found matching");
  }
  const person = results[0];
  console.log(`The '${person.id}' family has lastName '${person.lastName}'`);
  console.log(`The '${person.id}' family has ${person.children.length} children '`);
  //add a new child to this family, and change the family's lastName
  const childDef = {
    firstName: "Newborn",
    gender: "unknown",
    fingers: 10,
    toes: 10
  };
  person.children.push(childDef);
  person.lastName = "Updated Family";
  //5.1
  console.log(`\n5.1 replace item with id '${item.id}'`);
  const { body: updatedPerson } = await item.replace(person);
  console.log(`The '${person.id}' family has lastName '${updatedPerson.lastName}'`);
  console.log(`The '${person.id}' family has ${updatedPerson.children.length} children '`);
  // 5.2
  console.log("\n5.2 trying to replace item when item has changed in the database");
  // The replace item above will work even if there's a new version of item on the server from what you originally read
  // If you want to prevent this from happening you can opt-in to a conditional update
  // Using accessCondition and etag you can specify that the replace only occurs if the etag you are sending matches the etag on the server
  // i.e. Only replace if the item hasn't changed
  // let's go update item
  person.foo = "bar";
  await item.replace(person);
  // now let's try another update to item with accessCondition and etag set
  person.foo = "should never get set";
  try {
    await item.replace(person, { accessCondition: { type: "IfMatch", condition: person._etag } });
    throw new Error("This should have failed!");
  } catch (err) {
    // 412 Precondition Failed is the expected outcome of the stale-etag replace.
    if (err.code == 412) {
      console.log("As expected, the replace item failed with a pre-condition failure");
    } else {
      throw err;
    }
  }
  //6.
  const upsertSource = itemDefList[1];
  console.log(`6. Upserting person ${upsertSource.id} with _rid ${upsertSource._rid}...`);
  // a non-identity change will cause an update on upsert
  upsertSource.foo = "baz";
  const { body: upsertedPerson1 } = await container.items.upsert(upsertSource);
  console.log(`Upserted ${upsertedPerson1.id} to _rid ${upsertedPerson1._rid}.`);
  // an identity change will cause an insert on upsert
  upsertSource.id = "HazzardFamily";
  const { body: upsertedPerson2 } = await container.items.upsert(upsertSource);
  console.log(`Upserted ${upsertedPerson2.id} to _rid ${upsertedPerson2._rid}.`);
  if (upsertedPerson1._rid === upsertedPerson2._rid)
    throw new Error("These two upserted records should have different resource IDs.");
  //7.
  // Fixed: this message previously said "6." although it is step 7.
  console.log("\n7. delete item '" + item.id + "'");
  await item.delete();
  await finish();
}
// Creates (or reuses) the demo database and container and hands both back.
async function init() {
  const dbResponse = await client.databases.createIfNotExists({ id: databaseId });
  const database = dbResponse.database;
  const containerResponse = await database.containers.createIfNotExists({ id: containerId });
  return { database, container: containerResponse.container };
}
/**
 * Logs the error, then tears down the demo database via finish().
 * @param {{ code?: number, body?: any }} error - error raised by the demo
 */
async function handleError(error) {
  console.log(`\nAn error with code '${error.code}' has occurred:`);
  // Parenthesized: previously `"\t" + error.body || error` concatenated first,
  // producing an always-truthy string, so the `|| error` fallback never fired.
  console.log("\t" + (error.body || error));
  await finish();
}
// Final cleanup step: removes the demo database and announces the end of the demo.
async function finish() {
  await client.database(databaseId).delete();
  const closingMessage = "\nEnd of demo.";
  console.log(closingMessage);
}
run().catch(handleError);

Просмотреть файл

@ -1,10 +0,0 @@
{
"name": "cosmos-item-management",
"version": "0.0.0",
"private": true,
"description": "Sample showing how to do item management in Cosmos",
"dependencies": {},
"scripts": {
"start": "node app.js"
}
}

Просмотреть файл

@ -1,9 +1,7 @@
// tslint:disable:no-console
import { v4 as guid } from "uuid";
import { Container, CosmosClient, Item, ItemDefinition, ItemResponse, Items } from "../../lib";
import { ItemBody } from "../../lib/client/Item/ItemBody";
import { Constants, StatusCodes } from "../../lib/common";
import { CosmosClient, Item, ItemDefinition, Items, OperationType, Resource, StatusCodes } from "../../dist";
import logger from "./logger";
import lwwSprocDef from "./lwwSprocDef";
@ -48,7 +46,7 @@ export class ConflictWorker {
});
// See ./lwwSprocDef for the stored procedure definition include the logic
const { sproc: lwwSproc } = await udpContainer.storedProcedures.upsert(lwwSprocDef);
const { sproc: lwwSproc } = await udpContainer.scripts.storedProcedures.create(lwwSprocDef);
}
public async RunManualConflict() {
@ -132,7 +130,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.manualContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -154,7 +152,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.manualContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -185,7 +183,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.manualContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -202,7 +200,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.manualContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -237,10 +235,10 @@ export class ConflictWorker {
while (true) {
const container = client.database(this.databaseName).container(this.manualContainerName);
const { result: conflicts } = await container.conflicts.readAll().toArray();
const { resources: conflicts } = await container.conflicts.readAll().fetchAll();
for (const conflict of conflicts) {
if (conflict.operationType !== Constants.OperationTypes.Delete) {
if (conflict.operationType !== OperationType.Delete) {
const content = JSON.parse(conflict.content as any);
if (item.id !== content.id) {
continue;
@ -308,7 +306,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.lwwContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -323,7 +321,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.lwwContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -350,7 +348,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.lwwContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -366,7 +364,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.lwwContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -376,7 +374,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.lwwContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -409,7 +407,7 @@ export class ConflictWorker {
) {
const container = client.database(this.databaseName).container(this.lwwContainerName);
const { result: conflicts } = await container.conflicts.readAll().toArray();
const { resources: conflicts } = await container.conflicts.readAll().fetchAll();
if (conflicts.length !== 0) {
console.error(`Found ${conflicts.length} conflicts in the lww container`);
@ -419,7 +417,7 @@ export class ConflictWorker {
if (hasDeleteConflict) {
do {
try {
await container.item(items[0].id).read();
await container.item(items[0].id, undefined).read();
} catch (err) {
if (err.code === StatusCodes.NotFound) {
console.log(`Delete conflict won @ ${regionName}`);
@ -437,7 +435,7 @@ export class ConflictWorker {
while (true) {
try {
const { body: currentItem } = await container.item(winner.id).read();
const { resource: currentItem } = await container.item(winner.id, undefined).read();
if (currentItem.regionId === winner.regionId) {
console.log(`Winner document from region ${currentItem.regionId} found at ${regionName}`);
@ -488,7 +486,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.udpContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -503,7 +501,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.udpContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -529,7 +527,7 @@ export class ConflictWorker {
const [initialRegionName, initialClient] = this.clients.entries().next().value;
const container = initialClient.database(this.databaseName).container(this.udpContainerName);
const item = { regionId: 0, regionEndpoint: initialRegionName, ...itemBase }; // TODO: ReadEndpoint?
const { body: newItemDef } = await container.items.create(item);
const { resource: newItemDef } = await container.items.create(item);
await this.sleep(1000); // 1 second for the write to sync
@ -545,7 +543,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.udpContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -555,7 +553,7 @@ export class ConflictWorker {
client
.database(this.databaseName)
.container(this.udpContainerName)
.item(itemBase.id),
.item(itemBase.id, undefined),
newDef
)
);
@ -588,7 +586,7 @@ export class ConflictWorker {
) {
const container = client.database(this.databaseName).container(this.udpContainerName);
const { result: conflicts } = await container.conflicts.readAll().toArray();
const { resources: conflicts } = await container.conflicts.readAll().fetchAll();
if (conflicts.length !== 0) {
console.error(`Found ${conflicts.length} conflicts in the udp container`);
@ -598,7 +596,7 @@ export class ConflictWorker {
if (hasDeleteConflict) {
do {
try {
const { body: shouldNotExist } = await container.item(items[0].id).read();
const { resource: shouldNotExist } = await container.item(items[0].id, undefined).read();
} catch (err) {
if (err.code === StatusCodes.NotFound) {
console.log(`Delete conflict won @ ${regionName}`);
@ -616,7 +614,7 @@ export class ConflictWorker {
while (true) {
try {
const { body: currentItem } = await container.item(winner.id).read();
const { resource: currentItem } = await container.item(winner.id, undefined).read();
if (currentItem.regionId === winner.regionId) {
console.log(`Winner document from region ${currentItem.regionId} found at ${regionName}`);
@ -633,9 +631,9 @@ export class ConflictWorker {
}
}
private async tryInsertItem(items: Items, newDef: ItemDefinition): Promise<ItemDefinition & ItemBody> {
private async tryInsertItem(items: Items, newDef: ItemDefinition): Promise<ItemDefinition> {
try {
return (await items.create(newDef)).body;
return (await items.create(newDef)).resource;
} catch (err) {
// Handle conflict error silently
if (err.code === StatusCodes.Conflict) {
@ -645,7 +643,7 @@ export class ConflictWorker {
}
}
private async tryUpdateItem(item: Item, newDef: ItemDefinition): Promise<ItemDefinition & ItemBody> {
private async tryUpdateItem(item: Item, newDef: ItemDefinition): Promise<ItemDefinition & Resource> {
const time = Date.now();
try {
return (await item.replace(newDef, {
@ -653,7 +651,7 @@ export class ConflictWorker {
type: "IfMatch",
condition: newDef._etag
}
})).body;
})).resource;
} catch (err) {
if (err.code === StatusCodes.PreconditionFailed || err.code === StatusCodes.NotFound) {
console.log(`${await item.container.database.client.getWriteEndpoint()} hit ${err.code} at ${time}`);
@ -667,7 +665,7 @@ export class ConflictWorker {
private async tryDeleteItem(item: Item, newDef: ItemDefinition): Promise<ItemDefinition> {
try {
const { body: deletedItem } = await item.delete({
const { resource: deletedItem } = await item.delete({
accessCondition: {
type: "IfMatch",
condition: newDef._etag
@ -686,10 +684,10 @@ export class ConflictWorker {
private async DeleteConflict(item: ItemDefinition) {
const client = this.clients.values().next().value;
const container = client.database(this.databaseName).container(this.manualContainerName);
const conflicts = await container.conflicts.readAll().toArray();
const conflicts = await container.conflicts.readAll().fetchAll();
for (const conflict of conflicts.result) {
if (conflict.operationType !== Constants.OperationTypes.Delete) {
for (const conflict of conflicts.resources) {
if (conflict.operationType !== OperationType.Delete) {
const content = JSON.parse(conflict.content);
if (content._rid === item._rid && content._etag === item._etag && content.regionId === item.regionId) {
console.log(`Deleting manual conflict ${conflict.resourceId} from region ${item.regionId}`);

Просмотреть файл

@ -1,8 +1,7 @@
import { ConnectionPolicy, ConsistencyLevel, CosmosClient } from "../../lib";
import { ConsistencyLevel, CosmosClient } from "../../dist";
import config from "./config";
import { ConflictWorker } from "./ConflictWorker";
import { Worker } from "./Worker";
// tslint:disable:no-console
export class MultiRegionWriteScenario {
private basicWorkers: Worker[] = [];
private conflictWorker: ConflictWorker;
@ -15,13 +14,12 @@ export class MultiRegionWriteScenario {
config.udpCollectionName
);
for (const region of config.regions) {
const connectionPolicy: ConnectionPolicy = new ConnectionPolicy();
connectionPolicy.UseMultipleWriteLocations = true;
connectionPolicy.PreferredLocations = [region];
const client = new CosmosClient({
endpoint: config.endpoint,
auth: { masterKey: config.key },
connectionPolicy,
key: config.key,
connectionPolicy: {
preferredLocations: [region]
},
consistencyLevel: ConsistencyLevel.Eventual
});
this.conflictWorker.addClient(region, client);

Просмотреть файл

@ -1,5 +1,5 @@
import { v4 as guid } from "uuid";
import { Container, CosmosClient } from "../../lib";
import { Container } from "../../dist";
// tslint:disable:no-console
export class Worker {
@ -23,7 +23,7 @@ export class Worker {
public async ReadAll(expectedNumberOfItems: number) {
while (true) {
const { result: items } = await this.container.items.readAll().toArray();
const { resources: items } = await this.container.items.readAll().fetchAll();
if (items.length < expectedNumberOfItems) {
console.log(
`Total item read ${items.length} from ${
@ -40,9 +40,9 @@ export class Worker {
}
public async DeleteAll() {
const { result: items } = await this.container.items.readAll().toArray();
const { resources: items } = await this.container.items.readAll().fetchAll();
for (const item of items) {
await this.container.item(item.id).delete();
await this.container.item(item.id, undefined).delete();
}
console.log(`Deleted all documents from region ${this.regionName}`);
}

Просмотреть файл

@ -1,4 +1,4 @@
import { StoredProcedureDefinition } from "../../lib";
import { StoredProcedureDefinition } from "../../dist-esm";
const lwwSprocDef: StoredProcedureDefinition = {
id: "resolver",

Просмотреть файл

@ -1,17 +0,0 @@
# Multi-Region Write
This demo shows off writing to multiple regions at the same time. It also demos different conflict handling scenarios.
## Quick start
1. Install packages: `npm i`
2. Set environment variables
1. endpoint - the endpoint url
2. key - the masterkey for the account
3. regions - a semicolon-delimited list of regions (e.g. westus;eastus)
4. There are additional config options in the config.ts file, but they are not required.
3. Start: `npm start`
## Debugging with VS Code
There is a launch.json config named "MultiRegionWrite Debug" which you can use to attach via VS Code.

2
samples/MultiRegionWrite/types.d.ts поставляемый Normal file
Просмотреть файл

@ -0,0 +1,2 @@
declare module "ora";
declare module "uuid";

Просмотреть файл

@ -1,3 +0,0 @@
Our server-side javascript samples have moved to:
https://github.com/Azure/azure-documentdb-js-server/tree/master/samples

Просмотреть файл

@ -1 +0,0 @@
Samples for creating and executing ServerSide Scripts such as Stored Procedures, Triggers and User Defined Functions

Просмотреть файл

@ -1,66 +0,0 @@
// @ts-check
console.log();
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log();
console.log("SERVER SIDE SCRIPTS");
console.log("===================");
console.log();
/*jshint node:true */
("use strict");
const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;
const config = require("../Shared/config");
const fs = require("fs");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;
// Establish a new instance of the DocumentDBClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
// Path to stored procedure definition
const sprocDefinition = require("./JS/upsert");
// Execute the stored procedure with the following parameters.
const sprocParams = [
{
id: "myDocument",
foo: "bar"
}
];
// Server-side scripts demo: creates a database/container, upserts and executes
// the sample stored procedure, dumps its output, then deletes everything.
async function run() {
  const { database } = await client.databases.create({ id: databaseId });
  const { container } = await database.containers.create({ id: containerId });
  console.log("Upserting the sproc: '" + sprocDefinition.id + "'");
  // Register (or update) the stored procedure on the container.
  const upsertResponse = await container.storedProcedures.upsert(sprocDefinition);
  const sproc = upsertResponse.sproc;
  const sprocDef = upsertResponse.body;
  console.log("Executing the sproc: '" + sproc.id + "'");
  console.log("Sproc parameters: " + JSON.stringify(sprocParams));
  const executeResponse = await sproc.execute(sprocParams);
  const results = executeResponse.body;
  const headers = executeResponse.headers;
  console.log("//////////////////////////////////");
  if (headers) {
    console.log("// responseHeaders");
    console.log(headers);
  }
  if (results) {
    console.log("// results");
    console.log(results);
  }
  console.log("//////////////////////////////////");
  await database.delete();
  console.log("Database and Collection DELETED");
  console.log("Demo finished");
}

Просмотреть файл

@ -0,0 +1,49 @@
import { logSampleHeader, logStep, finish, handleError } from "../Shared/handleError";
import { endpoint, key, database as databaseId, container as containerId } from "../Shared/config";
import { CosmosClient } from "../../dist";
logSampleHeader("Server Side Scripts");
// Establish a new instance of the DocumentDBClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
// Path to stored procedure definition
const sprocDefinition = require("./upsert");
// Execute the stored procedure with the following parameters.
const sprocParams = [
{
id: "myDocument",
foo: "bar"
}
];
async function run() {
const { database } = await client.databases.create({ id: databaseId });
const { container } = await database.containers.create({ id: containerId });
logStep("Creating the sproc: '" + sprocDefinition.id + "'");
// Query for the stored procedure.
const { sproc, resource: sprocDef } = await container.scripts.storedProcedures.create(sprocDefinition);
logStep("Executing the sproc: '" + sproc.id + "'");
console.log("Sproc parameters: " + JSON.stringify(sprocParams));
const { resource: results, headers } = await sproc.execute(undefined, sprocParams);
console.log("//////////////////////////////////");
if (headers) {
console.log("// responseHeaders");
console.log(headers);
}
if (results) {
console.log("// results");
console.log(results);
}
console.log("//////////////////////////////////");
await finish();
}
run().catch(handleError);

Просмотреть файл

@ -1,10 +0,0 @@
{
"name": "cosmos-serversidescripts-sample",
"private": true,
"version": "0.0.0",
"description": "A sample showing server side scripts with Azure Cosmos DB",
"scripts": {
"start": "node app.js"
},
"dependencies": {}
}

Просмотреть файл

@ -1,15 +0,0 @@
exports.connection = {
endpoint: process.env.COSMOS_SAMPLE_ENDPOINT || "https://localhost:8081/",
authKey:
process.env.COSMOS_SAMPLE_ENDPOINT ||
"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
};
if (exports.connection.endpoint.includes("https://localhost")) {
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
}
exports.names = {
database: "NodeSamples",
container: "Data"
};

10
samples/Shared/config.ts Normal file
Просмотреть файл

@ -0,0 +1,10 @@
// Connection settings for the samples; values fall back to the local Cosmos DB emulator.
const defaultEndpoint = "https://localhost:8081/";
const defaultKey =
  "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";

export const endpoint = process.env.COSMOS_ENDPOINT || defaultEndpoint;
export const key = process.env.COSMOS_KEY || defaultKey;

// The emulator uses a self-signed certificate, so disable TLS verification locally.
if (endpoint.includes("https://localhost")) {
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
}

export const database = "NodeSamples";
export const container = "Data";

Просмотреть файл

@ -0,0 +1,34 @@
import { database, key, endpoint } from "./config";
import { CosmosClient } from "../../dist";
const client = new CosmosClient({ endpoint, key });
/**
 * Reports a sample failure: logs the error, cleans up the demo database,
 * and marks the process as failed.
 */
export async function handleError(error: any) {
  console.log(`\nAn error with code '${error.code}' has occurred:`);
  console.log(error);
  await finish();
  process.exitCode = 1;
}
/**
 * Deletes the sample database. A failed delete is reported (the database may
 * need manual removal) and marks the process as failed instead of throwing.
 */
export async function finish() {
  const databaseRef = client.database(database);
  try {
    await databaseRef.delete();
  } catch (error) {
    console.log(`Database: "${database}" might not have deleted properly. You might need to delete it manually.`);
    process.exitCode = 1;
    return;
  }
  console.log("\nEnd of demo.");
}
// Running counter used to number the demo steps in the console output.
let currentStep = 0;

/** Prints the next numbered step banner for the sample output. */
export function logStep(message: string) {
  currentStep += 1;
  console.log(`\n${currentStep}: ${message}`);
}
export function logSampleHeader(sampleName: string) {
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log(sampleName);
console.log("================================");
}

2
samples/TodoApp/.gitignore поставляемый
Просмотреть файл

@ -1,2 +0,0 @@
!config.js
!bin

21
samples/TodoApp/.vscode/launch.json поставляемый
Просмотреть файл

@ -1,21 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/bin/www",
"sourceMaps": true,
"outFiles": [
"${workspaceFolder}/../../lib/**"
],
"env": {
"NODE_TLS_REJECT_UNAUTHORIZED": "0"
}
}
]
}

Просмотреть файл

@ -1,72 +0,0 @@
// Express application wiring for the Todo sample app: middleware, the
// Cosmos-backed TaskDao/TaskList, routes, and error handlers.
const CosmosClient = require("../../").CosmosClient;
const config = require("./config");
const TaskList = require("./routes/tasklist");
const TaskDao = require("./models/taskDao");
const express = require("express");
const path = require("path");
const favicon = require("serve-favicon");
const logger = require("morgan");
const cookieParser = require("cookie-parser");
const bodyParser = require("body-parser");
const index = require("./routes/index");
const users = require("./routes/users");

const app = express();

// view engine setup
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "jade");

// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger("dev"));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, "public")));

//Todo App:
const docDbClient = new CosmosClient({
  endpoint: config.host,
  auth: {
    masterKey: config.authKey
  }
});
const taskDao = new TaskDao(docDbClient, config.databaseId, config.collectionId);
const taskList = new TaskList(taskDao);

// BUG FIX: TaskDao.init() takes no arguments; the callback previously passed
// here was silently ignored. Rely on the returned promise for failures.
taskDao.init().catch(err => {
  console.error(err);
  console.error("Shutting down");
  process.exit(1);
});

// Route handlers forward async rejections to the Express error handler.
app.get("/", (req, res, next) => taskList.showTasks(req, res).catch(next));
app.post("/addtask", (req, res, next) => taskList.addTask(req, res).catch(next));
app.post("/completetask", (req, res, next) => taskList.completeTask(req, res).catch(next));
// NOTE: a second, redundant app.set("view engine", "jade") call was removed;
// the engine is already configured above.

// catch 404 and forward to error handler
app.use(function(req, res, next) {
  const err = new Error("Not Found");
  err.status = 404;
  next(err);
});

// error handler
app.use(function(err, req, res, next) {
  // set locals, only providing error in development
  res.locals.message = err.message;
  res.locals.error = req.app.get("env") === "development" ? err : {};
  // render the error page
  res.status(err.status || 500);
  res.render("error");
});

module.exports = app;

Просмотреть файл

@ -1,90 +0,0 @@
#!/usr/bin/env node
// HTTP entry point for the Todo sample app: wraps the Express app in a
// Node http server and wires up error/listening handlers.

/**
 * Module dependencies.
 */
const app = require('../app');
const debug = require('debug')('todo:server');
const http = require('http');

/**
 * Get port from environment and store in Express.
 */
const port = normalizePort(process.env.PORT || '3000');
app.set('port', port);

/**
 * Create HTTP server.
 */
const server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Normalize a port into a number, string, or false.
 * Returns the original value for named pipes (non-numeric strings), a
 * number for non-negative ports, and false otherwise.
 */
function normalizePort(val) {
  const port = parseInt(val, 10);

  if (isNaN(port)) {
    // named pipe
    return val;
  }

  if (port >= 0) {
    // port number
    return port;
  }

  return false;
}

/**
 * Event listener for HTTP server "error" event.
 * Turns common listen() failures (EACCES, EADDRINUSE) into friendly
 * messages and exits; anything else is rethrown.
 */
function onError(error) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  const bind = typeof port === 'string'
    ? 'Pipe ' + port
    : 'Port ' + port;

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      console.error(bind + ' requires elevated privileges');
      process.exit(1);
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use');
      process.exit(1);
      break;
    default:
      throw error;
  }
}

/**
 * Event listener for HTTP server "listening" event.
 * Logs the bound pipe/port via the debug logger.
 */
function onListening() {
  const addr = server.address();
  const bind = typeof addr === 'string'
    ? 'pipe ' + addr
    : 'port ' + addr.port;
  debug('Listening on ' + bind);
}

Просмотреть файл

@ -1,14 +0,0 @@
// Configuration for the Todo sample app. HOST and AUTH_KEY environment
// variables override the local-emulator defaults.
const host = process.env.HOST || "https://localhost:8081/";
const authKey =
  process.env.AUTH_KEY || "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";

const config = {
  host,
  authKey,
  databaseId: "ToDoList",
  collectionId: "Items"
};

// The emulator serves a self-signed certificate; skip TLS verification
// only for localhost endpoints.
if (config.host.includes("https://localhost:")) {
  console.log("WARNING: Disabled checking of self-signed certs. Do not have this code in production.");
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
}

module.exports = config;

Просмотреть файл

@ -1,77 +0,0 @@
// @ts-check
const CosmosClient = require("../../../").CosmosClient;
class TaskDao {
/**
*
* @param {CosmosClient} cosmosClient
* @param {*} databaseId
* @param {*} containerId
*/
constructor(cosmosClient, databaseId, containerId) {
this.client = cosmosClient;
this.databaseId = databaseId;
this.collectionId = containerId;
this.database = null;
this.container = null;
}
async init() {
try {
const dbResponse = await this.client.databases.createIfNotExists({ id: this.databaseId });
this.database = dbResponse.database;
const coResponse = await this.database.containers.create({ id: this.collectionId });
this.container = coResponse.container;
} catch (err) {
throw err;
}
}
async find(querySpec) {
if (!this.container) {
throw new Error("Collection is not initialized.");
}
try {
const { result: results } = await this.container.items.query(querySpec).toArray();
return results;
} catch (err) {
throw err;
}
}
async addItem(item) {
item.date = Date.now();
item.completed = false;
try {
const { body: doc } = await this.container.items.create(item);
return doc;
} catch (err) {
throw err;
}
}
async updateItem(itemId) {
try {
const doc = await this.getItem(itemId);
doc.completed = true;
const { body: replaced } = await this.container.item(itemId).replace(doc);
return replaced;
} catch (err) {
throw err;
}
}
async getItem(itemId) {
try {
const { body } = await this.container.item(itemId).read();
return body;
} catch (err) {
throw err;
}
}
}
module.exports = TaskDao;

Просмотреть файл

@ -1,18 +0,0 @@
{
"name": "todo",
"version": "0.0.0",
"private": true,
"scripts": {
"start": "set NODE_TLS_REJECT_UNAUTHORIZED=0 && node ./bin/www"
},
"dependencies": {
"async": "^2.1.2",
"body-parser": "~1.15.2",
"cookie-parser": "~1.4.3",
"debug": "~2.2.0",
"express": "~4.14.0",
"jade": "~1.11.0",
"morgan": "~1.7.0",
"serve-favicon": "~2.3.0"
}
}

Просмотреть файл

@ -1,17 +0,0 @@
/* Base page typography for the Todo sample app. */
body {
  padding: 50px;
  font: 14px "Lucida Grande", Helvetica, Arial, sans-serif;
}

a {
  color: #00B7FF;
}

/* Stack the add-task form fields vertically. */
.well label {
  display: block;
}

.well input {
  margin-bottom: 5px;
}

.btn {
  margin-top: 5px;
  border: outset 1px #C8C8C8;
}

Просмотреть файл

@ -1,26 +0,0 @@
# Todo App
Sample Todo app
## Prereqs
- Build the SDK (see [dev.md](../../dev.md))
- Node 8 (uses async/await)
## Config
If you're using the local emulator with the default config, it should work without setting any additional config
**Environment Variables**
- `HOST` - URL for the Cosmos DB account (default is https://localhost:8081)
- `AUTH_KEY` - master key for the Cosmos DB (default is the well known key for emulator)
- `PORT` - port for the web app (default is 3000)
## Run
```bash
npm i
npm start
```
open browser to http://localhost:3000

Просмотреть файл

@ -1,9 +0,0 @@
// Router for the site root.
const express = require('express');

const router = express.Router();

/* GET home page. Renders the index view with a static title. */
router.get('/', (req, res) => {
  res.render('index', { title: 'Express' });
});

module.exports = router;

Просмотреть файл

@ -1,63 +0,0 @@
const CosmosClient = require('../../../').DocumentClient;
const TaskDao = require('../models/TaskDao');
const async = require('async');
class TaskList {
/**
*
* @param {TaskDao} taskDao
*/
constructor(taskDao) {
this.taskDao = taskDao;
}
async showTasks(req, res) {
const querySpec = {
query: 'SELECT * FROM root r WHERE r.completed=@completed',
parameters: [{
name: '@completed',
value: false
}]
};
try {
const items = await this.taskDao.find()
res.render('index', {
title: 'My ToDo List ',
tasks: items
});
} catch (err) {
throw err;
}
}
async addTask(req, res) {
const item = req.body;
try {
await this.taskDao.addItem(item);
res.redirect('/');
} catch (err) {
throw err;
}
}
async completeTask(req, res) {
const completedTasks = Object.keys(req.body);
const tasks = [];
try {
completedTasks.forEach((task) => {
tasks.push(this.taskDao.updateItem(task));
});
await Promise.all(tasks);
res.redirect('/');
} catch (err) {
throw err;
}
}
}
module.exports = TaskList;

Просмотреть файл

@ -1,9 +0,0 @@
// Router for the /users path.
const express = require('express');

const router = express.Router();

/* GET users listing. Placeholder response from the generated scaffold. */
router.get('/', (req, res) => {
  res.send('respond with a resource');
});

module.exports = router;

Просмотреть файл

@ -1,6 +0,0 @@
extends layout
block content
h1= message
h2= error.status
pre #{error.stack}

Просмотреть файл

@ -1,40 +0,0 @@
extends layout
block content
h1 #{title}
br
form(action="/completetask", method="post")
table.table.table-striped.table-bordered
tr
td Name
td Category
td Date
td Complete
if (typeof tasks === "undefined")
tr
td
else
each task in tasks
tr
td #{task.name}
td #{task.category}
- var date = new Date(task.date);
- var day = date.getDate();
- var month = date.getMonth() + 1;
- var year = date.getFullYear();
td #{month + "/" + day + "/" + year}
td
if(task.completed)
input(type="checkbox", disabled, name="#{task.id}", value="#{!task.completed}", checked=task.completed)
else
input(type="checkbox", name="#{task.id}", value="#{!task.completed}", checked=task.completed)
button.btn(type="submit") Update tasks
hr
form.well(action="/addtask", method="post")
label Item Name:
input(name="name", type="textbox")
label Item Category:
input(name="category", type="textbox")
br
button.btn(type="submit") Add item

Просмотреть файл

@ -1,13 +0,0 @@
doctype html
html
head
title= title
link(rel='stylesheet', href='//ajax.aspnetcdn.com/ajax/bootstrap/3.3.2/css/bootstrap.min.css')
link(rel='stylesheet', href='/stylesheets/style.css')
body
nav.navbar.navbar-inverse.navbar-fixed-top
div.navbar-header
a.navbar-brand(href='#') My Tasks
block content
script(src='//ajax.aspnetcdn.com/ajax/jQuery/jquery-1.11.2.min.js')
script(src='//ajax.aspnetcdn.com/ajax/bootstrap/3.3.2/bootstrap.min.js')

186
samples/UserManagement.ts Normal file
Просмотреть файл

@ -0,0 +1,186 @@
import { logSampleHeader, handleError, finish } from "./Shared/handleError";
import { Container, Permission, User, CosmosClient, PermissionMode } from "../dist";
import { endpoint, key, database as databaseId } from "./Shared/config";
// Print the standard sample banner.
logSampleHeader("User Management");

// Names for the containers, users, and items this sample creates.
const container1Name = "COL1";
const container2Name = "COL2";
const user1Name = "Thomas Andersen";
const user2Name = "Robin Wakefield";
const item1Name = "item1";
const item2Name = "item2";
const item3Name = "item3";

// Establish a new instance of the DocumentDBClient to be used throughout this demo
const client = new CosmosClient({ endpoint, key });
/**
 * Demo flow: provisions a database, two containers, three items, two users
 * and four permissions; then shows what a resource-token-scoped client can
 * and cannot do; finally cleans up via finish().
 */
async function run() {
  //--------------------------------------------------------------------------------------------------
  // We need a database, two containers, two users, and some permissions for this sample,
  // So let's go ahead and set these up initially
  //--------------------------------------------------------------------------------------------------
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  const { container: container1 } = await database.containers.createIfNotExists({ id: container1Name });
  const { container: container2 } = await database.containers.createIfNotExists({ id: container2Name });

  let itemSpec = { id: item1Name };
  let userDef = { id: user1Name };
  let permissionDef;

  // Create three items: two in container 1 and one in container 2.
  const { resource: itemDef, item: item1 } = await container1.items.create(itemSpec);
  console.log(item1Name + "Created in " + container1Name + " !");
  itemSpec = { id: item2Name };
  const { item: item2 } = await container1.items.create(itemSpec);
  console.log(item2Name + "Created in " + container1Name + " !");
  itemSpec = { id: item3Name };
  const { item: item3 } = await container2.items.create(itemSpec);
  console.log(item3Name + " Created in " + container2Name + " !");

  // Create the two users on the database.
  const { user: user1 } = await database.users.create(userDef);
  console.log(user1Name + " created!");
  userDef = { id: user2Name };
  const { user: user2 } = await database.users.create(userDef);
  console.log(user2Name + " created!");

  // Read Permission on container 1 for user1
  permissionDef = { id: "p1", permissionMode: PermissionMode.Read, resource: container1.url };
  const { permission: permission1 } = await user1.permissions.create(permissionDef);
  console.log("Read only permission assigned to Thomas Andersen on container 1!");
  permissionDef = { id: "p2", permissionMode: PermissionMode.All, resource: item1.url };
  // All Permissions on Doc1 for user1
  const { permission: permission2 } = await user1.permissions.create(permissionDef);
  console.log("All permission assigned to Thomas Andersen on item 1!");
  permissionDef = { id: "p3", permissionMode: PermissionMode.Read, resource: container2.url };
  // Read Permissions on Col2 for user1
  const { permission: permission3 } = await user1.permissions.create(permissionDef);
  console.log("Read permission assigned to Thomas Andersen on container 2!");
  permissionDef = { id: "p4", permissionMode: PermissionMode.All, resource: container2.url };
  const { permission: permission4 } = await user2.permissions.create(permissionDef);
  console.log("All permission assigned to Robin Wakefield on container 2!");

  const { resources: permissions } = await user1.permissions.readAll().fetchAll();
  console.log("Fetched permission for Thomas Andersen. Count is : " + permissions.length);

  // A client authenticated with only user1's read token can read container 1...
  const resourceTokens = await getResourceToken(container1, permission1);
  const resourceTokenClient = new CosmosClient({
    endpoint,
    resourceTokens
  });
  await resourceTokenClient
    .database(databaseId)
    .container(container1.id)
    .items.readAll()
    .fetchAll();
  console.log(user1.id + " able to perform read operation on container 1");

  // ...but cannot enumerate databases, which the token does not cover.
  try {
    await resourceTokenClient.databases.readAll().fetchAll();
  } catch (err) {
    console.log(
      "Expected error occurred as " +
        user1.id +
        " does not have access to get the list of databases. Error code : " +
        err.code
    );
  }

  await attemptWriteWithReadPermission(container1, user1, permission1);
  await attemptReadFromTwoCollections(container1, container2, user1, permission1, permission3);
  await finish();
}
/** Reads the permission and returns a resourceTokens map keyed by the container URL. */
async function getResourceToken(container: Container, permission: Permission) {
  const readResponse = await permission.read();
  const token = readResponse.resource._token;
  return { [container.url]: token };
}
/**
 * Attempts an upsert using a client that only holds a read permission on the
 * container; the request is expected to fail, and the error code is logged.
 */
async function attemptWriteWithReadPermission(container: Container, user: User, permission: Permission) {
  const resourceTokens = await getResourceToken(container, permission);
  // Client authenticated solely with the resource token (no master key).
  const client = new CosmosClient({
    endpoint,
    resourceTokens
  });
  const itemDef = { id: "not allowed" };
  try {
    await client
      .database(databaseId)
      .container(container.id)
      .items.upsert(itemDef);
  } catch (err) {
    console.log(
      "Expected error occurred as " +
        user.id +
        " does not have access to insert an item in the first container. Error code : " +
        err.code
    );
  }
}
/**
 * Attempts to read from both containers using a client that combines the two
 * read tokens, then verifies that a write to container 2 still fails.
 */
async function attemptReadFromTwoCollections(
  container1: Container,
  container2: Container,
  user1: User,
  permission1: Permission,
  permission2: Permission
) {
  // Merge both resource tokens into one map for a single client.
  const token1 = await getResourceToken(container1, permission1);
  const token2 = await getResourceToken(container2, permission2);
  const resourceTokens = { ...token1, ...token2 };
  const client = new CosmosClient({
    endpoint,
    resourceTokens
  });
  const { resources: items1 } = await client
    .database(databaseId)
    .container(container1.id)
    .items.readAll()
    .fetchAll();
  console.log(user1.id + " able to read items from container 1. Document count is " + items1.length);
  const { resources: items2 } = await client
    .database(databaseId)
    .container(container2.id)
    .items.readAll()
    .fetchAll();
  console.log(user1.id + " able to read items from container 2. Document count is " + items2.length);
  // Both tokens are read-only, so an upsert is still expected to fail.
  const itemDef = { id: "not allowed" };
  try {
    await client
      .database(databaseId)
      .container(container2.id)
      .items.upsert(itemDef);
  } catch (err) {
    console.log(
      "Expected error occurred as " +
        user1.id +
        " does not have access to insert an item in container 2. Error code : " +
        err.code
    );
  }
}
run().catch(handleError);

Просмотреть файл

@ -1,3 +0,0 @@
# UserManagement

Просмотреть файл

@ -1,259 +0,0 @@
// @ts-check
// Sample banner.
console.log();
console.log("Azure Cosmos DB Node.js Samples");
console.log("================================");
console.log();
console.log("USER MANAGEMENT");
console.log("================");
console.log();

const cosmos = require("../../lib/src");
const CosmosClient = cosmos.CosmosClient;

// Connection settings and resource names come from the shared sample config.
const config = require("../Shared/config");
const databaseId = config.names.database;
const containerId = config.names.container;
const endpoint = config.connection.endpoint;
const masterKey = config.connection.authKey;

// Names for the containers, users, and items this sample creates.
const container1Name = "COL1";
const container2Name = "COL2";
const user1Name = "Thomas Andersen";
const user2Name = "Robin Wakefield";
const item1Name = "item1";
const item2Name = "item2";
const item3Name = "item3";

// Establish a new instance of the DocumentDBClient to be used throughout this demo
const client = new CosmosClient({ endpoint, auth: { masterKey } });
/**
 * Demo flow: provisions resources via init(), then demonstrates what a
 * resource-token client can and cannot do, and finally cleans up.
 */
async function run() {
  const resources = await init();
  await attemptAdminOperations(resources.container1, resources.user1, resources.permission1);
  await attemptWriteWithReadPermissionAsync(resources.container1, resources.user1, resources.permission1);
  await attemptReadFromTwoCollections(
    resources.container1,
    resources.container2,
    resources.user1,
    resources.permission1,
    resources.permission3
  );
  await finish();
}
/**
 * Provisions the database, two containers, three items, two users, and four
 * permissions used by the rest of the sample; returns references to them all.
 */
async function init() {
  //--------------------------------------------------------------------------------------------------
  // We need a database, two containers, two users, and some permissions for this sample,
  // So let's go ahead and set these up initially
  //--------------------------------------------------------------------------------------------------
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  const { container: container1 } = await database.containers.createIfNotExists({ id: container1Name });
  const { container: container2 } = await database.containers.createIfNotExists({ id: container2Name });

  let itemSpec = { id: item1Name };
  let userDef = { id: user1Name };
  let permissionDef;

  // Create three items: two in container 1, one in container 2.
  const { body: itemDef, item: item1 } = await container1.items.create(itemSpec);
  console.log(`${item1Name}Created in ${container1Name} !`);
  itemSpec = { id: item2Name };
  const { item: item2 } = await container1.items.create(itemSpec);
  console.log(`${item2Name}Created in ${container1Name} !`);
  itemSpec = { id: item3Name };
  const { item: item3 } = await container2.items.create(itemSpec);
  console.log(`${item3Name} Created in ${container2Name} !`);

  // Create the two users.
  const { user: user1 } = await database.users.create(userDef);
  console.log(`${user1Name} created!`);
  userDef = { id: user2Name };
  const { user: user2 } = await database.users.create(userDef);
  console.log(`${user2Name} created!`);

  // Read Permission on container 1 for user1
  permissionDef = { id: "p1", permissionMode: cosmos.DocumentBase.PermissionMode.Read, resource: container1.url };
  const { ref: permission1 } = await user1.permissions.create(permissionDef);
  console.log(`Read only permission assigned to Thomas Andersen on container 1!`);
  permissionDef = { id: "p2", permissionMode: cosmos.DocumentBase.PermissionMode.All, resource: item1.url };
  // All Permissions on Doc1 for user1
  const { ref: permission2 } = await user1.permissions.create(permissionDef);
  console.log("All permission assigned to Thomas Andersen on item 1!");
  permissionDef = { id: "p3", permissionMode: cosmos.DocumentBase.PermissionMode.Read, resource: container2.url };
  // Read Permissions on Col2 for user1
  const { ref: permission3 } = await user1.permissions.create(permissionDef);
  console.log("Read permission assigned to Thomas Andersen on container 2!");
  permissionDef = { id: "p4", permissionMode: cosmos.DocumentBase.PermissionMode.All, resource: container2.url };
  const { ref: permission4 } = await user2.permissions.create(permissionDef);
  console.log("All permission assigned to Robin Wakefield on container 2!");

  const { result: permissions } = await user1.permissions.readAll().toArray();
  console.log(`Fetched permission for Thomas Andersen. Count is : ${permissions.length}`);
  return { user1, user2, container1, container2, permission1, permission2, permission3, permission4 };
}
/**
 * Logs the failure details, then attempts cleanup so the sample database is
 * not left behind on the account.
 */
async function handleError(error) {
  console.log();
  console.log(`An error with code '${error.code}' has occurred:`);
  console.log(`\t${error.body || error}`);
  if (error.headers) {
    console.log(`\t${JSON.stringify(error.headers)}`);
  }
  console.log();
  // Best-effort cleanup: a failure here is only reported, not rethrown.
  try {
    await finish();
  } catch (err) {
    console.log("Database might not have cleaned itself up properly...");
  }
}
/** Deletes the sample database and prints the closing message. */
async function finish() {
  const db = client.database(databaseId);
  await db.delete();
  console.log();
  console.log("End of demo.");
}
/**
 * Builds a resourceTokens map (container URL -> token) from a permission.
 * @param {cosmos.Container} container
 * @param {cosmos.Permission} permission
 */
async function getResourceToken(container, permission) {
  const { body: permDef } = await permission.read();
  return { [container.url]: permDef._token };
}
/**
 * Attempt to do admin operations when user only has Read on a container.
 * Reading items succeeds; listing databases is expected to fail.
 * @param {cosmos.Container} container
 * @param {cosmos.User} user
 * @param {cosmos.Permission} permission
 */
async function attemptAdminOperations(container, user, permission) {
  /** @type any */
  const resourceTokens = await getResourceToken(container, permission);
  // Client authenticated only with the resource token (no master key).
  const client = new CosmosClient({
    endpoint,
    auth: {
      resourceTokens
    }
  });
  await client
    .database(databaseId)
    .container(container.id)
    .items.readAll()
    .toArray();
  console.log(`${user.id} able to perform read operation on container 1`);
  // Enumerating databases is an account-level operation the token does not cover.
  try {
    await client.databases.readAll().toArray();
  } catch (err) {
    console.log(
      `Expected error occurred as ${user.id} does not have access to get the list of databases. Error code : ${
        err.code
      }`
    );
  }
}
/**
 * attempts to write in container 1 with user 1 permission. It fails as the user1 has read only permission on container 1
 * @param {cosmos.Container} container
 * @param {cosmos.User} user
 * @param {cosmos.Permission} permission
 */
async function attemptWriteWithReadPermissionAsync(container, user, permission) {
  /** @type any */
  const resourceTokens = await getResourceToken(container, permission);
  // Client authenticated only with the read-only resource token.
  const client = new CosmosClient({
    endpoint,
    auth: {
      resourceTokens
    }
  });
  const itemDef = { id: "not allowed" };
  // The upsert is expected to be rejected; the error code is logged.
  try {
    await client
      .database(databaseId)
      .container(container.id)
      .items.upsert(itemDef);
  } catch (err) {
    console.log(
      `Expected error occurred as ${
        user.id
      } does not have access to insert an item in the first container. Error code : ${err.code}`
    );
  }
}
//attempts to read from both the containers as the user has read permission
/**
 * Reads from both containers using a client combining the two read tokens,
 * then verifies that a write to container 2 still fails.
 * @param {cosmos.Container} container1
 * @param {cosmos.Container} container2
 * @param {cosmos.User} user1
 * @param {cosmos.Permission} permission1
 * @param {cosmos.Permission} permission2
 */
async function attemptReadFromTwoCollections(container1, container2, user1, permission1, permission2) {
  // Merge both resource tokens into one map for a single client.
  const token1 = await getResourceToken(container1, permission1);
  const token2 = await getResourceToken(container2, permission2);
  const resourceTokens = { ...token1, ...token2 };
  const client = new CosmosClient({
    endpoint,
    auth: {
      resourceTokens
    }
  });
  const { result: items1 } = await client
    .database(databaseId)
    .container(container1.id)
    .items.readAll()
    .toArray();
  console.log(`${user1.id} able to read items from container 1. Document count is ${items1.length}`);
  const { result: items2 } = await client
    .database(databaseId)
    .container(container2.id)
    .items.readAll()
    .toArray();
  console.log(`${user1.id} able to read items from container 2. Document count is ${items2.length}`);
  // Both tokens are read-only, so an upsert is still expected to fail.
  const itemDef = { id: "not allowed" };
  try {
    await client
      .database(databaseId)
      .container(container2.id)
      .items.upsert(itemDef);
  } catch (err) {
    console.log(
      `Expected error occurred as ${user1.id} does not have access to insert an item in container 2. Error code : ${
        err.code
      }`
    );
  }
}
run().catch(handleError);

Просмотреть файл

@ -1,10 +0,0 @@
{
"name": "user-management",
"version": "0.0.0",
"private": true,
"description": "UserManagement",
"scripts": {
"start": "node app.js"
},
"dependencies": {}
}

17
samples/package.json Normal file
Просмотреть файл

@ -0,0 +1,17 @@
{
"name": "cosmos-samples",
"version": "0.0.0",
"private": true,
"description": "Cosmos DB Samples",
"main": "app.js",
"dependencies": {},
"scripts": {
"ContainerManagement": "npx ts-node ./ContainerManagement",
"UserManagement": "npx ts-node ./UserManagement",
"ServerSideScripts": "npx ts-node ./ServerSideScripts",
"ItemManagement": "npx ts-node ./ItemManagement",
"DatabaseManagement": "npx ts-node ./DatabaseManagement",
"IndexeManagement": "npx ts-node ./IndexeManagement",
"ChangeFeed": "npx ts-node ./ChangeFeed"
}
}

Просмотреть файл

@ -1,51 +0,0 @@
## Introduction
These samples demonstrate how to use the Node.js SDK to interact with the [Azure Cosmos DB](https://docs.microsoft.com/azure/cosmos-db/) service
## Running the samples
### Quick steps:
1. Start the Cosmos DB emulator
2. Follow the steps in [../dev.md](../dev.md) to build the SDK.
3. `cd` into a given sample's directory
4. `npm start`
### Debugging
These samples were built using [VS Code](https://code.visualstudio.com) and includes a `.vscode/launch.json`. However, you do not _need_ anything other than Node.js to run these samples. Just run the app.js in your choice of editor or terminal.
To debug in VS Code, just use the "Debug File" option, and start it in the sample's app.js of your choice. (For the TodoApp, you need to start from `bin/www`)
### Cosmos Account
Before you can run any of the samples you do need an active Azure Cosmos DB account or the emulator.
Head over to [How to create a Azure Cosmos DB database account](https://docs.microsoft.com/azure/cosmos-db/create-sql-api-nodejs#create-a-database-account) and see how to setup your account. Check out the emulator (windows only at the moment) [here](https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator).
## Description
Azure Cosmos DB is a fully managed, scalable, query-able, schema free JSON document database service built for modern applications and delivered to you by Microsoft.
These samples demonstrate how to use the Client SDKs to interact with the service.
- **CollectionManagement** - CRUD operations on DocumentCollection resources.
- **DatabaseManagement** - CRUD operations on Database resources.
- **DocumentManagement** - CRUD operations on Document resources.
- **IndexManagement** - shows samples on how to customize the Indexing Policy for a Collection should you need to.
- **Partitioning** - shows samples on using the provided hashPartitionResolver and rangePartitionResolver classes, and how to implement custom resolvers.
- **ServerSideScripts** - shows how to create, and execute, server-side stored procedures, triggers and user-defined functions.
- **TodoApp** - Quick and simple todo app.
After walking through these samples you should have a good idea of how to get going and how to make use of the various Azure Cosmos DB APIs.
There are step-by-step tutorials and more documentation on the [Azure Cosmos DB documentation](https://docs.microsoft.com/azure/cosmos-db/) page, so head over there to learn more about this NoSQL document database.
## More information
For more information on this database service, please refer to the [Azure Cosmos DB](https://azure.microsoft.com/services/cosmos-db/) service page.

17
samples/tsconfig.json Normal file
Просмотреть файл

@ -0,0 +1,17 @@
{
"compilerOptions": {
"noEmit": true,
"module": "commonjs",
"moduleResolution": "node",
"importHelpers": true,
"noImplicitAny": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"preserveConstEnums": true,
"removeComments": false,
"target": "es6",
"sourceMap": true,
"newLine": "LF",
"composite": true
}
}

5
samples/tslint.json Normal file
Просмотреть файл

@ -0,0 +1,5 @@
{
"rules": {
"no-console": false
}
}

Просмотреть файл

@ -34,7 +34,6 @@ export class ChangeFeedIterator<T> {
private resourceId: string,
private resourceLink: string,
private partitionKey: string | number | boolean,
private isPartitionedContainer: () => Promise<boolean>,
private changeFeedOptions: ChangeFeedOptions
) {
// partition key XOR partition key range id
@ -76,7 +75,7 @@ export class ChangeFeedIterator<T> {
*/
public async *getAsyncIterator(): AsyncIterable<ChangeFeedResponse<Array<T & Resource>>> {
do {
const result = await this.executeNext();
const result = await this.fetchNext();
if (result.count > 0) {
yield result;
}
@ -86,7 +85,7 @@ export class ChangeFeedIterator<T> {
/**
* Read feed and retrieves the next page of results in Azure Cosmos DB.
*/
public async executeNext(): Promise<ChangeFeedResponse<Array<T & Resource>>> {
public async fetchNext(): Promise<ChangeFeedResponse<Array<T & Resource>>> {
const response = await this.getFeedResponse();
this.lastStatusCode = response.statusCode;
this.nextIfNoneMatch = response.headers[Constants.HttpHeaders.ETag];
@ -94,11 +93,10 @@ export class ChangeFeedIterator<T> {
}
private async getFeedResponse(): Promise<ChangeFeedResponse<Array<T & Resource>>> {
const isParittionedContainer = await this.isPartitionedContainer();
if (!this.isPartitionSpecified && isParittionedContainer) {
if (!this.isPartitionSpecified) {
throw new Error("Container is partitioned, but no partition key or partition key range id was specified.");
}
const feedOptions: FeedOptions = { initialHeaders: {}, a_im: "Incremental feed" };
const feedOptions: FeedOptions = { initialHeaders: {}, useIncrementalFeed: true };
if (typeof this.changeFeedOptions.maxItemCount === "number") {
feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;
@ -119,23 +117,20 @@ export class ChangeFeedIterator<T> {
feedOptions.initialHeaders[Constants.HttpHeaders.IfModifiedSince] = this.ifModifiedSince;
}
if (this.partitionKey !== undefined) {
feedOptions.partitionKey = this.partitionKey as any; // TODO: our partition key is too restrictive on the main object
}
const response: Response<Array<T & Resource>> = await (this.clientContext.queryFeed<T>(
this.resourceLink,
ResourceType.item,
this.resourceId,
result => (result ? result.Documents : []),
undefined,
feedOptions
) as Promise<any>); // TODO: some funky issues with query feed. Probably need to change it up.
const response: Response<Array<T & Resource>> = await (this.clientContext.queryFeed<T>({
path: this.resourceLink,
resourceType: ResourceType.item,
resourceId: this.resourceId,
resultFn: result => (result ? result.Documents : []),
query: undefined,
options: feedOptions,
partitionKey: this.partitionKey
}) as Promise<any>); // TODO: some funky issues with query feed. Probably need to change it up.
return new ChangeFeedResponse(
response.result,
response.result ? response.result.length : 0,
response.statusCode,
response.code,
response.headers
);
}

Просмотреть файл

@ -6,7 +6,7 @@
* - startTime
* - startFromBeginning
*
* If none of those options are set, it will start reading changes from the first `ChangeFeedIterator.executeNext()` call.
* If none of those options are set, it will start reading changes from the first `ChangeFeedIterator.fetchNext()` call.
*/
export interface ChangeFeedOptions {
/**

Просмотреть файл

@ -1,5 +1,5 @@
import { Constants } from "./common";
import { IHeaders } from "./queryExecutionContext";
import { CosmosHeaders } from "./queryExecutionContext";
/**
* A single response page from the Azure Cosmos DB Change Feed
@ -27,7 +27,7 @@ export class ChangeFeedResponse<T> {
* Gets the status code of the response from Azure Cosmos DB
*/
public readonly statusCode: number,
headers: IHeaders
headers: CosmosHeaders
) {
this.headers = Object.freeze(headers);
}
@ -79,5 +79,5 @@ export class ChangeFeedResponse<T> {
/**
* Response headers of the response from Azure Cosmos DB
*/
public headers: IHeaders;
public headers: CosmosHeaders;
}

Просмотреть файл

@ -1,13 +1,21 @@
import { Constants, CosmosClientOptions, IHeaders, QueryIterator, RequestOptions, Response, SqlQuerySpec } from ".";
import { PartitionKeyRange } from "./client/Container/PartitionKeyRange";
import { Resource } from "./client/Resource";
import { Helper, StatusCodes, SubStatusCodes } from "./common";
import { ConnectionPolicy, ConsistencyLevel, DatabaseAccount, QueryCompatibilityMode } from "./documents";
import { Constants, HTTPMethod, OperationType, ResourceType } from "./common/constants";
import { getIdFromLink, getPathFromLink, parseLink } from "./common/helper";
import { StatusCodes, SubStatusCodes } from "./common/statusCodes";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { ConnectionPolicy, ConsistencyLevel, DatabaseAccount, PartitionKey } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import { FetchFunctionCallback } from "./queryExecutionContext";
import { FeedOptions, RequestHandler } from "./request";
import { ErrorResponse, getHeaders } from "./request/request";
import { executePlugins, PluginOn } from "./plugins/Plugin";
import { FetchFunctionCallback, SqlQuerySpec } from "./queryExecutionContext";
import { CosmosHeaders } from "./queryExecutionContext/CosmosHeaders";
import { QueryIterator } from "./queryIterator";
import { FeedOptions, RequestOptions, Response } from "./request";
import { ErrorResponse } from "./request";
import { PartitionedQueryExecutionInfo } from "./request/ErrorResponse";
import { getHeaders } from "./request/request";
import { RequestContext } from "./request/RequestContext";
import { request as executeRequest } from "./request/RequestHandler";
import { SessionContainer } from "./session/sessionContainer";
import { SessionContext } from "./session/SessionContext";
@ -18,282 +26,264 @@ import { SessionContext } from "./session/SessionContext";
export class ClientContext {
private readonly sessionContainer: SessionContainer;
private connectionPolicy: ConnectionPolicy;
private requestHandler: RequestHandler;
public partitionKeyDefinitionCache: { [containerUrl: string]: any }; // TODO: ParitionKeyDefinitionCache
public constructor(
private cosmosClientOptions: CosmosClientOptions,
private globalEndpointManager: GlobalEndpointManager
) {
this.connectionPolicy = Helper.parseConnectionPolicy(cosmosClientOptions.connectionPolicy);
this.connectionPolicy = cosmosClientOptions.connectionPolicy;
this.sessionContainer = new SessionContainer();
this.requestHandler = new RequestHandler(
globalEndpointManager,
this.connectionPolicy,
this.cosmosClientOptions.agent
);
this.partitionKeyDefinitionCache = {};
}
/** @ignore */
public async read<T>(
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
public async read<T>({
path,
resourceType,
resourceId,
options = {},
partitionKey
}: {
path: string;
resourceType: ResourceType;
resourceId: string;
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T & Resource>> {
try {
const requestHeaders = await getHeaders(
this.cosmosClientOptions.auth,
{ ...initialHeaders, ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) },
"get",
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.get,
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
this.applySessionToken(path, requestHeaders);
const request: any = {
// TODO: any
path,
operationType: Constants.OperationTypes.Read,
operationType: OperationType.Read,
client: this,
endpointOverride: null
resourceId,
options,
resourceType,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
request.headers = await this.buildHeaders(request);
this.applySessionToken(request);
// read will use ReadEndpoint since it uses GET operation
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await this.requestHandler.get(
endpoint,
request,
requestHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Read, response.headers);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await executePlugins(request, executeRequest, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Read, response.headers);
return response;
} catch (err) {
this.captureSessionToken(err, path, Constants.OperationTypes.Upsert, (err as ErrorResponse).headers);
this.captureSessionToken(err, path, OperationType.Upsert, (err as ErrorResponse).headers);
throw err;
}
}
public async queryFeed<T>(
path: string,
type: string, // TODO: code smell: enum?
id: string,
resultFn: (result: { [key: string]: any }) => any[], // TODO: any
query: SqlQuerySpec | string,
options: FeedOptions,
partitionKeyRangeId?: string
): Promise<Response<T & Resource>> {
public async queryFeed<T>({
path,
resourceType,
resourceId,
resultFn,
query,
options,
partitionKeyRangeId,
partitionKey
}: {
path: string;
resourceType: ResourceType;
resourceId: string;
resultFn: (
result: {
[key: string]: any;
}
) => any[];
query: SqlQuerySpec | string;
options: FeedOptions;
partitionKeyRangeId?: string;
partitionKey?: PartitionKey;
}): Promise<Response<T & Resource>> {
// Query operations will use ReadEndpoint even though it uses
// GET(for queryFeed) and POST(for regular query operations)
const request: any = {
// TODO: any request
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.get,
path,
operationType: Constants.OperationTypes.Query,
operationType: OperationType.Query,
client: this,
endpointOverride: null
partitionKeyRangeId,
resourceId,
resourceType,
options,
body: query,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const initialHeaders = { ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) };
if (query === undefined) {
const reqHeaders = await getHeaders(
this.cosmosClientOptions.auth,
initialHeaders,
"get",
path,
id,
type,
options,
partitionKeyRangeId,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
this.applySessionToken(path, reqHeaders);
const response = await this.requestHandler.get(
endpoint,
request,
reqHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Query, response.headers);
return this.processQueryFeedResponse(response, !!query, resultFn);
} else {
initialHeaders[Constants.HttpHeaders.IsQuery] = "true";
switch (this.cosmosClientOptions.queryCompatibilityMode) {
case QueryCompatibilityMode.SqlQuery:
initialHeaders[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.SQL;
break;
case QueryCompatibilityMode.Query:
case QueryCompatibilityMode.Default:
default:
if (typeof query === "string") {
query = { query }; // Converts query text to query object.
}
initialHeaders[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.QueryJson;
break;
}
const reqHeaders = await getHeaders(
this.cosmosClientOptions.auth,
initialHeaders,
"post",
path,
id,
type,
options,
partitionKeyRangeId,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
this.applySessionToken(path, reqHeaders);
const response = await this.requestHandler.post(
endpoint,
request,
query,
reqHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Query, response.headers);
return this.processQueryFeedResponse(response, !!query, resultFn);
if (query !== undefined) {
request.method = HTTPMethod.post;
}
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
request.headers = await this.buildHeaders(request);
if (query !== undefined) {
request.headers[Constants.HttpHeaders.IsQuery] = "true";
request.headers[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.QueryJson;
if (typeof query === "string") {
request.body = { query }; // Converts query text to query object.
}
}
this.applySessionToken(request);
const response = await executeRequest(request);
this.captureSessionToken(undefined, path, OperationType.Query, response.headers);
return this.processQueryFeedResponse(response, !!query, resultFn);
}
public async getQueryPlan(
path: string,
resourceType: ResourceType,
resourceId: string,
query: SqlQuerySpec | string,
options: FeedOptions = {}
): Promise<Response<PartitionedQueryExecutionInfo>> {
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.post,
path,
operationType: OperationType.Read,
client: this,
resourceId,
resourceType,
options,
body: query,
plugins: this.cosmosClientOptions.plugins
};
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
request.headers = await this.buildHeaders(request);
request.headers[Constants.HttpHeaders.IsQueryPlan] = "True";
request.headers[Constants.HttpHeaders.QueryVersion] = "1.4";
request.headers[Constants.HttpHeaders.SupportedQueryFeatures] =
"Aggregate, Distinct, MultipleOrderBy, OffsetAndLimit, OrderBy, Top, CompositeAggregate";
request.headers[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.QueryJson;
if (typeof query === "string") {
request.body = { query }; // Converts query text to query object.
}
this.applySessionToken(request);
const response = await executeRequest(request);
this.captureSessionToken(undefined, path, OperationType.Query, response.headers);
return response as any;
}
public queryPartitionKeyRanges(collectionLink: string, query?: string | SqlQuerySpec, options?: FeedOptions) {
const path = Helper.getPathFromLink(collectionLink, "pkranges");
const id = Helper.getIdFromLink(collectionLink);
const path = getPathFromLink(collectionLink, ResourceType.pkranges);
const id = getIdFromLink(collectionLink);
const cb: FetchFunctionCallback = innerOptions => {
return this.queryFeed(path, "pkranges", id, result => result.PartitionKeyRanges, query, innerOptions);
return this.queryFeed({
path,
resourceType: ResourceType.pkranges,
resourceId: id,
resultFn: result => result.PartitionKeyRanges,
query,
options: innerOptions
});
};
return new QueryIterator<PartitionKeyRange>(this, query, options, cb);
}
public async delete<T>(
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
public async delete<T>({
path,
resourceType,
resourceId,
options = {},
partitionKey
}: {
path: string;
resourceType: ResourceType;
resourceId: string;
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T & Resource>> {
try {
const reqHeaders = await getHeaders(
this.cosmosClientOptions.auth,
{ ...initialHeaders, ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) },
"delete",
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.delete,
client: this,
operationType: Constants.OperationTypes.Delete,
operationType: OperationType.Delete,
path,
resourceType: type
resourceType,
options,
resourceId,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
this.applySessionToken(path, reqHeaders);
request.headers = await this.buildHeaders(request);
this.applySessionToken(request);
// deleteResource will use WriteEndpoint since it uses DELETE operation
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await this.requestHandler.delete(
endpoint,
request,
reqHeaders,
this.cosmosClientOptions.auth,
id,
type
);
if (Helper.parseLink(path).type !== "colls") {
this.captureSessionToken(undefined, path, Constants.OperationTypes.Delete, response.headers);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await executePlugins(request, executeRequest, PluginOn.operation);
if (parseLink(path).type !== "colls") {
this.captureSessionToken(undefined, path, OperationType.Delete, response.headers);
} else {
this.clearSessionToken(path);
}
return response;
} catch (err) {
this.captureSessionToken(err, path, Constants.OperationTypes.Upsert, (err as ErrorResponse).headers);
this.captureSessionToken(err, path, OperationType.Upsert, (err as ErrorResponse).headers);
throw err;
}
}
// Most cases, things return the definition + the system resource props
public async create<T>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>>;
// But a few cases, like permissions, there is additional junk added to the response that isn't in system resource props
public async create<T, U>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>>;
public async create<T, U>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>> {
public async create<T, U = T>({
body,
path,
resourceType,
resourceId,
options = {},
partitionKey
}: {
body: T;
path: string;
resourceType: ResourceType;
resourceId: string;
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T & U & Resource>> {
try {
const requestHeaders = await getHeaders(
this.cosmosClientOptions.auth,
{ ...initialHeaders, ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) },
"post",
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.post,
client: this,
operationType: Constants.OperationTypes.Create,
operationType: OperationType.Create,
path,
resourceType: type
resourceType,
resourceId,
body,
options,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
request.headers = await this.buildHeaders(request);
// create will use WriteEndpoint since it uses POST operation
this.applySessionToken(path, requestHeaders);
this.applySessionToken(request);
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await this.requestHandler.post(
endpoint,
request,
body,
requestHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Create, response.headers);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await executePlugins(request, executeRequest, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Create, response.headers);
return response;
} catch (err) {
this.captureSessionToken(err, path, Constants.OperationTypes.Upsert, (err as ErrorResponse).headers);
this.captureSessionToken(err, path, OperationType.Upsert, (err as ErrorResponse).headers);
throw err;
}
}
@ -304,21 +294,23 @@ export class ClientContext {
resultFn: (result: { [key: string]: any }) => any[]
): Response<any> {
if (isQuery) {
return { result: resultFn(res.result), headers: res.headers, statusCode: res.statusCode };
return { result: resultFn(res.result), headers: res.headers, code: res.code };
} else {
const newResult = resultFn(res.result).map((body: any) => body);
return { result: newResult, headers: res.headers, statusCode: res.statusCode };
return { result: newResult, headers: res.headers, code: res.code };
}
}
private applySessionToken(path: string, reqHeaders: IHeaders) {
const request = this.getSessionParams(path);
private applySessionToken(requestContext: RequestContext) {
const request = this.getSessionParams(requestContext.path);
if (reqHeaders && reqHeaders[Constants.HttpHeaders.SessionToken]) {
if (requestContext.headers && requestContext.headers[Constants.HttpHeaders.SessionToken]) {
return;
}
const sessionConsistency: ConsistencyLevel = reqHeaders[Constants.HttpHeaders.ConsistencyLevel];
const sessionConsistency: ConsistencyLevel = requestContext.headers[
Constants.HttpHeaders.ConsistencyLevel
] as ConsistencyLevel;
if (!sessionConsistency) {
return;
}
@ -330,164 +322,143 @@ export class ClientContext {
if (request.resourceAddress) {
const sessionToken = this.sessionContainer.get(request);
if (sessionToken) {
reqHeaders[Constants.HttpHeaders.SessionToken] = sessionToken;
requestContext.headers[Constants.HttpHeaders.SessionToken] = sessionToken;
}
}
}
public async replace<T>(
resource: any,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
public async replace<T>({
body,
path,
resourceType,
resourceId,
options = {},
partitionKey
}: {
body: any;
path: string;
resourceType: ResourceType;
resourceId: string;
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T & Resource>> {
try {
const reqHeaders = await getHeaders(
this.cosmosClientOptions.auth,
{ ...initialHeaders, ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) },
"put",
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.put,
client: this,
operationType: Constants.OperationTypes.Replace,
operationType: OperationType.Replace,
path,
resourceType: type
resourceType,
body,
resourceId,
options,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
this.applySessionToken(path, reqHeaders);
request.headers = await this.buildHeaders(request);
this.applySessionToken(request);
// replace will use WriteEndpoint since it uses PUT operation
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(reqHeaders);
const response = await this.requestHandler.put(
endpoint,
request,
resource,
reqHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Replace, response.headers);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await executePlugins(request, executeRequest, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Replace, response.headers);
return response;
} catch (err) {
this.captureSessionToken(err, path, Constants.OperationTypes.Upsert, (err as ErrorResponse).headers);
this.captureSessionToken(err, path, OperationType.Upsert, (err as ErrorResponse).headers);
throw err;
}
}
public async upsert<T>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>>;
public async upsert<T, U>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & U & Resource>>;
public async upsert<T>(
body: T,
path: string,
type: string,
id: string,
initialHeaders: IHeaders,
options?: RequestOptions
): Promise<Response<T & Resource>> {
public async upsert<T, U = T>({
body,
path,
resourceType,
resourceId,
options = {},
partitionKey
}: {
body: T;
path: string;
resourceType: ResourceType;
resourceId: string;
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T & U & Resource>> {
try {
const requestHeaders = await getHeaders(
this.cosmosClientOptions.auth,
{ ...initialHeaders, ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) },
"post",
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.post,
client: this,
operationType: Constants.OperationTypes.Upsert,
operationType: OperationType.Upsert,
path,
resourceType: type
resourceType,
body,
resourceId,
options,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
Helper.setIsUpsertHeader(requestHeaders);
this.applySessionToken(path, requestHeaders);
request.headers = await this.buildHeaders(request);
request.headers[Constants.HttpHeaders.IsUpsert] = true;
this.applySessionToken(request);
// upsert will use WriteEndpoint since it uses POST operation
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await this.requestHandler.post(
endpoint,
request,
body,
requestHeaders,
this.cosmosClientOptions.auth,
id,
type
);
this.captureSessionToken(undefined, path, Constants.OperationTypes.Upsert, response.headers);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
const response = await executePlugins(request, executeRequest, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Upsert, response.headers);
return response;
} catch (err) {
this.captureSessionToken(err, path, Constants.OperationTypes.Upsert, (err as ErrorResponse).headers);
this.captureSessionToken(err, path, OperationType.Upsert, (err as ErrorResponse).headers);
throw err;
}
}
public async execute<T>(
sprocLink: string,
params?: any[], // TODO: any
options?: RequestOptions
): Promise<Response<T>> {
const initialHeaders = { ...this.cosmosClientOptions.defaultHeaders, ...(options && options.initialHeaders) };
public async execute<T>({
sprocLink,
params,
options = {},
partitionKey
}: {
sprocLink: string;
params?: any[];
options?: RequestOptions;
partitionKey?: PartitionKey;
}): Promise<Response<T>> {
// Accept a single parameter or an array of parameters.
// Didn't add type annotation for this because we should legacy this behavior
if (params !== null && params !== undefined && !Array.isArray(params)) {
params = [params];
}
const path = Helper.getPathFromLink(sprocLink);
const id = Helper.getIdFromLink(sprocLink);
const type = "sprocs";
const headers = await getHeaders(
this.cosmosClientOptions.auth,
initialHeaders,
"post",
path,
id,
type,
options,
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const path = getPathFromLink(sprocLink);
const id = getIdFromLink(sprocLink);
const request: RequestContext = {
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.post,
client: this,
operationType: Constants.OperationTypes.Execute,
operationType: OperationType.Execute,
path,
resourceType: "sprocs"
resourceType: ResourceType.sproc,
options,
resourceId: id,
body: params,
plugins: this.cosmosClientOptions.plugins,
partitionKey
};
request.headers = await this.buildHeaders(request);
// executeStoredProcedure will use WriteEndpoint since it uses POST operation
const endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
return this.requestHandler.post(endpoint, request, params, headers, this.cosmosClientOptions.auth, id, type);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(request);
return executePlugins(request, executeRequest, PluginOn.operation);
}
/**
@ -496,35 +467,24 @@ export class ClientContext {
* If not present, current client's url will be used.
*/
public async getDatabaseAccount(options: RequestOptions = {}): Promise<Response<DatabaseAccount>> {
const urlConnection = options.urlConnection || this.cosmosClientOptions.endpoint;
const requestHeaders = await getHeaders(
this.cosmosClientOptions.auth,
this.cosmosClientOptions.defaultHeaders,
"get",
"",
"",
"",
{},
undefined,
this.cosmosClientOptions.connectionPolicy.UseMultipleWriteLocations
);
const endpoint = options.urlConnection || this.cosmosClientOptions.endpoint;
const request: RequestContext = {
endpoint,
globalEndpointManager: this.globalEndpointManager,
requestAgent: this.cosmosClientOptions.agent,
connectionPolicy: this.connectionPolicy,
method: HTTPMethod.get,
client: this,
operationType: Constants.OperationTypes.Read,
operationType: OperationType.Read,
path: "",
resourceType: "DatabaseAccount"
resourceType: ResourceType.none,
options,
plugins: this.cosmosClientOptions.plugins
};
const { result, headers } = await this.requestHandler.get(
urlConnection,
request,
requestHeaders,
this.cosmosClientOptions.auth,
"",
""
);
request.headers = await this.buildHeaders(request);
// await options.beforeOperation({ endpoint, request, headers: requestHeaders });
const { result, headers } = await executePlugins(request, executeRequest, PluginOn.operation);
const databaseAccount = new DatabaseAccount(result, headers);
@ -539,9 +499,14 @@ export class ClientContext {
return this.globalEndpointManager.getReadEndpoint();
}
private captureSessionToken(err: ErrorResponse, path: string, opType: string, resHeaders: IHeaders) {
const request = this.getSessionParams(path); // TODO: any request
request.operationType = opType;
private captureSessionToken(
err: ErrorResponse,
path: string,
operationType: OperationType,
resHeaders: CosmosHeaders
) {
const request = this.getSessionParams(path);
request.operationType = operationType;
if (
!err ||
(!this.isMasterResource(request.resourceType) &&
@ -553,22 +518,6 @@ export class ClientContext {
}
}
// TODO: some session tests are using this, but I made them use type coercsion to call this method because I don't think it should be public.
private getSessionToken(collectionLink: string) {
if (!collectionLink) {
throw new Error("collectionLink cannot be null");
}
const paths = Helper.parseLink(collectionLink);
if (paths === undefined) {
return "";
}
const request = this.getSessionParams(collectionLink);
return this.sessionContainer.get(request);
}
public clearSessionToken(path: string) {
const request = this.getSessionParams(path);
this.sessionContainer.remove(request);
@ -577,7 +526,7 @@ export class ClientContext {
private getSessionParams(resourceLink: string): SessionContext {
const resourceId: string = null;
let resourceAddress: string = null;
const parserOutput = Helper.parseLink(resourceLink);
const parserOutput = parseLink(resourceLink);
resourceAddress = parserOutput.objectBody.self;
@ -606,4 +555,19 @@ export class ClientContext {
return false;
}
private buildHeaders(requestContext: RequestContext) {
return getHeaders({
clientOptions: this.cosmosClientOptions,
defaultHeaders: { ...this.cosmosClientOptions.defaultHeaders, ...requestContext.options.initialHeaders },
verb: requestContext.method,
path: requestContext.path,
resourceId: requestContext.resourceId,
resourceType: requestContext.resourceType,
options: requestContext.options,
partitionKeyRangeId: requestContext.partitionKeyRangeId,
useMultipleWriteLocations: this.connectionPolicy.useMultipleWriteLocations,
partitionKey: requestContext.partitionKey
});
}
}

Просмотреть файл

@ -1,15 +1,13 @@
import { Agent, AgentOptions } from "https";
import * as tunnel from "tunnel";
import * as url from "url";
import { Constants, RequestOptions } from ".";
import { Database, Databases } from "./client/Database";
import { Offer, Offers } from "./client/Offer";
import { ClientContext } from "./ClientContext";
import { Helper, Platform } from "./common";
import { parseConnectionString } from "./common";
import { Constants } from "./common/constants";
import { getPlatformDefaultHeaders, getUserAgent } from "./common/platform";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DatabaseAccount } from "./documents";
import { DatabaseAccount, defaultConnectionPolicy } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import { CosmosResponse } from "./request";
import { RequestOptions, ResourceResponse } from "./request";
/**
* Provides a client-side logical representation of the Azure Cosmos DB database account.
@ -38,7 +36,7 @@ export class CosmosClient {
*
* @example Create a new database
* ```typescript
* const {body: databaseDefinition, database} = await client.databases.create({id: "<name here>"});
* const {resource: databaseDefinition, database} = await client.databases.create({id: "<name here>"});
* ```
*/
public readonly databases: Databases;
@ -48,65 +46,46 @@ export class CosmosClient {
* Use `.offer(id)` to read, or replace existing offers.
*/
public readonly offers: Offers;
private clientContext: ClientContext;
/**
* Creates a new {@link CosmosClient} object from a connection string. Your database connection string can be found in the Azure Portal
*/
constructor(connectionString: string);
/**
* Creates a new {@link CosmosClient} object. See {@link CosmosClientOptions} for more details on what options you can use.
* @param options bag of options - require at least endpoint and auth to be configured
*/
private clientContext: ClientContext;
constructor(private options: CosmosClientOptions) {
options.auth = options.auth || {};
if (options.key) {
options.auth.key = options.key;
constructor(options: CosmosClientOptions); // tslint:disable-line:unified-signatures
constructor(optionsOrConnectionString: string | CosmosClientOptions) {
if (typeof optionsOrConnectionString === "string") {
optionsOrConnectionString = parseConnectionString(optionsOrConnectionString);
}
options.connectionPolicy = Helper.parseConnectionPolicy(options.connectionPolicy);
optionsOrConnectionString.connectionPolicy = Object.assign(
{},
defaultConnectionPolicy,
optionsOrConnectionString.connectionPolicy
);
options.defaultHeaders = options.defaultHeaders || {};
options.defaultHeaders[Constants.HttpHeaders.CacheControl] = "no-cache";
options.defaultHeaders[Constants.HttpHeaders.Version] = Constants.CurrentVersion;
if (options.consistencyLevel !== undefined) {
options.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] = options.consistencyLevel;
optionsOrConnectionString.defaultHeaders = optionsOrConnectionString.defaultHeaders || {};
optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.CacheControl] = "no-cache";
optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.Version] = Constants.CurrentVersion;
if (optionsOrConnectionString.consistencyLevel !== undefined) {
optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] =
optionsOrConnectionString.consistencyLevel;
}
const platformDefaultHeaders = Platform.getPlatformDefaultHeaders() || {};
const platformDefaultHeaders = getPlatformDefaultHeaders() || {};
for (const platformDefaultHeader of Object.keys(platformDefaultHeaders)) {
options.defaultHeaders[platformDefaultHeader] = platformDefaultHeaders[platformDefaultHeader];
optionsOrConnectionString.defaultHeaders[platformDefaultHeader] = platformDefaultHeaders[platformDefaultHeader];
}
options.defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.UserAgent] = getUserAgent();
if (!this.options.agent) {
// Initialize request agent
const requestAgentOptions: AgentOptions & tunnel.HttpsOverHttpsOptions & tunnel.HttpsOverHttpOptions = {
keepAlive: true
};
if (!!this.options.connectionPolicy.ProxyUrl) {
const proxyUrl = url.parse(this.options.connectionPolicy.ProxyUrl);
const port = parseInt(proxyUrl.port, 10);
requestAgentOptions.proxy = {
host: proxyUrl.hostname,
port,
headers: {}
};
if (!!proxyUrl.auth) {
requestAgentOptions.proxy.proxyAuth = proxyUrl.auth;
}
this.options.agent =
proxyUrl.protocol.toLowerCase() === "https:"
? tunnel.httpsOverHttps(requestAgentOptions)
: tunnel.httpsOverHttp(requestAgentOptions); // TODO: type coersion
} else {
this.options.agent = new Agent(requestAgentOptions); // TODO: Move to request?
}
}
const globalEndpointManager = new GlobalEndpointManager(this.options, async (opts: RequestOptions) =>
const globalEndpointManager = new GlobalEndpointManager(optionsOrConnectionString, async (opts: RequestOptions) =>
this.getDatabaseAccount(opts)
);
this.clientContext = new ClientContext(options, globalEndpointManager);
this.clientContext = new ClientContext(optionsOrConnectionString, globalEndpointManager);
this.databases = new Databases(this, this.clientContext);
this.offers = new Offers(this, this.clientContext);
@ -115,9 +94,9 @@ export class CosmosClient {
/**
* Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.)
*/
public async getDatabaseAccount(options?: RequestOptions): Promise<CosmosResponse<DatabaseAccount, CosmosClient>> {
public async getDatabaseAccount(options?: RequestOptions): Promise<ResourceResponse<DatabaseAccount>> {
const response = await this.clientContext.getDatabaseAccount(options);
return { body: response.result, headers: response.headers, ref: this };
return new ResourceResponse<DatabaseAccount>(response.result, response.headers, response.code);
}
/**

Просмотреть файл

@ -1,9 +1,11 @@
import { AuthOptions } from "./auth";
import { ConnectionPolicy, ConsistencyLevel, QueryCompatibilityMode } from "./documents";
import { IHeaders } from "./queryExecutionContext/IHeaders";
import { TokenProvider } from "./auth";
import { PermissionDefinition } from "./client";
import { ConnectionPolicy, ConsistencyLevel } from "./documents";
import { PluginConfig } from "./plugins/Plugin";
import { CosmosHeaders } from "./queryExecutionContext/CosmosHeaders";
// We expose our own Agent interface to avoid taking a dependency on and leaking node types. This interface should mirror the node Agent interface
interface Agent {
export interface Agent {
maxFreeSockets: number;
maxSockets: number;
sockets: any;
@ -14,19 +16,31 @@ interface Agent {
export interface CosmosClientOptions {
/** The service endpoint to use to create the client. */
endpoint: string;
/** The account master or readonly key (alias of auth.key) */
/** The account master or readonly key */
key?: string;
/** An object that is used for authenticating requests and must contains one of the options */
auth?: AuthOptions;
/** An object that contains resources tokens.
* Keys for the object are resource Ids and values are the resource tokens.
*/
resourceTokens?: { [resourcePath: string]: string };
/** A user supplied function for resolving header authorization tokens.
* Allows users to generate their own auth tokens, potentially using a separate service
*/
tokenProvider?: TokenProvider;
/** An array of {@link Permission} objects. */
permissionFeed?: PermissionDefinition[];
/** An instance of {@link ConnectionPolicy} class.
* This parameter is optional and the default connectionPolicy will be used if omitted.
*/
connectionPolicy?: ConnectionPolicy | { [P in keyof ConnectionPolicy]?: ConnectionPolicy[P] };
connectionPolicy?: ConnectionPolicy;
/** An optional parameter that represents the consistency level.
* It can take any value from {@link ConsistencyLevel}.
*/
consistencyLevel?: keyof typeof ConsistencyLevel;
defaultHeaders?: IHeaders;
defaultHeaders?: CosmosHeaders;
/** An optional custom http(s) Agent to be used in NodeJS environments
* Use an agent such as https://github.com/TooTallNate/node-proxy-agent if you need to connect to Cosmos via a proxy
*/
agent?: Agent;
queryCompatibilityMode?: QueryCompatibilityMode;
/** @internal */
plugins?: PluginConfig[];
}

Просмотреть файл

@ -1,4 +1,4 @@
import { Constants, Helper, ResourceType } from "./common";
import { Constants, isReadRequest, OperationType, ResourceType } from "./common";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DatabaseAccount, Location } from "./documents";
import { LocationInfo } from "./LocationInfo";
@ -39,11 +39,11 @@ export class LocationCache {
public constructor(private options: CosmosClientOptions) {
this.defaultEndpoint = options.endpoint;
this.locationInfo = new LocationInfo(options.connectionPolicy.PreferredLocations, options.endpoint);
this.locationInfo = new LocationInfo(options.connectionPolicy.preferredLocations, options.endpoint);
}
public get prefferredLocations(): string[] {
return this.options.connectionPolicy.PreferredLocations;
return this.options.connectionPolicy.preferredLocations;
}
public getWriteEndpoint(): string {
@ -98,12 +98,14 @@ export class LocationCache {
);
}
public resolveServiceEndpoint(request: RequestContext): string {
public resolveServiceEndpoint(
request: Pick<RequestContext, "operationType" | "resourceType" | "retryCount" | "locationRouting">
): string {
request.locationRouting = request.locationRouting || new LocationRouting();
let locationIndex = request.locationRouting.locationIndexToRoute || 0;
if (!this.options.connectionPolicy.EnableEndpointDiscovery) {
if (!this.options.connectionPolicy.enableEndpointDiscovery) {
return this.defaultEndpoint;
}
@ -115,7 +117,8 @@ export class LocationCache {
// then default to the first two write locations, alternating (or the default endpoint)
if (
request.locationRouting.ignorePreferredLocation ||
(!Helper.isReadRequest(request) && !this.canUseMultipleWriteLocations(request))
(!isReadRequest(request.operationType) &&
!this.canUseMultipleWriteLocations(request.resourceType, request.operationType))
) {
const currentInfo = this.locationInfo;
if (currentInfo.orderedWriteLocations.length > 0) {
@ -127,7 +130,7 @@ export class LocationCache {
}
} else {
// If we're using preferred regions, then choose the correct endpoint based on the location index
const endpoints = Helper.isReadRequest(request)
const endpoints = isReadRequest(request.operationType)
? this.locationInfo.readEndpoints
: this.locationInfo.writeEndpoints;
return endpoints[locationIndex % endpoints.length];
@ -142,10 +145,10 @@ export class LocationCache {
currentInfo.preferredLocations ? currentInfo.preferredLocations[0] : null
);
if (this.options.connectionPolicy.EnableEndpointDiscovery) {
if (this.options.connectionPolicy.enableEndpointDiscovery) {
// Refresh if client opts-in to use multiple write locations, but it's not enabled on the server.
const shouldRefresh =
this.options.connectionPolicy.UseMultipleWriteLocations && !this.enableMultipleWritableLocations;
this.options.connectionPolicy.useMultipleWriteLocations && !this.enableMultipleWritableLocations;
if (mostPreferredLocation) {
if (currentInfo.availableReadEndpointByLocation.size > 0) {
@ -182,14 +185,14 @@ export class LocationCache {
return { shouldRefresh: false, canRefreshInBackground };
}
public canUseMultipleWriteLocations(request?: RequestContext): boolean {
let canUse = this.options.connectionPolicy.UseMultipleWriteLocations && this.enableMultipleWritableLocations;
public canUseMultipleWriteLocations(resourceType?: ResourceType, operationType?: OperationType): boolean {
let canUse = this.options.connectionPolicy.useMultipleWriteLocations && this.enableMultipleWritableLocations;
if (request) {
if (resourceType) {
canUse =
canUse &&
(request.resourceType === ResourceType.item ||
(request.resourceType === ResourceType.sproc && request.operationType === Constants.OperationTypes.Execute));
(resourceType === ResourceType.item ||
(resourceType === ResourceType.sproc && operationType === OperationType.Execute));
}
return canUse;
@ -257,7 +260,7 @@ export class LocationCache {
// TODO: To stay consistent with .NET, grab a local copy of the locationInfo
if (this.options.connectionPolicy.EnableEndpointDiscovery) {
if (this.options.connectionPolicy.enableEndpointDiscovery) {
if (readLocations) {
({
endpointsByLocation: this.locationInfo.availableReadEndpointByLocation,
@ -298,11 +301,11 @@ export class LocationCache {
): string[] {
const endpoints = [];
if (this.options.connectionPolicy.EnableEndpointDiscovery && endpointsByLocation && endpointsByLocation.size > 0) {
if (this.options.connectionPolicy.enableEndpointDiscovery && endpointsByLocation && endpointsByLocation.size > 0) {
if (this.canUseMultipleWriteLocations() || expectedAvailableOperation === EndpointOperationType.Read) {
const unavailableEndpoints: string[] = [];
if (this.options.connectionPolicy.PreferredLocations) {
for (const location of this.options.connectionPolicy.PreferredLocations) {
if (this.options.connectionPolicy.preferredLocations) {
for (const location of this.options.connectionPolicy.preferredLocations) {
const endpoint = endpointsByLocation.get(LocationCache.normalizeLocationName(location));
if (endpoint) {
if (this.isEndpointUnavailable(endpoint, expectedAvailableOperation)) {

Просмотреть файл

@ -1,173 +1,123 @@
import createHmac from "create-hmac";
import { PermissionDefinition } from "./client";
import { Helper } from "./common";
import { IHeaders } from "./queryExecutionContext";
import { generateHeaders } from "@azure/cosmos-sign";
import { Constants, getResourceIdFromPath, HTTPMethod, ResourceType } from "./common";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { CosmosHeaders } from "./queryExecutionContext";
/** @hidden */
export interface IRequestInfo {
[index: string]: any;
verb: string;
export interface RequestInfo {
verb: HTTPMethod;
path: string;
resourceId: string;
resourceType: string;
headers: IHeaders;
resourceType: ResourceType;
headers: CosmosHeaders;
}
export interface ITokenProvider {
getToken: (requestInfo: IRequestInfo, callback?: (err: Error, token: string) => void) => Promise<string>;
}
export type TokenProvider = (requestInfo: RequestInfo) => Promise<string>;
export interface AuthOptions {
/** Account master key or read only key */
key?: string;
/** The authorization master key to use to create the client. */
masterKey?: string;
/** An object that contains resources tokens.
* Keys for the object are resource Ids and values are the resource tokens.
*/
resourceTokens?: { [resourcePath: string]: string };
tokenProvider?: any; // TODO: any
/** An array of {@link Permission} objects. */
permissionFeed?: PermissionDefinition[]; // TODO: any
}
/** @hidden */
export class AuthHandler {
public static async getAuthorizationHeader(
authOptions: AuthOptions,
verb: string,
path: string,
resourceId: string,
resourceType: string,
headers: IHeaders
): Promise<string> {
if (authOptions.permissionFeed) {
authOptions.resourceTokens = {};
for (const permission of authOptions.permissionFeed) {
const id = Helper.getResourceIdFromPath(permission.resource);
if (!id) {
throw new Error(`authorization error: ${id} \
/**
* @ignore
* @param clientOptions
* @param verb
* @param path
* @param resourceId
* @param resourceType
* @param headers
*/
export async function setAuthorizationHeader(
clientOptions: CosmosClientOptions,
verb: HTTPMethod,
path: string,
resourceId: string,
resourceType: ResourceType,
headers: CosmosHeaders
): Promise<void> {
if (clientOptions.permissionFeed) {
clientOptions.resourceTokens = {};
for (const permission of clientOptions.permissionFeed) {
const id = getResourceIdFromPath(permission.resource);
if (!id) {
throw new Error(`authorization error: ${id} \
is an invalid resourceId in permissionFeed`);
}
authOptions.resourceTokens[id] = (permission as any)._token; // TODO: any
}
}
if (authOptions.masterKey || authOptions.key) {
const key = authOptions.masterKey || authOptions.key;
return encodeURIComponent(
AuthHandler.getAuthorizationTokenUsingMasterKey(verb, resourceId, resourceType, headers, key)
);
} else if (authOptions.resourceTokens) {
return encodeURIComponent(
AuthHandler.getAuthorizationTokenUsingResourceTokens(authOptions.resourceTokens, path, resourceId)
);
} else if (authOptions.tokenProvider) {
return encodeURIComponent(
await AuthHandler.getAuthorizationTokenUsingTokenProvider(authOptions.tokenProvider, {
verb,
path,
resourceId,
resourceType,
headers
})
);
clientOptions.resourceTokens[id] = (permission as any)._token; // TODO: any
}
}
private static getAuthorizationTokenUsingMasterKey(
verb: string,
resourceId: string,
resourceType: string,
headers: IHeaders,
masterKey: string
) {
if (resourceType === "offers") {
resourceId = resourceId && resourceId.toLowerCase();
}
const key = Buffer.from(masterKey, "base64");
const text =
(verb || "").toLowerCase() +
"\n" +
(resourceType || "").toLowerCase() +
"\n" +
(resourceId || "") +
"\n" +
((headers["x-ms-date"] as string) || "").toLowerCase() +
"\n" +
((headers["date"] as string) || "").toLowerCase() +
"\n";
const body = Buffer.from(text, "utf8");
const signature = createHmac("sha256", key)
.update(body)
.digest("base64");
const MasterToken = "master";
const TokenVersion = "1.0";
return `type=${MasterToken}&ver=${TokenVersion}&sig=${signature}`;
}
// TODO: Resource tokens
private static getAuthorizationTokenUsingResourceTokens(
resourceTokens: { [resourceId: string]: string },
path: string,
resourceId: string
) {
if (resourceTokens && Object.keys(resourceTokens).length > 0) {
// For database account access(through getDatabaseAccount API), path and resourceId are "",
// so in this case we return the first token to be used for creating the auth header as the
// service will accept any token in this case
if (!path && !resourceId) {
return resourceTokens[Object.keys(resourceTokens)[0]];
}
if (resourceId && resourceTokens[resourceId]) {
return resourceTokens[resourceId];
}
// minimum valid path /dbs
if (!path || path.length < 4) {
return null;
}
// remove '/' from left and right of path
path = path[0] === "/" ? path.substring(1) : path;
path = path[path.length - 1] === "/" ? path.substring(0, path.length - 1) : path;
const pathSegments = (path && path.split("/")) || [];
// if it's an incomplete path like /dbs/db1/colls/, start from the parent resource
let index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
for (; index > 0; index -= 2) {
const id = decodeURI(pathSegments[index]);
if (resourceTokens[id]) {
return resourceTokens[id];
}
}
}
return null;
}
private static getAuthorizationTokenUsingTokenProvider(
tokenProvider: ITokenProvider,
requestInfo: IRequestInfo
): Promise<string> {
requestInfo.getAuthorizationTokenUsingMasterKey = AuthHandler.getAuthorizationTokenUsingMasterKey;
return new Promise(async (resolve, reject) => {
const callback = (err: Error, token: string) => {
if (reject) {
return reject(err);
}
resolve(token);
};
const results = tokenProvider.getToken(requestInfo, callback);
if (results.then && typeof results.then === "function") {
resolve(await results);
}
});
if (clientOptions.key) {
setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, clientOptions.key);
} else if (clientOptions.resourceTokens) {
headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(
getAuthorizationTokenUsingResourceTokens(clientOptions.resourceTokens, path, resourceId)
);
} else if (clientOptions.tokenProvider) {
headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(
await clientOptions.tokenProvider({ verb, path, resourceId, resourceType, headers })
);
}
}
/**
 * The default function for setting header token using the masterKey.
 * Merges the signature headers produced by `generateHeaders` into `headers` in place.
 * @ignore
 */
export function setAuthorizationTokenHeaderUsingMasterKey(
  verb: HTTPMethod,
  resourceId: string,
  resourceType: ResourceType,
  headers: CosmosHeaders,
  masterKey: string
) {
  // TODO This should live in cosmos-sign
  // Offer resource ids are signed lowercase.
  if (resourceType === ResourceType.offer && resourceId) {
    resourceId = resourceId.toLowerCase();
  }
  Object.assign(headers, generateHeaders(masterKey, verb, resourceType, resourceId));
}
/**
 * Resolves an auth token for the given path/resourceId from a map of resource tokens.
 * Resolution order: database-account requests (empty path and id) get the first token,
 * then an exact resourceId match, then the nearest ancestor id found by walking the
 * path segments from the most specific resource upward. Returns null when nothing applies.
 * @ignore
 * @param resourceTokens map of resource id -> token
 * @param path request path, e.g. "/dbs/db1/colls/c1"
 * @param resourceId id of the target resource
 */
// TODO: Resource tokens
function getAuthorizationTokenUsingResourceTokens(
  resourceTokens: { [resourceId: string]: string },
  path: string,
  resourceId: string
) {
  const tokenIds = resourceTokens ? Object.keys(resourceTokens) : [];
  if (tokenIds.length === 0) {
    return null;
  }
  // For database account access (through getDatabaseAccount API), path and resourceId
  // are both "", and the service will accept any token, so return the first one.
  if (!path && !resourceId) {
    return resourceTokens[tokenIds[0]];
  }
  if (resourceId && resourceTokens[resourceId]) {
    return resourceTokens[resourceId];
  }
  // minimum valid path is "/dbs"
  if (!path || path.length < 4) {
    return null;
  }
  // Trim a single leading and a single trailing '/' before splitting into segments.
  const trimmed = path.replace(/^\//, "").replace(/\/$/, "");
  const pathSegments = trimmed ? trimmed.split("/") : [];
  // If it's an incomplete path like "dbs/db1/colls", start from the parent resource.
  // Resource ids sit at odd indexes (dbs/<id>/colls/<id>/...).
  let index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
  while (index > 0) {
    const id = decodeURI(pathSegments[index]);
    if (resourceTokens[id]) {
      return resourceTokens[id];
    }
    index -= 2;
  }
  return null;
}

Просмотреть файл

@ -1,5 +1,5 @@
import { ClientContext } from "../../ClientContext";
import { Constants, Helper } from "../../common";
import { Constants, getIdFromLink, getPathFromLink, ResourceType } from "../../common";
import { RequestOptions } from "../../request";
import { Container } from "../Container";
import { ConflictDefinition } from "./ConflictDefinition";
@ -33,11 +33,16 @@ export class Conflict {
* @param options
*/
public async read(options?: RequestOptions): Promise<ConflictResponse> {
const path = Helper.getPathFromLink(this.url, "conflicts");
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url, ResourceType.conflicts);
const id = getIdFromLink(this.url);
const response = await this.clientContext.read<ConflictDefinition>(path, "users", id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, conflict: this };
const response = await this.clientContext.read<ConflictDefinition>({
path,
resourceType: ResourceType.user,
resourceId: id,
options
});
return new ConflictResponse(response.result, response.headers, response.code, this);
}
/**
@ -45,10 +50,15 @@ export class Conflict {
* @param options
*/
public async delete(options?: RequestOptions): Promise<ConflictResponse> {
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.delete<ConflictDefinition>(path, "conflicts", id, undefined, options);
return { body: response.result, headers: response.headers, ref: this, conflict: this };
const response = await this.clientContext.delete<ConflictDefinition>({
path,
resourceType: ResourceType.conflicts,
resourceId: id,
options
});
return new ConflictResponse(response.result, response.headers, response.code, this);
}
}

Просмотреть файл

@ -1,11 +1,11 @@
import { ItemDefinition } from "../Item";
import { OperationType, ResourceType } from "../../common";
export interface ConflictDefinition {
/** The id of the conflict */
id?: string;
/** Source resource id */
resourceId?: string;
resourceType?: string;
operationType?: string; // TODO: enum
resourceType?: ResourceType;
operationType?: OperationType;
content?: string;
}

Просмотреть файл

@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request";
import { Resource } from "../Resource";
import { Conflict } from "./Conflict";
import { ConflictDefinition } from "./ConflictDefinition";
export interface ConflictResponse extends CosmosResponse<ConflictDefinition & Resource, Conflict> {
export class ConflictResponse extends ResourceResponse<ConflictDefinition & Resource> {
constructor(resource: ConflictDefinition & Resource, headers: CosmosHeaders, statusCode: number, conflict: Conflict) {
super(resource, headers, statusCode);
this.conflict = conflict;
}
/** A reference to the {@link Conflict} corresponding to the returned {@link ConflictDefinition}. */
conflict: Conflict;
public readonly conflict: Conflict;
}

Просмотреть файл

@ -1,5 +1,5 @@
import { ClientContext } from "../../ClientContext";
import { Helper } from "../../common";
import { getIdFromLink, getPathFromLink, ResourceType } from "../../common";
import { SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions } from "../../request";
@ -21,20 +21,27 @@ export class Conflicts {
* @param options Use to set options like response page size, continuation tokens, etc.
* @returns {@link QueryIterator} Allows you to return results in an array or iterate over them one at a time.
*/
public query(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<any>;
public query(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<any>;
/**
* Queries all conflicts.
* @param query Query configuration for the operation. See {@link SqlQuerySpec} for more info on how to configure a query.
* @param options Use to set options like response page size, continuation tokens, etc.
* @returns {@link QueryIterator} Allows you to return results in an array or iterate over them one at a time.
*/
public query<T>(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<T>;
public query<T>(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<T> {
const path = Helper.getPathFromLink(this.container.url, "conflicts");
const id = Helper.getIdFromLink(this.container.url);
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T>;
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T> {
const path = getPathFromLink(this.container.url, ResourceType.conflicts);
const id = getIdFromLink(this.container.url);
return new QueryIterator(this.clientContext, query, options, innerOptions => {
return this.clientContext.queryFeed(path, "conflicts", id, result => result.Conflicts, query, innerOptions);
return this.clientContext.queryFeed({
path,
resourceType: ResourceType.conflicts,
resourceId: id,
resultFn: result => result.Conflicts,
query,
options: innerOptions
});
});
}

Просмотреть файл

@ -1,16 +1,23 @@
import { PartitionKey } from "../..";
import { ClientContext } from "../../ClientContext";
import { Helper, UriFactory } from "../../common";
import {
createDocumentCollectionUri,
getIdFromLink,
getPathFromLink,
isResourceValid,
ResourceType
} from "../../common";
import { PartitionKeyDefinition } from "../../documents";
import { CosmosResponse, FeedOptions, RequestOptions } from "../../request";
import { SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions, ResourceResponse, Response } from "../../request";
import { PartitionedQueryExecutionInfo } from "../../request/ErrorResponse";
import { Conflict, Conflicts } from "../Conflict";
import { Database } from "../Database";
import { Item, Items } from "../Item";
import { StoredProcedure, StoredProcedures } from "../StoredProcedure";
import { Trigger, Triggers } from "../Trigger";
import { UserDefinedFunction, UserDefinedFunctions } from "../UserDefinedFunction";
import { Scripts } from "../Script/Scripts";
import { ContainerDefinition } from "./ContainerDefinition";
import { ContainerResponse } from "./ContainerResponse";
import { PartitionKeyRange } from "./PartitionKeyRange";
/**
* Operations for reading, replacing, or deleting a specific, existing container by id.
@ -23,6 +30,7 @@ import { ContainerResponse } from "./ContainerResponse";
* do this once on application start up.
*/
export class Container {
private $items: Items;
/**
* Operations for creating new items, and reading/querying all items
*
@ -33,33 +41,42 @@ export class Container {
* const {body: createdItem} = await container.items.create({id: "<item id>", properties: {}});
* ```
*/
public readonly items: Items;
/**
* Operations for creating new stored procedures, and reading/querying all stored procedures.
*
* For reading, replacing, or deleting an existing stored procedure, use `.storedProcedure(id)`.
*/
public readonly storedProcedures: StoredProcedures;
/**
* Operations for creating new triggers, and reading/querying all triggers.
*
* For reading, replacing, or deleting an existing trigger, use `.trigger(id)`.
*/
public readonly triggers: Triggers;
/**
* Operations for creating new user defined functions, and reading/querying all user defined functions.
*
* For reading, replacing, or deleting an existing user defined function, use `.userDefinedFunction(id)`.
*/
public readonly userDefinedFunctions: UserDefinedFunctions;
public get items(): Items {
if (!this.$items) {
this.$items = new Items(this, this.clientContext);
}
return this.$items;
}
public readonly conflicts: Conflicts;
private $scripts: Scripts;
/**
* All operations for Stored Procedures, Triggers, and User Defined Functions
*/
public get scripts(): Scripts {
if (!this.$scripts) {
this.$scripts = new Scripts(this, this.clientContext);
}
return this.$scripts;
}
private $conflicts: Conflicts;
/**
* Operations for reading and querying conflicts for the given container.
*
* For reading or deleting a specific conflict, use `.conflict(id)`.
*/
public get conflicts(): Conflicts {
if (!this.$conflicts) {
this.$conflicts = new Conflicts(this, this.clientContext);
}
return this.$conflicts;
}
/**
* Returns a reference URL to the resource. Used for linking in Permissions.
*/
public get url() {
return UriFactory.createDocumentCollectionUri(this.database.id, this.id);
return createDocumentCollectionUri(this.database.id, this.id);
}
/**
@ -72,13 +89,7 @@ export class Container {
public readonly database: Database,
public readonly id: string,
private readonly clientContext: ClientContext
) {
this.items = new Items(this, this.clientContext);
this.storedProcedures = new StoredProcedures(this, this.clientContext);
this.triggers = new Triggers(this, this.clientContext);
this.userDefinedFunctions = new UserDefinedFunctions(this, this.clientContext);
this.conflicts = new Conflicts(this, this.clientContext);
}
) {}
/**
* Used to read, replace, or delete a specific, existing {@link Item} by id.
@ -86,24 +97,14 @@ export class Container {
* Use `.items` for creating new items, or querying/reading all items.
*
* @param id The id of the {@link Item}.
* @param partitionKey The partition key of the {@link Item}. (Required for partitioned containers).
* @param partitionKey The partition key of the {@link Item}
* @example Replace an item
* const {body: replacedItem} = await container.item("<item id>").replace({id: "<item id>", title: "Updated post", authorID: 5});
*/
public item(id: string, partitionKey?: string): Item {
public item(id: string, partitionKey: any): Item {
return new Item(this, id, partitionKey, this.clientContext);
}
/**
* Used to read, replace, or delete a specific, existing {@link UserDefinedFunction} by id.
*
* Use `.userDefinedFunctions` for creating new user defined functions, or querying/reading all user defined functions.
* @param id The id of the {@link UserDefinedFunction}.
*/
public userDefinedFunction(id: string): UserDefinedFunction {
return new UserDefinedFunction(this, id, this.clientContext);
}
/**
* Used to read, replace, or delete a specific, existing {@link Conflict} by id.
*
@ -114,72 +115,53 @@ export class Container {
return new Conflict(this, id, this.clientContext);
}
/**
* Used to read, replace, or delete a specific, existing {@link StoredProcedure} by id.
*
* Use `.storedProcedures` for creating new stored procedures, or querying/reading all stored procedures.
* @param id The id of the {@link StoredProcedure}.
*/
public storedProcedure(id: string): StoredProcedure {
return new StoredProcedure(this, id, this.clientContext);
}
/**
* Used to read, replace, or delete a specific, existing {@link Trigger} by id.
*
* Use `.triggers` for creating new triggers, or querying/reading all triggers.
* @param id The id of the {@link Trigger}.
*/
public trigger(id: string): Trigger {
return new Trigger(this, id, this.clientContext);
}
/** Read the container's definition */
public async read(options?: RequestOptions): Promise<ContainerResponse> {
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.read<ContainerDefinition>(path, "colls", id, undefined, options);
const response = await this.clientContext.read<ContainerDefinition>({
path,
resourceType: ResourceType.container,
resourceId: id,
options
});
this.clientContext.partitionKeyDefinitionCache[this.url] = response.result.partitionKey;
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
return new ContainerResponse(response.result, response.headers, response.code, this);
}
/** Replace the container's definition */
public async replace(body: ContainerDefinition, options?: RequestOptions): Promise<ContainerResponse> {
const err = {};
if (!Helper.isResourceValid(body, err)) {
if (!isResourceValid(body, err)) {
throw err;
}
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.replace<ContainerDefinition>(body, path, "colls", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
const response = await this.clientContext.replace<ContainerDefinition>({
body,
path,
resourceType: ResourceType.container,
resourceId: id,
options
});
return new ContainerResponse(response.result, response.headers, response.code, this);
}
/** Delete the container */
public async delete(options?: RequestOptions): Promise<ContainerResponse> {
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.delete<ContainerDefinition>(path, "colls", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
container: this
};
const response = await this.clientContext.delete<ContainerDefinition>({
path,
resourceType: ResourceType.container,
resourceId: id,
options
});
return new ContainerResponse(response.result, response.headers, response.code, this);
}
/**
@ -189,53 +171,32 @@ export class Container {
* @param {function} callback - \
* The arguments to the callback are(in order): error, partitionKeyDefinition, response object and response headers
*/
public async getPartitionKeyDefinition(): Promise<CosmosResponse<PartitionKeyDefinition, Container>> {
public async getPartitionKeyDefinition(): Promise<ResourceResponse<PartitionKeyDefinition>> {
// $ISSUE-felixfan-2016-03-17: Make name based path and link based path use the same key
// $ISSUE-felixfan-2016-03-17: Refresh partitionKeyDefinitionCache when necessary
if (this.url in this.clientContext.partitionKeyDefinitionCache) {
return {
body: this.clientContext.partitionKeyDefinitionCache[this.url],
ref: this
};
return new ResourceResponse<PartitionKeyDefinition>(
this.clientContext.partitionKeyDefinitionCache[this.url],
{},
0
);
}
const { headers } = await this.read();
return {
body: this.clientContext.partitionKeyDefinitionCache[this.url],
const { headers, statusCode } = await this.read();
return new ResourceResponse<PartitionKeyDefinition>(
this.clientContext.partitionKeyDefinitionCache[this.url],
headers,
ref: this
};
statusCode
);
}
public readPartitionKeyRanges(feedOptions?: FeedOptions) {
public async getQueryPlan(query: string | SqlQuerySpec): Promise<Response<PartitionedQueryExecutionInfo>> {
const path = getPathFromLink(this.url);
return this.clientContext.getQueryPlan(path + "/docs", ResourceType.item, getIdFromLink(this.url), query);
}
public readPartitionKeyRanges(feedOptions?: FeedOptions): QueryIterator<PartitionKeyRange> {
feedOptions = feedOptions || {};
return this.clientContext.queryPartitionKeyRanges(this.url, undefined, feedOptions);
}
// TODO: The ParitionKey type is REALLY weird. Now that it's being exported, we should clean it up.
public extractPartitionKey(document: any, partitionKeyDefinition: PartitionKeyDefinition): PartitionKey[] {
// TODO: any
if (partitionKeyDefinition && partitionKeyDefinition.paths && partitionKeyDefinition.paths.length > 0) {
const partitionKey: PartitionKey[] = [];
partitionKeyDefinition.paths.forEach((path: string) => {
const pathParts = Helper.parsePath(path);
let obj = document;
for (const part of pathParts) {
if (!(typeof obj === "object" && part in obj)) {
obj = {};
break;
}
obj = obj[part];
}
partitionKey.push(obj);
});
return partitionKey;
}
return undefined;
}
}

Просмотреть файл

@ -1,4 +1,4 @@
import { IndexingPolicy, PartitionKey, PartitionKeyDefinition } from "../../documents";
import { IndexingPolicy, PartitionKeyDefinition } from "../../documents";
import { ConflictResolutionPolicy } from "../Conflict/ConflictResolutionPolicy";
import { UniqueKeyPolicy } from "./UniqueKeyPolicy";

Просмотреть файл

@ -0,0 +1,6 @@
import { ContainerDefinition } from "./ContainerDefinition";
export interface ContainerRequest extends ContainerDefinition {
/** Throughput for this container. */
throughput?: number;
}

Просмотреть файл

@ -1,10 +1,20 @@
import { Container } from ".";
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { ContainerDefinition } from "./ContainerDefinition";
import { Container } from "./index";
/** Response object for Container operations */
export interface ContainerResponse extends CosmosResponse<ContainerDefinition & Resource, Container> {
export class ContainerResponse extends ResourceResponse<ContainerDefinition & Resource> {
constructor(
resource: ContainerDefinition & Resource,
headers: CosmosHeaders,
statusCode: number,
container: Container
) {
super(resource, headers, statusCode);
this.container = container;
}
/** A reference to the {@link Container} that the returned {@link ContainerDefinition} corresponds to. */
container: Container;
public readonly container: Container;
}

Просмотреть файл

@ -1,12 +1,14 @@
import { ClientContext } from "../../ClientContext";
import { Helper, StatusCodes } from "../../common";
import { HeaderUtils, SqlQuerySpec } from "../../queryExecutionContext";
import { Constants, getIdFromLink, getPathFromLink, isResourceValid, ResourceType, StatusCodes } from "../../common";
import { DEFAULT_PARTITION_KEY_PATH } from "../../common/partitionKeys";
import { mergeHeaders, SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions } from "../../request";
import { Database } from "../Database";
import { Resource } from "../Resource";
import { Container } from "./Container";
import { ContainerDefinition } from "./ContainerDefinition";
import { ContainerRequest } from "./ContainerRequest";
import { ContainerResponse } from "./ContainerResponse";
/**
@ -35,7 +37,7 @@ export class Containers {
* {name: "@container", value: "Todo"}
* ]
* };
* const {body: containerList} = await client.database("<db id>").containers.query(querySpec).toArray();
* const {body: containerList} = await client.database("<db id>").containers.query(querySpec).fetchAll();
* ```
*/
public query(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<any>;
@ -52,23 +54,23 @@ export class Containers {
* {name: "@container", value: "Todo"}
* ]
* };
* const {body: containerList} = await client.database("<db id>").containers.query(querySpec).toArray();
* const {body: containerList} = await client.database("<db id>").containers.query(querySpec).fetchAll();
* ```
*/
public query<T>(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<T>;
public query<T>(query: SqlQuerySpec, options?: FeedOptions): QueryIterator<T> {
const path = Helper.getPathFromLink(this.database.url, "colls");
const id = Helper.getIdFromLink(this.database.url);
const path = getPathFromLink(this.database.url, ResourceType.container);
const id = getIdFromLink(this.database.url);
return new QueryIterator(this.clientContext, query, options, innerOptions => {
return this.clientContext.queryFeed<ContainerDefinition>(
return this.clientContext.queryFeed<ContainerDefinition>({
path,
"colls",
id,
result => result.DocumentCollections,
resourceType: ResourceType.container,
resourceId: id,
resultFn: result => result.DocumentCollections,
query,
innerOptions
);
options: innerOptions
});
});
}
@ -89,22 +91,37 @@ export class Containers {
* @param body Represents the body of the container.
* @param options Use to set options like response page size, continuation tokens, etc.
*/
public async create(body: ContainerDefinition, options?: RequestOptions): Promise<ContainerResponse> {
public async create(body: ContainerRequest, options: RequestOptions = {}): Promise<ContainerResponse> {
const err = {};
if (!Helper.isResourceValid(body, err)) {
if (!isResourceValid(body, err)) {
throw err;
}
const path = Helper.getPathFromLink(this.database.url, "colls");
const id = Helper.getIdFromLink(this.database.url);
const path = getPathFromLink(this.database.url, ResourceType.container);
const id = getIdFromLink(this.database.url);
const response = await this.clientContext.create<ContainerDefinition>(body, path, "colls", id, undefined, options);
if (body.throughput) {
options.initialHeaders = Object.assign({}, options.initialHeaders, {
[Constants.HttpHeaders.OfferThroughput]: body.throughput
});
delete body.throughput;
}
// If they don't specify a partition key, use the default path
if (!body.partitionKey || !body.partitionKey.paths) {
body.partitionKey = {
paths: [DEFAULT_PARTITION_KEY_PATH]
};
}
const response = await this.clientContext.create<ContainerRequest>({
body,
path,
resourceType: ResourceType.container,
resourceId: id,
options
});
const ref = new Container(this.database, response.result.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
container: ref
};
return new ContainerResponse(response.result, response.headers, response.code, ref);
}
/**
@ -126,7 +143,7 @@ export class Containers {
* @param body Represents the body of the container.
* @param options Use to set options like response page size, continuation tokens, etc.
*/
public async createIfNotExists(body: ContainerDefinition, options?: RequestOptions): Promise<ContainerResponse> {
public async createIfNotExists(body: ContainerRequest, options?: RequestOptions): Promise<ContainerResponse> {
if (!body || body.id === null || body.id === undefined) {
throw new Error("body parameter must be an object with an id property");
}
@ -141,7 +158,7 @@ export class Containers {
if (err.code === StatusCodes.NotFound) {
const createResponse = await this.create(body, options);
// Must merge the headers to capture RU costskaty
HeaderUtils.mergeHeaders(createResponse.headers, err.headers);
mergeHeaders(createResponse.headers, err.headers);
return createResponse;
} else {
throw err;
@ -155,7 +172,7 @@ export class Containers {
* @returns {@link QueryIterator} Allows you to return all containers in an array or iterate over them one at a time.
* @example Read all containers to array.
* ```typescript
* const {body: containerList} = await client.database("<db id>").containers.readAll().toArray();
* const {body: containerList} = await client.database("<db id>").containers.readAll().fetchAll();
* ```
*/
public readAll(options?: FeedOptions): QueryIterator<ContainerDefinition & Resource> {

Просмотреть файл

@ -1,3 +1,6 @@
/**
* @ignore
*/
export interface PartitionKeyRange {
id: string;
minInclusive: string;

Просмотреть файл

@ -1,5 +1,5 @@
import { ClientContext } from "../../ClientContext";
import { Helper, UriFactory } from "../../common";
import { createDatabaseUri, getIdFromLink, getPathFromLink, ResourceType } from "../../common";
import { CosmosClient } from "../../CosmosClient";
import { RequestOptions } from "../../request";
import { Container, Containers } from "../Container";
@ -40,7 +40,7 @@ export class Database {
* Returns a reference URL to the resource. Used for linking in Permissions.
*/
public get url() {
return UriFactory.createDatabaseUri(this.id);
return createDatabaseUri(this.id);
}
/** Returns a new {@link Database} instance.
@ -77,27 +77,27 @@ export class Database {
/** Read the definition of the given Database. */
public async read(options?: RequestOptions): Promise<DatabaseResponse> {
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const response = await this.clientContext.read<DatabaseDefinition>(path, "dbs", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
database: this
};
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.read<DatabaseDefinition>({
path,
resourceType: ResourceType.database,
resourceId: id,
options
});
return new DatabaseResponse(response.result, response.headers, response.code, this);
}
/** Delete the given Database. */
public async delete(options?: RequestOptions): Promise<DatabaseResponse> {
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const response = await this.clientContext.delete<DatabaseDefinition>(path, "dbs", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
database: this
};
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.delete<DatabaseDefinition>({
path,
resourceType: ResourceType.database,
resourceId: id,
options
});
return new DatabaseResponse(response.result, response.headers, response.code, this);
}
}

Просмотреть файл

@ -0,0 +1,6 @@
import { DatabaseDefinition } from "./DatabaseDefinition";
export interface DatabaseRequest extends DatabaseDefinition {
/** Throughput for this database. */
throughput?: number;
}

Просмотреть файл

@ -1,10 +1,15 @@
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { Database } from "./Database";
import { DatabaseDefinition } from "./DatabaseDefinition";
/** Response object for Database operations */
export interface DatabaseResponse extends CosmosResponse<DatabaseDefinition & Resource, Database> {
export class DatabaseResponse extends ResourceResponse<DatabaseDefinition & Resource> {
constructor(resource: DatabaseDefinition & Resource, headers: CosmosHeaders, statusCode: number, database: Database) {
super(resource, headers, statusCode);
this.database = database;
}
/** A reference to the {@link Database} that the returned {@link DatabaseDefinition} corresponds to. */
database: Database;
public readonly database: Database;
}

Просмотреть файл

@ -1,12 +1,13 @@
import { ClientContext } from "../../ClientContext";
import { Helper, StatusCodes } from "../../common";
import { Constants, isResourceValid, ResourceType, StatusCodes } from "../../common";
import { CosmosClient } from "../../CosmosClient";
import { FetchFunctionCallback, HeaderUtils, SqlQuerySpec } from "../../queryExecutionContext";
import { FetchFunctionCallback, mergeHeaders, SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions } from "../../request";
import { Resource } from "../Resource";
import { Database } from "./Database";
import { DatabaseDefinition } from "./DatabaseDefinition";
import { DatabaseRequest } from "./DatabaseRequest";
import { DatabaseResponse } from "./DatabaseResponse";
/**
@ -39,7 +40,7 @@ export class Databases {
* {name: "@db", value: "Todo"}
* ]
* };
* const {body: databaseList} = await client.databases.query(querySpec).toArray();
* const {body: databaseList} = await client.databases.query(querySpec).fetchAll();
* ```
*/
public query(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<any>;
@ -56,13 +57,20 @@ export class Databases {
* {name: "@db", value: "Todo"}
* ]
* };
* const {body: databaseList} = await client.databases.query(querySpec).toArray();
* const {body: databaseList} = await client.databases.query(querySpec).fetchAll();
* ```
*/
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T>;
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T> {
const cb: FetchFunctionCallback = innerOptions => {
return this.clientContext.queryFeed("/dbs", "dbs", "", result => result.Databases, query, innerOptions);
return this.clientContext.queryFeed({
path: "/dbs",
resourceType: ResourceType.database,
resourceId: "",
resultFn: result => result.Databases,
query,
options: innerOptions
});
};
return new QueryIterator(this.clientContext, query, options, cb);
}
@ -81,28 +89,29 @@ export class Databases {
* @param body The {@link DatabaseDefinition} that represents the {@link Database} to be created.
* @param options Use to set options like response page size, continuation tokens, etc.
*/
public async create(body: DatabaseDefinition, options?: RequestOptions): Promise<DatabaseResponse> {
public async create(body: DatabaseRequest, options: RequestOptions = {}): Promise<DatabaseResponse> {
const err = {};
if (!Helper.isResourceValid(body, err)) {
if (!isResourceValid(body, err)) {
throw err;
}
if (body.throughput) {
options.initialHeaders = Object.assign({}, options.initialHeaders, {
[Constants.HttpHeaders.OfferThroughput]: body.throughput
});
delete body.throughput;
}
const path = "/dbs"; // TODO: constant
const response = await this.clientContext.create<DatabaseDefinition>(
const response = await this.clientContext.create<DatabaseRequest>({
body,
path,
"dbs",
undefined,
undefined,
resourceType: ResourceType.database,
resourceId: undefined,
options
);
});
const ref = new Database(this.client, body.id, this.clientContext);
return {
body: response.result,
headers: response.headers,
ref,
database: ref
};
return new DatabaseResponse(response.result, response.headers, response.code, ref);
}
/**
@ -120,7 +129,7 @@ export class Databases {
* @param body The {@link DatabaseDefinition} that represents the {@link Database} to be created.
* @param options
*/
public async createIfNotExists(body: DatabaseDefinition, options?: RequestOptions): Promise<DatabaseResponse> {
public async createIfNotExists(body: DatabaseRequest, options?: RequestOptions): Promise<DatabaseResponse> {
if (!body || body.id === null || body.id === undefined) {
throw new Error("body parameter must be an object with an id property");
}
@ -135,7 +144,7 @@ export class Databases {
if (err.code === StatusCodes.NotFound) {
const createResponse = await this.create(body, options);
// Must merge the headers to capture RU costskaty
HeaderUtils.mergeHeaders(createResponse.headers, err.headers);
mergeHeaders(createResponse.headers, err.headers);
return createResponse;
} else {
throw err;
@ -150,7 +159,7 @@ export class Databases {
* @returns {@link QueryIterator} Allows you to return all databases in an array or iterate over them one at a time.
* @example Read all databases to array.
* ```typescript
* const {body: databaseList} = await client.databases.readAll().toArray();
* const {body: databaseList} = await client.databases.readAll().fetchAll();
* ```
*/
public readAll(options?: FeedOptions): QueryIterator<DatabaseDefinition & Resource> {

Просмотреть файл

@ -1,6 +1,15 @@
import { ClientContext } from "../../ClientContext";
import { Helper, UriFactory } from "../../common";
import { RequestOptions } from "../../request";
import {
createDocumentUri,
getIdFromLink,
getPathFromLink,
isResourceValid,
ResourceType,
StatusCodes
} from "../../common";
import { PartitionKey } from "../../documents";
import { extractPartitionKey, undefinedPartitionKey } from "../../extractPartitionKey";
import { RequestOptions, Response } from "../../request";
import { Container } from "../Container";
import { Resource } from "../Resource";
import { ItemDefinition } from "./ItemDefinition";
@ -12,35 +21,29 @@ import { ItemResponse } from "./ItemResponse";
* @see {@link Items} for operations on all items; see `container.items`.
*/
export class Item {
private partitionKey: PartitionKey;
/**
* Returns a reference URL to the resource. Used for linking in Permissions.
*/
public get url() {
return UriFactory.createDocumentUri(this.container.database.id, this.container.id, this.id);
return createDocumentUri(this.container.database.id, this.container.id, this.id);
}
/**
* @hidden
* @param container The parent {@link Container}.
* @param id The id of the given {@link Item}.
* @param primaryKey The primary key of the given {@link Item} (only for partitioned containers).
* @param partitionKey The primary key of the given {@link Item} (only for partitioned containers).
*/
constructor(
public readonly container: Container,
public readonly id: string,
public readonly primaryKey: string,
partitionKey: PartitionKey,
private readonly clientContext: ClientContext
) {}
) {
this.partitionKey = partitionKey;
}
/**
* Read the item's definition.
*
* There is no set schema for JSON items. They may contain any number of custom properties.
*
* @param options Additional options for the request, such as the partition key.
* Note, if you provide a partition key on the options object, it will override the primary key on `this.primaryKey`.
*/
public read(options?: RequestOptions): Promise<ItemResponse<ItemDefinition>>;
/**
* Read the item's definition.
*
@ -52,7 +55,7 @@ export class Item {
* There is no set schema for JSON items. They may contain any number of custom properties.
*
* @param options Additional options for the request, such as the partition key.
* Note, if you provide a partition key on the options object, it will override the primary key on `this.primaryKey`.
* Note, if you provide a partition key on the options object, it will override the primary key on `this.partitionKey`.
*
* @example Using custom type for response
* ```typescript
@ -66,22 +69,30 @@ export class Item {
* ({body: item} = await item.read<TodoItem>());
* ```
*/
public read<T extends ItemDefinition>(options?: RequestOptions): Promise<ItemResponse<T>>;
public async read<T extends ItemDefinition>(options?: RequestOptions): Promise<ItemResponse<T>> {
options = options || {};
if ((!options || !options.partitionKey) && this.primaryKey) {
options.partitionKey = this.primaryKey;
public async read<T extends ItemDefinition = any>(options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (this.partitionKey === undefined) {
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
this.partitionKey = undefinedPartitionKey(partitionKeyDefinition);
}
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
let response: Response<T & Resource>;
try {
response = await this.clientContext.read<T>({
path,
resourceType: ResourceType.item,
resourceId: id,
options,
partitionKey: this.partitionKey
});
} catch (error) {
if (error.code !== StatusCodes.NotFound) {
throw error;
}
response = error;
}
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const response = await this.clientContext.read<T>(path, "docs", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
}
/**
@ -105,38 +116,31 @@ export class Item {
* @param options Additional options for the request, such as the partition key.
*/
public replace<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>>;
public async replace<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>> {
options = options || {};
if ((!options || !options.partitionKey) && this.primaryKey) {
options.partitionKey = this.primaryKey;
}
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
const { body: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
options.partitionKey = this.container.extractPartitionKey(body, partitionKeyDefinition);
public async replace<T extends ItemDefinition>(body: T, options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (this.partitionKey === undefined) {
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
this.partitionKey = extractPartitionKey(body, partitionKeyDefinition);
}
const err = {};
if (!Helper.isResourceValid(body, err)) {
if (!isResourceValid(body, err)) {
throw err;
}
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.replace<T>(body, path, "docs", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
const response = await this.clientContext.replace<T>({
body,
path,
resourceType: ResourceType.item,
resourceId: id,
options,
partitionKey: this.partitionKey
});
return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
}
/**
* Delete the item.
* @param options Additional options for the request, such as the partition key.
*/
public delete(options?: RequestOptions): Promise<ItemResponse<ItemDefinition>>;
/**
* Delete the item.
*
@ -145,21 +149,22 @@ export class Item {
*
* @param options Additional options for the request, such as the partition key.
*/
public delete<T extends ItemDefinition>(options?: RequestOptions): Promise<ItemResponse<T>>;
public async delete<T extends ItemDefinition>(options?: RequestOptions): Promise<ItemResponse<T>> {
options = options || {};
if ((!options || !options.partitionKey) && this.primaryKey) {
options.partitionKey = this.primaryKey;
public async delete<T extends ItemDefinition = any>(options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (this.partitionKey === undefined) {
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
this.partitionKey = undefinedPartitionKey(partitionKeyDefinition);
}
const path = Helper.getPathFromLink(this.url);
const id = Helper.getIdFromLink(this.url);
const response = await this.clientContext.delete<T>(path, "docs", id, undefined, options);
return {
body: response.result,
headers: response.headers,
ref: this,
item: this
};
const path = getPathFromLink(this.url);
const id = getIdFromLink(this.url);
const response = await this.clientContext.delete<T>({
path,
resourceType: ResourceType.item,
resourceId: id,
options,
partitionKey: this.partitionKey
});
return new ItemResponse(response.result, response.headers, response.code, response.substatus, this);
}
}

Просмотреть файл

@ -7,5 +7,9 @@
* in {@link ItemBody}
*/
export interface ItemDefinition {
/** The id of the item. User settable property. Uniquely identifies the item along with the partition key */
id?: string;
/** Time to live in seconds for collections with TTL enabled */
ttl?: number;
[key: string]: any;
}

Просмотреть файл

@ -1,9 +1,14 @@
import { CosmosResponse } from "../../request/CosmosResponse";
import { CosmosHeaders } from "../../queryExecutionContext";
import { ResourceResponse } from "../../request/ResourceResponse";
import { Resource } from "../Resource";
import { Item } from "./Item";
import { ItemDefinition } from "./ItemDefinition";
export interface ItemResponse<T extends ItemDefinition> extends CosmosResponse<T & Resource, Item> {
export class ItemResponse<T extends ItemDefinition> extends ResourceResponse<T & Resource> {
constructor(resource: T & Resource, headers: CosmosHeaders, statusCode: number, subsstatusCode: number, item: Item) {
super(resource, headers, statusCode, subsstatusCode);
this.item = item;
}
/** Reference to the {@link Item} the response corresponds to. */
item: Item;
public readonly item: Item;
}

Просмотреть файл

@ -1,16 +1,21 @@
import uuid from "uuid/v4";
import { ChangeFeedIterator } from "../../ChangeFeedIterator";
import { ChangeFeedOptions } from "../../ChangeFeedOptions";
import { ClientContext } from "../../ClientContext";
import { Helper } from "../../common";
import { getIdFromLink, getPathFromLink, isResourceValid, ResourceType } from "../../common";
import { extractPartitionKey } from "../../extractPartitionKey";
import { FetchFunctionCallback, SqlQuerySpec } from "../../queryExecutionContext";
import { QueryIterator } from "../../queryIterator";
import { FeedOptions, RequestOptions } from "../../request";
import { Container } from "../Container";
import { Resource } from "../Resource";
import { Item } from "./Item";
import { ItemDefinition } from "./ItemDefinition";
import { ItemResponse } from "./ItemResponse";
/**
* @ignore
* @param options
*/
function isChangeFeedOptions(options: unknown): options is ChangeFeedOptions {
const optionsType = typeof options;
return options && !(optionsType === "string" || optionsType === "boolean" || optionsType === "number");
@ -41,7 +46,7 @@ export class Items {
* {name: "@lastName", value: "Hendricks"}
* ]
* };
* const {result: items} = await items.query(querySpec).toArray();
* const {result: items} = await items.query(querySpec).fetchAll();
* ```
*/
public query(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<any>;
@ -57,26 +62,26 @@ export class Items {
* {name: "@lastName", value: "Hendricks"}
* ]
* };
* const {result: items} = await items.query<{firstName: string}>(querySpec).toArray();
* const {result: items} = await items.query<{firstName: string}>(querySpec).fetchAll();
* ```
*/
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T>;
public query<T>(query: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<T> {
const path = Helper.getPathFromLink(this.container.url, "docs");
const id = Helper.getIdFromLink(this.container.url);
public query<T>(query: string | SqlQuerySpec, options: FeedOptions): QueryIterator<T>;
public query<T>(query: string | SqlQuerySpec, options: FeedOptions = {}): QueryIterator<T> {
const path = getPathFromLink(this.container.url, ResourceType.item);
const id = getIdFromLink(this.container.url);
const fetchFunction: FetchFunctionCallback = (innerOptions: FeedOptions) => {
return this.clientContext.queryFeed(
return this.clientContext.queryFeed({
path,
"docs",
id,
result => (result ? result.Documents : []),
resourceType: ResourceType.item,
resourceId: id,
resultFn: result => (result ? result.Documents : []),
query,
innerOptions
);
options: innerOptions
});
};
return new QueryIterator(this.clientContext, query, options, fetchFunction, this.container.url);
return new QueryIterator(this.clientContext, query, options, fetchFunction, this.container.url, ResourceType.item);
}
/**
@ -88,9 +93,9 @@ export class Items {
* @example Read from the beginning of the change feed.
* ```javascript
* const iterator = items.readChangeFeed({ startFromBeginning: true });
* const firstPage = await iterator.executeNext();
* const firstPage = await iterator.fetchNext();
* const firstPageResults = firstPage.result
* const secondPage = await iterator.executeNext();
* const secondPage = await iterator.fetchNext();
* ```
*/
public readChangeFeed(
@ -135,19 +140,9 @@ export class Items {
throw new Error("changeFeedOptions must be a valid object");
}
const path = Helper.getPathFromLink(this.container.url, "docs");
const id = Helper.getIdFromLink(this.container.url);
return new ChangeFeedIterator<T>(
this.clientContext,
id,
path,
partitionKey,
async () => {
const bodyWillBeTruthyIfPartitioned = (await this.container.getPartitionKeyDefinition()).body;
return !!bodyWillBeTruthyIfPartitioned;
},
changeFeedOptions
);
const path = getPathFromLink(this.container.url, ResourceType.item);
const id = getIdFromLink(this.container.url);
return new ChangeFeedIterator<T>(this.clientContext, id, path, partitionKey, changeFeedOptions);
}
/**
@ -158,7 +153,7 @@ export class Items {
* @param options Used for modifying the request (for instance, specifying the partition key).
* @example Read all items to array.
* ```typescript
* const {body: containerList} = await items.readAll().toArray();
* const {body: containerList} = await items.readAll().fetchAll();
* ```
*/
public readAll(options?: FeedOptions): QueryIterator<ItemDefinition>;
@ -173,23 +168,14 @@ export class Items {
* @param options Used for modifying the request (for instance, specifying the partition key).
* @example Read all items to array.
* ```typescript
* const {body: containerList} = await items.readAll().toArray();
* const {body: containerList} = await items.readAll().fetchAll();
* ```
*/
public readAll<T extends ItemDefinition>(options?: FeedOptions): QueryIterator<T>;
public readAll<T extends ItemDefinition>(options?: FeedOptions): QueryIterator<T> {
return this.query<T>(undefined, options);
return this.query<T>("SELECT * from c", options);
}
/**
* Create a item.
*
* There is no set schema for JSON items. They may contain any number of custom properties..
*
* @param body Represents the body of the item. Can contain any number of user defined properties.
* @param options Used for modifying the request (for instance, specifying the partition key).
*/
public async create(body: any, options?: RequestOptions): Promise<ItemResponse<ItemDefinition>>;
/**
 * Create an item.
 *
 * Any provided type, T, is not necessarily enforced by the SDK.
 * You may get more or less properties and it's up to your logic to enforce it.
 *
 * @param body Represents the body of the item. Can contain any number of user defined properties.
 * @param options Used for modifying the request (for instance, specifying the partition key).
 */
public async create<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>>;
public async create<T extends ItemDefinition = any>(body: T, options: RequestOptions = {}): Promise<ItemResponse<T>> {
  // Resolve the partition key value for this document from the container's
  // partition key definition so the request is routed to the correct partition.
  const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
  const partitionKey = extractPartitionKey(body, partitionKeyDefinition);

  // Generate random document id if the id is missing in the payload and
  // options.disableAutomaticIdGeneration != true
  if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) {
    body.id = uuid();
  }

  // Validate the resource body (e.g. well-formed id) before issuing the request.
  const err = {};
  if (!isResourceValid(body, err)) {
    throw err;
  }

  const path = getPathFromLink(this.container.url, ResourceType.item);
  const id = getIdFromLink(this.container.url);

  const response = await this.clientContext.create<T>({
    body,
    path,
    resourceType: ResourceType.item,
    resourceId: id,
    options,
    partitionKey
  });

  // Build a reference to the newly created item so callers can chain operations.
  const ref = new Item(this.container, (response.result as any).id, partitionKey, this.clientContext);
  return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref);
}
/**
@ -260,38 +240,33 @@ export class Items {
*/
public async upsert<T extends ItemDefinition>(body: T, options?: RequestOptions): Promise<ItemResponse<T>>;
public async upsert<T extends ItemDefinition>(body: T, options: RequestOptions = {}): Promise<ItemResponse<T>> {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
const { body: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
options.partitionKey = this.container.extractPartitionKey(body, partitionKeyDefinition);
}
const { resource: partitionKeyDefinition } = await this.container.getPartitionKeyDefinition();
const partitionKey = extractPartitionKey(body, partitionKeyDefinition);
// Generate random document id if the id is missing in the payload and
// options.disableAutomaticIdGeneration != true
if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) {
body.id = Helper.generateGuidId();
body.id = uuid();
}
const err = {};
if (!Helper.isResourceValid(body, err)) {
if (!isResourceValid(body, err)) {
throw err;
}
const path = Helper.getPathFromLink(this.container.url, "docs");
const id = Helper.getIdFromLink(this.container.url);
const path = getPathFromLink(this.container.url, ResourceType.item);
const id = getIdFromLink(this.container.url);
const response = (await this.clientContext.upsert<T>(body, path, "docs", id, undefined, options)) as T & Resource;
const response = await this.clientContext.upsert<T>({
body,
path,
resourceType: ResourceType.item,
resourceId: id,
options,
partitionKey
});
const ref = new Item(
this.container,
(response.result as any).id,
(options && options.partitionKey) as string,
this.clientContext
);
return {
body: response.result,
headers: response.headers,
ref,
item: ref
};
const ref = new Item(this.container, (response.result as any).id, partitionKey, this.clientContext);
return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref);
}
}

Просмотреть файл

@ -1,5 +1,5 @@
import { ClientContext } from "../../ClientContext";
import { Constants, Helper } from "../../common";
import { Constants, isResourceValid, ResourceType } from "../../common";
import { CosmosClient } from "../../CosmosClient";
import { RequestOptions } from "../../request";
import { OfferDefinition } from "./OfferDefinition";
@ -33,8 +33,13 @@ export class Offer {
* @param options
*/
public async read(options?: RequestOptions): Promise<OfferResponse> {
  // Offers are addressed by their self-link and id; no partition key applies.
  const response = await this.clientContext.read<OfferDefinition>({
    path: this.url,
    resourceType: ResourceType.offer,
    resourceId: this.id,
    options
  });
  return new OfferResponse(response.result, response.headers, response.code, this);
}
/**
@ -44,17 +49,16 @@ export class Offer {
*/
public async replace(body: OfferDefinition, options?: RequestOptions): Promise<OfferResponse> {
  // Validate the resource body before issuing the request.
  const err = {};
  if (!isResourceValid(body, err)) {
    throw err;
  }
  const response = await this.clientContext.replace<OfferDefinition>({
    body,
    path: this.url,
    resourceType: ResourceType.offer,
    resourceId: this.id,
    options
  });
  return new OfferResponse(response.result, response.headers, response.code, this);
}
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше