TypeScript conversion (#3)
* 2.0.0-0
* TypeScript Conversion
* Promise support
This commit is contained in:
Parent
9ec12a261e
Commit
1173f1d224
|
@ -256,3 +256,5 @@ node_modules
|
|||
**/node_nodules/
|
||||
|
||||
*.dat
|
||||
|
||||
lib/**
|
|
@ -0,0 +1,40 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Mocha Tests",
|
||||
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
|
||||
"args": [
|
||||
"-u",
|
||||
"tdd",
|
||||
"--colors",
|
||||
"${workspaceFolder}/lib/test/**.spec.js",
|
||||
"-g",
|
||||
".*"
|
||||
],
|
||||
"internalConsoleOptions": "openOnSessionStart",
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/lib/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"name": "Attach by Process ID",
|
||||
"processId": "${command:PickProcess}"
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Launch Program",
|
||||
"program": "${workspaceFolder}/samples/TodoApp/bin/www"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"mocha.files.glob":"test/legacy/**/*.js"
|
||||
}
|
2
LICENSE
|
@ -1,5 +1,5 @@
|
|||
The MIT License (MIT)
|
||||
Copyright (c) 2014 Microsoft Corporation
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
92
README.md
|
@ -1,10 +1,6 @@
|
|||
# Microsoft Azure Cosmos DB Node.js SDK
|
||||
# Microsoft Azure Cosmos JavaScript SDK
|
||||
|
||||
![](https://img.shields.io/npm/v/documentdb.svg)
|
||||
![](https://img.shields.io/npm/dm/documentdb.svg)
|
||||
![](https://img.shields.io/github/issues/azure/azure-documentdb-node.svg)
|
||||
|
||||
This project provides Node.js SDK library for [SQL API](https://docs.microsoft.com/en-us/azure/cosmos-db/sql-api-sql-query) of [Azure Cosmos DB
|
||||
This project provides a JavaScript & Node.js SDK library for [SQL API](https://docs.microsoft.com/en-us/azure/cosmos-db/sql-api-sql-query) of [Azure Cosmos
|
||||
Database Service](https://azure.microsoft.com/en-us/services/cosmos-db/). This project also includes samples, tools, and utilities.
|
||||
|
||||
Useful links:
|
||||
|
@ -30,7 +26,7 @@ Node SDK can be consumed in two ways.
|
|||
|
||||
The core module uses the callbacks model for responses, exposed through the DocumentClient
|
||||
|
||||
npm install documentdb
|
||||
npm install @azure/cosmos
|
||||
|
||||
### Install Core Module From Github
|
||||
|
||||
|
@ -55,75 +51,65 @@ Follow these instructions to run the tests locally.
|
|||
|
||||
### Prerequisites
|
||||
|
||||
1. Clone Azure/azure-documentdb-node repository
|
||||
Please clone the source and tests from [https://github.com/Azure/azure-documentdb-node](https://github.com/Azure/azure-documentdb-node)
|
||||
1. Clone Azure/azure-cosmos-js repository
|
||||
|
||||
```bash
|
||||
git clone https://github.com/azure/azure-cosmos-js.git
|
||||
```
|
||||
|
||||
2. Install Node.js and npm
|
||||
[https://docs.npmjs.com/getting-started/installing-node](https://docs.npmjs.com/getting-started/installing-node)
|
||||
|
||||
3. Install mocha package globally
|
||||
3. [Cosmos DB emulator](https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator)
|
||||
- Note: requires a Windows machine or the ability to run a Windows container
|
||||
|
||||
npm install -g mocha
|
||||
4. Install dependencies
|
||||
|
||||
```bash
|
||||
npm i # alias for npm install
|
||||
```
|
||||
|
||||
5. Build the source
|
||||
|
||||
```bash
|
||||
npm run build # compiles the typescript source, runs linting, creates webpack, creates docs
|
||||
```
|
||||
|
||||
### Running the tests
|
||||
|
||||
Using your command-line tool, from the root of your local copy of azure-documentdb-node repository:
|
||||
(If you are contributing changes and submitting a PR, you need to ensure that you run the tests against your local copy of the source, not the published npm package.)
|
||||
|
||||
1. Remove documentdb, if previously installed
|
||||
|
||||
npm remove documentdb
|
||||
|
||||
2. Install documentdb
|
||||
|
||||
npm install source
|
||||
|
||||
3. Change to `test` directory
|
||||
|
||||
cd source\test
|
||||
|
||||
4. Run the tests
|
||||
|
||||
mocha -t 0 -R spec
|
||||
|
||||
If you just want to run the tests against the published npm package then skip steps #1 & #2 proceed directly to step #3
|
||||
```bash
|
||||
npm run test # runs all tests
|
||||
```
|
||||
|
||||
## Examples
|
||||
### Hello World using Callbacks via the Core Module
|
||||
### Hello World
|
||||
|
||||
```js
|
||||
var DocumentClient = require('documentdb').DocumentClient;
|
||||
```ts
|
||||
import { CosmosClient } from "@azure/cosmos"
|
||||
|
||||
var host = "[hostendpoint]"; // Add your endpoint
|
||||
var masterKey = "[database account masterkey]"; // Add the masterkey of the endpoint
|
||||
var client = new DocumentClient(host, {masterKey: masterKey});
|
||||
const host = "[hostendpoint]"; // Add your endpoint
|
||||
const masterKey = "[database account masterkey]"; // Add the masterkey of the endpoint
|
||||
const client = new CosmosClient(host, { masterKey });
|
||||
|
||||
var databaseDefinition = { id: "sample database" };
|
||||
var collectionDefinition = { id: "sample collection" };
|
||||
var documentDefinition = { id: "hello world doc", content: "Hello World!" };
|
||||
|
||||
client.createDatabase(databaseDefinition, function(err, database) {
|
||||
if(err) return console.log(err);
|
||||
async function helloCosmos() {
|
||||
await client.createDatabase(databaseDefinition);
|
||||
console.log('created db');
|
||||
|
||||
client.createCollection(database._self, collectionDefinition, function(err, collection) {
|
||||
if(err) return console.log(err);
|
||||
console.log('created collection');
|
||||
await client.createCollection(database._self, collectionDefinition);
|
||||
console.log('created collection');
|
||||
|
||||
client.createDocument(collection._self, documentDefinition, function(err, document) {
|
||||
if(err) return console.log(err);
|
||||
console.log('Created Document with content: ', document.content);
|
||||
await client.createDocument(collection._self, documentDefinition);
|
||||
console.log('Created Document with content: ', document.content);
|
||||
|
||||
cleanup(client, database);
|
||||
});
|
||||
});
|
||||
await client.deleteDatabase(database._self);
|
||||
console.log("Deleted database");
|
||||
});
|
||||
|
||||
function cleanup(client, database) {
|
||||
client.deleteDatabase(database._self, function(err) {
|
||||
if(err) console.log(err);
|
||||
})
|
||||
}
|
||||
helloCosmos().finally(()=>{});
|
||||
```
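Because the hunk above interleaves the removed callback version with the added promise version, the new example is hard to read as one piece. Assembled from the added lines, it reads roughly as follows. This is a sketch: capturing the created database, collection, and document via `{ result: ... }` destructuring is an assumption borrowed from the TodoApp sample elsewhere in this commit, not something the README hunk itself shows.

```ts
import { CosmosClient } from "@azure/cosmos";

const host = "[hostendpoint]"; // Add your endpoint
const masterKey = "[database account masterkey]"; // Add the masterkey of the endpoint
const client = new CosmosClient(host, { masterKey });

const databaseDefinition = { id: "sample database" };
const collectionDefinition = { id: "sample collection" };
const documentDefinition = { id: "hello world doc", content: "Hello World!" };

async function helloCosmos() {
    // Assumption: each call resolves to an object exposing the created resource
    // under `result`, mirroring the CosmosUtils/TaskDao samples in this commit.
    const { result: database } = await client.createDatabase(databaseDefinition);
    console.log("created db");

    const { result: collection } = await client.createCollection(database._self, collectionDefinition);
    console.log("created collection");

    const { result: document } = await client.createDocument(collection._self, documentDefinition);
    console.log("Created Document with content: ", document.content);

    await client.deleteDatabase(database._self);
    console.log("Deleted database");
}

helloCosmos().catch((err) => console.log(err));
```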
|
||||
|
||||
### YouTube Videos
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
{
|
||||
"folders": [
|
||||
{
|
||||
"path": "."
|
||||
},
|
||||
{
|
||||
"path": "samples\\TodoApp"
|
||||
}
|
||||
],
|
||||
"settings": {
|
||||
"mocha.files.glob":"test/**/*.spec.ts",
|
||||
"mocha.sideBarOptions": {
|
||||
"lens": true,
|
||||
"decoration": true,
|
||||
"autoUpdateTime": 0,
|
||||
"showDebugTestStatus": true
|
||||
},
|
||||
"mocha.runTestsOnSave": "false",
|
||||
"mocha.logVerbose": true,
|
||||
"mocha.options": {
|
||||
"compilers":{
|
||||
"ts": "ts-node/register"
|
||||
}
|
||||
},
|
||||
"mocha.requires": [
|
||||
"ts-node/register"
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
const tests = require.context("./lib/", true, /\.spec\.js$/);
|
||||
|
||||
tests.keys().forEach(tests);
|
|
@ -1,3 +1,9 @@
|
|||
## Changes in 2.0.0-0 ##
|
||||
- Added Promise support
|
||||
- Added token handler option for auth
|
||||
- typings now emitted from source (moved source to TypeScript)
|
||||
- Added CosmosClient (DocumentClient is now considered deprecated); see the usage sketch below
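A minimal sketch of what the new promise-based surface looks like, based on the updated README in this commit (the constructor shape and method name are taken from there; treat this as illustrative rather than a complete API reference):

```ts
import { CosmosClient } from "@azure/cosmos";

// Same (endpoint, { masterKey }) constructor shape the updated README shows.
const client = new CosmosClient("[hostendpoint]", { masterKey: "[masterkey]" });

async function main() {
    // Operations now return promises instead of taking callbacks.
    await client.createDatabase({ id: "sample database" });
}

main().catch(console.error);
```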
|
||||
|
||||
## Changes in 1.14.2 : ##
|
||||
- Updated documentation to use Azure Cosmos DB
|
||||
- Added Support for proxyUrl setting in ConnectionPolicy
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
# Dev docs
|
||||
|
||||
Info on how to build the SDK and run the samples
|
||||
|
||||
## Pre-reqs
|
||||
|
||||
- [Node v6 or above](https://nodejs.org/en/)
|
||||
- Recommend using Node 8 LTS
|
||||
- Recommend using a Node version manager ([nvm-windows](https://github.com/coreybutler/nvm-windows/releases), [nvm (mac/linux)](https://github.com/creationix/nvm/), [n (mac/linux)](https://github.com/tj/n))
|
||||
- npm (comes with Node; all tooling is done via npm scripts)
|
||||
- All OSes should be supported, but only tested on Windows so far (requires a Cosmos DB emulator running on a Windows machine or Windows container on your machine or local network)
|
||||
- (Recommended) [VS Code](https://code.visualstudio.com/)
|
||||
- Cosmos DB (Azure or Local Emulator) (emulator only works on Windows, right now, so mac/linux needs a cloud instance)
|
||||
|
||||
## Building the SDK
|
||||
|
||||
1. Install dependencies `npm i`
|
||||
2. Build library `npm run build`
|
||||
|
||||
## Testing the SDK
|
||||
|
||||
Only a subset of tests are working at the moment (due to API changes, theoretically 😉).
|
||||
|
||||
1. Build the SDK (see above)
|
||||
2. Run all tests `npm run test`
|
||||
|
||||
You can also run the tests via VS Code. There should already be a launch.json for launching the mocha tests. You can modify the `-g` setting to run a specific test (e.g. change `.*` to `validate database CRUD` or whatever your test cases are called).
|
||||
|
||||
# Samples
|
||||
|
||||
Build the SDK and make sure the tests run before you try any samples (they depend on the SDK)
|
||||
|
||||
- [TodoApp](./samples/TodoApp)
|
||||
|
||||
We recommend using [VS code's multi-root workspaces](https://code.visualstudio.com/docs/editor/multi-root-workspaces) for testing the samples, especially if you're using the samples to test the SDK. There is a `launch.json` for the samples that have been updated, and multi-root workspaces will show all `launch.json`s.
|
|
@ -0,0 +1,81 @@
|
|||
// Karma configuration
|
||||
// Generated on Thu May 24 2018 16:35:54 GMT-0700 (Pacific Daylight Time)
|
||||
|
||||
module.exports = function (config) {
|
||||
config.set({
|
||||
// frameworks to use
|
||||
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
|
||||
frameworks: ['mocha'],
|
||||
|
||||
|
||||
// list of files / patterns to load in the browser
|
||||
files: [
|
||||
'./browser-test.js'
|
||||
],
|
||||
|
||||
|
||||
// list of files / patterns to exclude
|
||||
exclude: [
|
||||
'./lib/dist/**'
|
||||
],
|
||||
|
||||
|
||||
// preprocess matching files before serving them to the browser
|
||||
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
|
||||
preprocessors: {
|
||||
'./browser-test.js': [ 'webpack', 'sourcemap' ]
|
||||
},
|
||||
|
||||
webpack: require('./webpack.config.js'),
|
||||
|
||||
webpackMiddleware: {
|
||||
stats: "errors-only"
|
||||
},
|
||||
|
||||
|
||||
// test results reporter to use
|
||||
// possible values: 'dots', 'progress'
|
||||
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
|
||||
reporters: ['progress', 'mocha'],
|
||||
|
||||
|
||||
// web server port
|
||||
port: 9876,
|
||||
|
||||
|
||||
// enable / disable colors in the output (reporters and logs)
|
||||
colors: true,
|
||||
|
||||
|
||||
// level of logging
|
||||
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
|
||||
logLevel: config.LOG_DEBUG,
|
||||
|
||||
|
||||
// enable / disable watching file and executing tests whenever any file changes
|
||||
autoWatch: true,
|
||||
browserDisconnectTimeout: 120000,
|
||||
browserNoActivityTimeout: 120000,
|
||||
browserDisconnectTolerance: 5,
|
||||
|
||||
// start these browsers
|
||||
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
|
||||
browsers: ['Chrome_without_security'],
|
||||
|
||||
customLaunchers: {
|
||||
Chrome_without_security: {
|
||||
base: 'Chrome',
|
||||
flags: ['--disable-web-security', '--auto-open-devtools-for-tabs']
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
// Continuous Integration mode
|
||||
// if true, Karma captures browsers, runs the tests and exits
|
||||
singleRun: false,
|
||||
|
||||
// Concurrency level
|
||||
// how many browser should be started simultaneous
|
||||
concurrency: Infinity
|
||||
})
|
||||
}
|
The diff for this file is not shown because of its large size.
|
@ -0,0 +1,82 @@
|
|||
{
|
||||
"name": "@azure/cosmos",
|
||||
"description": "Azure Cosmos DB Service Node.js SDK for SQL API",
|
||||
"keywords": [
|
||||
"cosmosdb",
|
||||
"cosmos db",
|
||||
"documentdb",
|
||||
"document database",
|
||||
"azure",
|
||||
"nosql",
|
||||
"database",
|
||||
"cloud"
|
||||
],
|
||||
"version": "2.0.0-0",
|
||||
"author": "Microsoft Corporation",
|
||||
"main": "./lib/src/index.js",
|
||||
"types": "./lib/src/index.d.ts",
|
||||
"engine": {
|
||||
"node": ">=0.8"
|
||||
},
|
||||
"scripts": {
|
||||
"clean": "rimraf lib",
|
||||
"lint": "tslint 'src/**/*.ts'",
|
||||
"compile": "echo Using TypeScript && tsc --version && tsc --pretty",
|
||||
"copy-legacy-js": "./node_modules/.bin/copy-cli src/test/legacy/* ./lib/test/legacy/",
|
||||
"docs": "typedoc --mode file --out ./lib/docs ./src",
|
||||
"pack": "webpack",
|
||||
"build": "npm run clean && npm run lint && npm run compile && npm run docs && npm run copy-legacy-js && npm run pack",
|
||||
"test": "mocha ./lib/test/ --recursive --timeout 30000 -i -g .*ignore.js",
|
||||
"test-browser": "karma start ./karma.config.js --single-run"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/mocha": "^5.2.0",
|
||||
"@types/node": "^8.10.10",
|
||||
"@types/priorityqueuejs": "^1.0.1",
|
||||
"@types/semaphore": "^1.1.0",
|
||||
"@types/sinon": "^4.3.1",
|
||||
"@types/tunnel": "^0.0.0",
|
||||
"@types/underscore": "^1.8.8",
|
||||
"copy": "^0.3.2",
|
||||
"grunt": "^0.4.5",
|
||||
"grunt-eslint": "^13.0.0",
|
||||
"grunt-mocha-test": "^0.13.3",
|
||||
"karma": "^2.0.2",
|
||||
"karma-chrome-launcher": "^2.2.0",
|
||||
"karma-cli": "^1.0.1",
|
||||
"karma-firefox-launcher": "^1.1.0",
|
||||
"karma-mocha": "^1.3.0",
|
||||
"karma-mocha-reporter": "^2.2.5",
|
||||
"karma-requirejs": "^1.1.0",
|
||||
"karma-sourcemap-loader": "^0.3.7",
|
||||
"karma-webpack": "^3.0.0",
|
||||
"load-grunt-tasks": "^3.1.0",
|
||||
"mocha": "^5.1.1",
|
||||
"mocha-junit-reporter": "^1.15.0",
|
||||
"mocha-multi-reporters": "^1.1.6",
|
||||
"requirejs": "^2.3.5",
|
||||
"sinon": "^5.0.1",
|
||||
"time-grunt": "^1.2.0",
|
||||
"ts-node": "^6.0.0",
|
||||
"tslint": "^5.9.1",
|
||||
"typedoc": "^0.11.1",
|
||||
"typescript": "^2.8.3",
|
||||
"webpack": "^4.8.3",
|
||||
"webpack-cli": "^2.1.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"big-integer": "^1.6.28",
|
||||
"binary-search-bounds": "2.0.3",
|
||||
"int64-buffer": "^0.1.9",
|
||||
"priorityqueuejs": "1.0.0",
|
||||
"semaphore": "1.0.5",
|
||||
"stream-http": "^2.8.1",
|
||||
"tunnel": "0.0.5",
|
||||
"underscore": "1.8.3"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Azure/azure-cosmos-js"
|
||||
},
|
||||
"license": "MIT"
|
||||
}
|
|
@ -0,0 +1,2 @@
|
|||
!config.js
|
||||
!bin
|
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Launch Program",
|
||||
"program": "${workspaceFolder}/bin/www",
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/../../lib/**"
|
||||
],
|
||||
"env": {
|
||||
"NODE_TLS_REJECT_UNAUTHORIZED": "0"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
const CosmosClient = require('../../').DocumentClient;
|
||||
const config = require('./config');
|
||||
const TaskList = require('./routes/tasklist');
|
||||
const TaskDao = require('./models/taskDao');
|
||||
|
||||
const express = require('express');
|
||||
const path = require('path');
|
||||
const favicon = require('serve-favicon');
|
||||
const logger = require('morgan');
|
||||
const cookieParser = require('cookie-parser');
|
||||
const bodyParser = require('body-parser');
|
||||
|
||||
const index = require('./routes/index');
|
||||
const users = require('./routes/users');
|
||||
|
||||
const app = express();
|
||||
|
||||
// view engine setup
|
||||
app.set('views', path.join(__dirname, 'views'));
|
||||
app.set('view engine', 'jade');
|
||||
|
||||
// uncomment after placing your favicon in /public
|
||||
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
|
||||
app.use(logger('dev'));
|
||||
app.use(bodyParser.json());
|
||||
app.use(bodyParser.urlencoded({ extended: false }));
|
||||
app.use(cookieParser());
|
||||
app.use(express.static(path.join(__dirname, 'public')));
|
||||
|
||||
//Todo App:
|
||||
const docDbClient = new CosmosClient(config.host, {
|
||||
masterKey: config.authKey
|
||||
});
|
||||
const taskDao = new TaskDao(docDbClient, config.databaseId, config.collectionId);
|
||||
const taskList = new TaskList(taskDao);
|
||||
taskDao.init().catch((err) => {
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
app.get('/', (req, res, next) => taskList.showTasks(req, res).catch(next));
|
||||
app.post('/addtask', (req, res, next) => taskList.addTask(req, res).catch(next));
|
||||
app.post('/completetask', (req, res, next) => taskList.completeTask(req, res).catch(next));
|
||||
app.set('view engine', 'jade');
|
||||
|
||||
// catch 404 and forward to error handler
|
||||
app.use(function(req, res, next) {
|
||||
const err = new Error('Not Found');
|
||||
err.status = 404;
|
||||
next(err);
|
||||
});
|
||||
|
||||
// error handler
|
||||
app.use(function(err, req, res, next) {
|
||||
// set locals, only providing error in development
|
||||
res.locals.message = err.message;
|
||||
res.locals.error = req.app.get('env') === 'development' ? err : {};
|
||||
|
||||
// render the error page
|
||||
res.status(err.status || 500);
|
||||
res.render('error');
|
||||
});
|
||||
|
||||
module.exports = app;
|
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const app = require('../app');
|
||||
const debug = require('debug')('todo:server');
|
||||
const http = require('http');
|
||||
|
||||
/**
|
||||
* Get port from environment and store in Express.
|
||||
*/
|
||||
|
||||
const port = normalizePort(process.env.PORT || '3000');
|
||||
app.set('port', port);
|
||||
|
||||
/**
|
||||
* Create HTTP server.
|
||||
*/
|
||||
|
||||
const server = http.createServer(app);
|
||||
|
||||
/**
|
||||
* Listen on provided port, on all network interfaces.
|
||||
*/
|
||||
|
||||
server.listen(port);
|
||||
server.on('error', onError);
|
||||
server.on('listening', onListening);
|
||||
|
||||
/**
|
||||
* Normalize a port into a number, string, or false.
|
||||
*/
|
||||
|
||||
function normalizePort(val) {
|
||||
const port = parseInt(val, 10);
|
||||
|
||||
if (isNaN(port)) {
|
||||
// named pipe
|
||||
return val;
|
||||
}
|
||||
|
||||
if (port >= 0) {
|
||||
// port number
|
||||
return port;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Event listener for HTTP server "error" event.
|
||||
*/
|
||||
|
||||
function onError(error) {
|
||||
if (error.syscall !== 'listen') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const bind = typeof port === 'string'
|
||||
? 'Pipe ' + port
|
||||
: 'Port ' + port;
|
||||
|
||||
// handle specific listen errors with friendly messages
|
||||
switch (error.code) {
|
||||
case 'EACCES':
|
||||
console.error(bind + ' requires elevated privileges');
|
||||
process.exit(1);
|
||||
break;
|
||||
case 'EADDRINUSE':
|
||||
console.error(bind + ' is already in use');
|
||||
process.exit(1);
|
||||
break;
|
||||
default:
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Event listener for HTTP server "listening" event.
|
||||
*/
|
||||
|
||||
function onListening() {
|
||||
const addr = server.address();
|
||||
const bind = typeof addr === 'string'
|
||||
? 'pipe ' + addr
|
||||
: 'port ' + addr.port;
|
||||
debug('Listening on ' + bind);
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
const config = {}
|
||||
|
||||
config.host = process.env.HOST || "https://localhost:8081/";
|
||||
config.authKey = process.env.AUTH_KEY || "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
|
||||
config.databaseId = "ToDoList";
|
||||
config.collectionId = "Items";
|
||||
|
||||
module.exports = config;
|
|
@ -0,0 +1,69 @@
|
|||
const CosmosClient = require('../../../').DocumentClient;
|
||||
|
||||
class CosmosUtils {
|
||||
/**
|
||||
*
|
||||
* @param {CosmosClient} client
|
||||
* @param {string} databaseId
|
||||
*/
|
||||
static async getOrCreateDatabase(client, databaseId) {
|
||||
const querySpec = {
|
||||
query: 'SELECT * FROM root r WHERE r.id= @id',
|
||||
parameters: [{
|
||||
name: '@id',
|
||||
value: databaseId
|
||||
}]
|
||||
};
|
||||
|
||||
try {
|
||||
const {result: result} = await client.queryDatabases(querySpec).toArray();
|
||||
if (result.length === 0) {
|
||||
const databaseSpec = {
|
||||
id: databaseId
|
||||
};
|
||||
|
||||
const {result: database} = await client.createDatabase(databaseSpec);
|
||||
return database;
|
||||
} else {
|
||||
return result[0];
|
||||
}
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {CosmosClient} client
|
||||
* @param {string} databaseLink
|
||||
* @param {string} collectionId
|
||||
*/
|
||||
static async getOrCreateCollection (client, databaseLink, collectionId) {
|
||||
const querySpec = {
|
||||
query: 'SELECT * FROM root r WHERE r.id=@id',
|
||||
parameters: [{
|
||||
name: '@id',
|
||||
value: collectionId
|
||||
}]
|
||||
};
|
||||
|
||||
try {
|
||||
const {result: results} = await client.queryCollections(databaseLink, querySpec).toArray();
|
||||
if (results.length === 0) {
|
||||
const collectionSpec = {
|
||||
id: collectionId
|
||||
};
|
||||
|
||||
|
||||
const {result: collection} = await client.createCollection(databaseLink, collectionSpec);
|
||||
return collection;
|
||||
} else {
|
||||
return results[0];
|
||||
}
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = CosmosUtils;
|
|
@ -0,0 +1,88 @@
|
|||
const CosmosClient = require('../../../').DocumentClient;
|
||||
const CosmosUtils = require('./CosmosUtils');
|
||||
|
||||
class TaskDao {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {CosmosClient} cosmosClient
|
||||
* @param {*} databaseId
|
||||
* @param {*} collectionId
|
||||
*/
|
||||
constructor(cosmosClient, databaseId, collectionId) {
|
||||
this.client = cosmosClient;
|
||||
this.databaseId = databaseId;
|
||||
this.collectionId = collectionId;
|
||||
|
||||
this.database = null;
|
||||
this.collection = null;
|
||||
}
|
||||
|
||||
async init() {
|
||||
try {
|
||||
const db = await CosmosUtils.getOrCreateDatabase(this.client, this.databaseId);
|
||||
this.database = db;
|
||||
const coll = await CosmosUtils.getOrCreateCollection(this.client, this.database._self, this.collectionId);
|
||||
this.collection = coll;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async find(querySpec) {
|
||||
|
||||
if (!this.collection) {
|
||||
throw new Error("Collection is not initialized.");
|
||||
}
|
||||
try {
|
||||
const { result: results } = await this.client.queryDocuments(this.collection._self, querySpec).toArray();
|
||||
return results;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async addItem(item) {
|
||||
item.date = Date.now();
|
||||
item.completed = false;
|
||||
try {
|
||||
const { result: doc } = await this.client.createDocument(this.collection._self, item);
|
||||
return doc;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async updateItem(itemId) {
|
||||
try {
|
||||
const doc = await this.getItem(itemId);
|
||||
doc.completed = true;
|
||||
|
||||
const { result: replaced } = await this.client.replaceDocument(doc._self, doc);
|
||||
return replaced;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async getItem(itemId) {
|
||||
try {
|
||||
const querySpec = {
|
||||
query: 'SELECT * FROM root r WHERE r.id = @id',
|
||||
parameters: [{
|
||||
name: '@id',
|
||||
value: itemId
|
||||
}]
|
||||
};
|
||||
|
||||
const { result: results } = await this.client.queryDocuments(this.collection._self, querySpec).toArray();
|
||||
|
||||
return results[0];
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskDao;
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"name": "todo",
|
||||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"start": "set NODE_TLS_REJECT_UNAUTHORIZED=0 && node ./bin/www"
|
||||
},
|
||||
"dependencies": {
|
||||
"async": "^2.1.2",
|
||||
"body-parser": "~1.15.2",
|
||||
"cookie-parser": "~1.4.3",
|
||||
"debug": "~2.2.0",
|
||||
"express": "~4.14.0",
|
||||
"jade": "~1.11.0",
|
||||
"morgan": "~1.7.0",
|
||||
"serve-favicon": "~2.3.0"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
body {
|
||||
padding: 50px;
|
||||
font: 14px "Lucida Grande", Helvetica, Arial, sans-serif;
|
||||
}
|
||||
a {
|
||||
color: #00B7FF;
|
||||
}
|
||||
.well label {
|
||||
display: block;
|
||||
}
|
||||
.well input {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.btn {
|
||||
margin-top: 5px;
|
||||
border: outset 1px #C8C8C8;
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
# Todo App
|
||||
|
||||
Sample Todo app
|
||||
|
||||
## Prereqs
|
||||
|
||||
- Build the SDK (see [dev.md](../../dev.md))
|
||||
- Node 8 (uses async/await)
|
||||
|
||||
## Config
|
||||
|
||||
If you're using the local emulator with its default config, it should work without setting any additional config. To point at a different account, override the environment variables listed below (an example follows the list).
|
||||
|
||||
**Environment Variables**
|
||||
- `HOST` - URL for the Cosmos DB account (default is https://localhost:8081)
|
||||
- `AUTH_KEY` - master key for the Cosmos DB account (default is the well-known key for the emulator)
|
||||
- `PORT` - port for the web app (default is 3000)
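For example, to point the sample at a non-default account you can override these variables when starting the app. This is a sketch for a bash shell with placeholder values (on Windows cmd, set the variables with `set NAME=value` before running `npm start`):

```bash
# Placeholder values only; substitute your own endpoint and key.
HOST="https://my-account.documents.azure.com:443/" \
AUTH_KEY="<your-master-key>" \
PORT=8080 \
npm start
```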
|
||||
|
||||
## Run
|
||||
|
||||
```bash
|
||||
npm i
|
||||
npm start
|
||||
```
|
||||
|
||||
Open a browser to http://localhost:3000
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
/* GET home page. */
|
||||
router.get('/', function(req, res, next) {
|
||||
res.render('index', { title: 'Express' });
|
||||
});
|
||||
|
||||
module.exports = router;
|
|
@ -0,0 +1,63 @@
|
|||
const CosmosClient = require('../../../').DocumentClient;
|
||||
const TaskDao = require('../models/TaskDao');
|
||||
const async = require('async');
|
||||
|
||||
class TaskList {
|
||||
/**
|
||||
*
|
||||
* @param {TaskDao} taskDao
|
||||
*/
|
||||
constructor(taskDao) {
|
||||
this.taskDao = taskDao;
|
||||
}
|
||||
async showTasks(req, res) {
|
||||
const querySpec = {
|
||||
query: 'SELECT * FROM root r WHERE r.completed=@completed',
|
||||
parameters: [{
|
||||
name: '@completed',
|
||||
value: false
|
||||
}]
|
||||
};
|
||||
|
||||
try {
|
||||
const items = await this.taskDao.find(querySpec);
|
||||
res.render('index', {
|
||||
title: 'My ToDo List ',
|
||||
tasks: items
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async addTask(req, res) {
|
||||
const item = req.body;
|
||||
|
||||
try {
|
||||
await this.taskDao.addItem(item);
|
||||
res.redirect('/');
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async completeTask(req, res) {
|
||||
const completedTasks = Object.keys(req.body);
|
||||
const tasks = [];
|
||||
|
||||
try {
|
||||
completedTasks.forEach((task) => {
|
||||
tasks.push(this.taskDao.updateItem(task));
|
||||
});
|
||||
|
||||
await Promise.all(tasks);
|
||||
|
||||
res.redirect('/');
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskList;
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
/* GET users listing. */
|
||||
router.get('/', function(req, res, next) {
|
||||
res.send('respond with a resource');
|
||||
});
|
||||
|
||||
module.exports = router;
|
|
@ -0,0 +1,6 @@
|
|||
extends layout
|
||||
|
||||
block content
|
||||
h1= message
|
||||
h2= error.status
|
||||
pre #{error.stack}
|
|
@ -0,0 +1,40 @@
|
|||
extends layout
|
||||
|
||||
block content
|
||||
h1 #{title}
|
||||
br
|
||||
|
||||
form(action="/completetask", method="post")
|
||||
table.table.table-striped.table-bordered
|
||||
tr
|
||||
td Name
|
||||
td Category
|
||||
td Date
|
||||
td Complete
|
||||
if (typeof tasks === "undefined")
|
||||
tr
|
||||
td
|
||||
else
|
||||
each task in tasks
|
||||
tr
|
||||
td #{task.name}
|
||||
td #{task.category}
|
||||
- var date = new Date(task.date);
|
||||
- var day = date.getDate();
|
||||
- var month = date.getMonth() + 1;
|
||||
- var year = date.getFullYear();
|
||||
td #{month + "/" + day + "/" + year}
|
||||
td
|
||||
if(task.completed)
|
||||
input(type="checkbox", disabled, name="#{task.id}", value="#{!task.completed}", checked=task.completed)
|
||||
else
|
||||
input(type="checkbox", name="#{task.id}", value="#{!task.completed}", checked=task.completed)
|
||||
button.btn(type="submit") Update tasks
|
||||
hr
|
||||
form.well(action="/addtask", method="post")
|
||||
label Item Name:
|
||||
input(name="name", type="textbox")
|
||||
label Item Category:
|
||||
input(name="category", type="textbox")
|
||||
br
|
||||
button.btn(type="submit") Add item
|
|
@ -0,0 +1,13 @@
|
|||
doctype html
|
||||
html
|
||||
head
|
||||
title= title
|
||||
link(rel='stylesheet', href='//ajax.aspnetcdn.com/ajax/bootstrap/3.3.2/css/bootstrap.min.css')
|
||||
link(rel='stylesheet', href='/stylesheets/style.css')
|
||||
body
|
||||
nav.navbar.navbar-inverse.navbar-fixed-top
|
||||
div.navbar-header
|
||||
a.navbar-brand(href='#') My Tasks
|
||||
block content
|
||||
script(src='//ajax.aspnetcdn.com/ajax/jQuery/jquery-1.11.2.min.js')
|
||||
script(src='//ajax.aspnetcdn.com/ajax/bootstrap/3.3.2/bootstrap.min.js')
|
|
@ -1,31 +0,0 @@
|
|||
{
|
||||
"rules": {
|
||||
"indent": [
|
||||
2,
|
||||
4
|
||||
],
|
||||
"quotes": [
|
||||
2,
|
||||
"double"
|
||||
],
|
||||
"linebreak-style": [
|
||||
2,
|
||||
"windows"
|
||||
],
|
||||
"semi": [
|
||||
2,
|
||||
"always"
|
||||
],
|
||||
"curly": 0,
|
||||
"dot-notation": 0,
|
||||
"eol-last": 0,
|
||||
"no-redeclare": 0,
|
||||
"no-shadow": 0,
|
||||
"no-undef": 0,
|
||||
"no-underscore-dangle": 0,
|
||||
"no-unused-vars": 0
|
||||
},
|
||||
"env": {
|
||||
"node": true
|
||||
}
|
||||
}
|
|
@ -1,257 +0,0 @@
|
|||
*.cmd
|
||||
|
||||
config.js
|
||||
.vs/
|
||||
|
||||
#################
|
||||
## Eclipse
|
||||
#################
|
||||
|
||||
*.pydevproject
|
||||
.project
|
||||
.metadata
|
||||
bin/
|
||||
obj/
|
||||
tmp/
|
||||
*.tmp
|
||||
*.bak
|
||||
*.swp
|
||||
*~.nib
|
||||
local.properties
|
||||
.classpath
|
||||
.settings/
|
||||
.loadpath
|
||||
|
||||
# External tool builders
|
||||
.externalToolBuilders/
|
||||
|
||||
# Locally stored "Eclipse launch configurations"
|
||||
*.launch
|
||||
|
||||
# CDT-specific
|
||||
.cproject
|
||||
|
||||
# PDT-specific
|
||||
.buildpath
|
||||
|
||||
|
||||
#################
|
||||
## Visual Studio
|
||||
#################
|
||||
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
*.user
|
||||
*.sln.docstates
|
||||
|
||||
# Build results
|
||||
|
||||
[Dd]ebug/
|
||||
[Rr]elease/
|
||||
x64/
|
||||
build/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
*_i.c
|
||||
*_p.c
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.log
|
||||
*.scc
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# NCrunch
|
||||
*.ncrunch*
|
||||
.*crunch*.local.xml
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.Publish.xml
|
||||
*.pubxml
|
||||
|
||||
# NuGet Packages Directory
|
||||
## TODO: If you have NuGet Package Restore enabled, uncomment the next line
|
||||
#packages/
|
||||
|
||||
# Windows Azure Build Output
|
||||
csx
|
||||
*.build.csdef
|
||||
|
||||
# Windows Store app package directory
|
||||
AppPackages/
|
||||
|
||||
# Others
|
||||
sql/
|
||||
*.Cache
|
||||
ClientBin/
|
||||
[Ss]tyle[Cc]op.*
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.[Pp]ublish.xml
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file to a newer
|
||||
# Visual Studio version. Backup files are not needed, because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
|
||||
# SQL Server files
|
||||
App_Data/*.mdf
|
||||
App_Data/*.ldf
|
||||
|
||||
#############
|
||||
## Windows detritus
|
||||
#############
|
||||
|
||||
# Windows image file caches
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
|
||||
# Folder config file
|
||||
Desktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Mac crap
|
||||
.DS_Store
|
||||
|
||||
|
||||
#############
|
||||
## Python
|
||||
#############
|
||||
|
||||
*.py[cod]
|
||||
|
||||
# Packages
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist/
|
||||
build/
|
||||
eggs/
|
||||
parts/
|
||||
var/
|
||||
sdist/
|
||||
develop-eggs/
|
||||
.installed.cfg
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
.tox
|
||||
|
||||
#Translations
|
||||
*.mo
|
||||
|
||||
#Mr Developer
|
||||
.mr.developer.cfg
|
||||
|
||||
#############
|
||||
## Node.js
|
||||
#############
|
||||
|
||||
# Logs
|
||||
logs
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
|
||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directory
|
||||
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
|
||||
node_modules
|
||||
|
||||
# Dependency sub-directories for samples
|
||||
**/node_nodules/
|
||||
|
||||
*.dat
|
|
@ -1,203 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<SchemaVersion>2.0</SchemaVersion>
|
||||
<ProjectGuid>{411c2c02-66ef-40c8-a964-af8fdacf3961}</ProjectGuid>
|
||||
<StartupFile>test\test.js</StartupFile>
|
||||
<WorkingDirectory>.</WorkingDirectory>
|
||||
<OutputPath>.</OutputPath>
|
||||
<ProjectTypeGuids>{3AF33F2E-1136-4D97-BBB7-1795711AC8B8};{349c5851-65df-11da-9384-00065b846f21};{9092AA53-FB77-4645-B42D-1CCCA6BD08BD}</ProjectTypeGuids>
|
||||
<VisualStudioVersion Condition="'$(VisualStudioVersion)' == ''">11.0</VisualStudioVersion>
|
||||
<VSToolsPath Condition="'$(VSToolsPath)' == ''">$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)</VSToolsPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)' == 'Debug'" />
|
||||
<PropertyGroup Condition="'$(Configuration)' == 'Release'" />
|
||||
<ItemGroup>
|
||||
<Content Include="package.json" />
|
||||
<Content Include="test\BaselineTest.PathParser.json" />
|
||||
<Content Include="test\mocha.json" />
|
||||
<Content Include="test\data.json" />
|
||||
<Content Include="test\readme.md" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Compile Include="Gruntfile.js" />
|
||||
<Compile Include="index.js" />
|
||||
<Compile Include="lib\endpointDiscoveryRetryPolicy.js" />
|
||||
<Compile Include="lib\globalEndpointManager.js" />
|
||||
<Compile Include="lib\helper.js" />
|
||||
<Compile Include="lib\platform.js" />
|
||||
<Compile Include="lib\queryExecutionContext\aggregators.js" />
|
||||
<Compile Include="lib\queryExecutionContext\documentProducer.js" />
|
||||
<Compile Include="lib\queryExecutionContext\orderByDocumentProducerComparator.js" />
|
||||
<Compile Include="lib\queryExecutionContext\orderByQueryExecutionContext.js" />
|
||||
<Compile Include="lib\queryExecutionContext\parallelQueryExecutionContext.js" />
|
||||
<Compile Include="lib\queryExecutionContext\endpointComponent.js" />
|
||||
<Compile Include="lib\queryExecutionContext\parallelQueryExecutionContextBase.js" />
|
||||
<Compile Include="lib\queryExecutionContext\partitionedQueryExecutionContextInfoParser.js" />
|
||||
<Compile Include="lib\queryExecutionContext\pipelinedQueryExecutionContext.js" />
|
||||
<Compile Include="lib\queryExecutionContext\proxyQueryExecutionContext.js" />
|
||||
<Compile Include="lib\queryExecutionContext\defaultQueryExecutionContext.js" />
|
||||
<Compile Include="lib\queryExecutionContext\headerUtils.js" />
|
||||
<Compile Include="lib\range.js" />
|
||||
<Compile Include="lib\resourceId.js" />
|
||||
<Compile Include="lib\retryOptions.js" />
|
||||
<Compile Include="lib\retryUtility.js" />
|
||||
<Compile Include="lib\resourceThrottleRetryPolicy.js" />
|
||||
<Compile Include="lib\routing\smartRoutingMapProvider.js" />
|
||||
<Compile Include="lib\routing\partitionKeyRangeCache.js" />
|
||||
<Compile Include="lib\routing\inMemoryCollectionRoutingMap.js" />
|
||||
<Compile Include="lib\sessionContainer.js" />
|
||||
<Compile Include="lib\sessionReadRetryPolicy.js" />
|
||||
<Compile Include="lib\statusCodes.js" />
|
||||
<Compile Include="lib\uriFactory.js" />
|
||||
<Compile Include="test\aggregateQueryTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\baseTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\encodingTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\queryTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\ruPerMinTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\sessionContainerTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\sessionTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\smartRoutingMapProviderTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\crossPartitionTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\inMemoryCollectionRoutingMapTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\splitTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\sslVerificationTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\uriFactoryTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\_testConfig.js" />
|
||||
<Compile Include="lib\auth.js" />
|
||||
<Compile Include="lib\base.js" />
|
||||
<Compile Include="lib\constants.js" />
|
||||
<Compile Include="lib\documentclient.js" />
|
||||
<Compile Include="lib\documents.js" />
|
||||
<Compile Include="lib\hash\consistentHashRing.js" />
|
||||
<Compile Include="lib\hash\hashPartitionResolver.js" />
|
||||
<Compile Include="lib\hash\murmurHash.js" />
|
||||
<Compile Include="lib\index.js" />
|
||||
<Compile Include="lib\queryIterator.js" />
|
||||
<Compile Include="lib\range.js" />
|
||||
<Compile Include="lib\request.js" />
|
||||
<Compile Include="test\hashPartitionResolverTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\consistentHashRingTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\documentClientTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\rangeTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\rangePartitionResolverTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\murmurHashTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\test.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\queryExecutionContext\defaultQueryExecutionContextTests.js">
|
||||
<TestFramework>Mocha</TestFramework>
|
||||
</Compile>
|
||||
<Compile Include="test\_testConfig.js" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include=".eslintrc" />
|
||||
<None Include="package.json" />
|
||||
<None Include="test\readme.md" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Folder Include="lib" />
|
||||
<Folder Include="lib\hash" />
|
||||
<Folder Include="lib\routing\" />
|
||||
<Folder Include="lib\queryExecutionContext\" />
|
||||
<Folder Include="test" />
|
||||
<Folder Include="test\queryExecutionContext\" />
|
||||
<Folder Include="test\routing\" />
|
||||
<Folder Include="typings\" />
|
||||
<Folder Include="typings\globals\" />
|
||||
<Folder Include="typings\globals\grunt\" />
|
||||
<Folder Include="typings\globals\mocha\" />
|
||||
<Folder Include="typings\globals\node\" />
|
||||
<Folder Include="typings\globals\semaphore\" />
|
||||
<Folder Include="typings\globals\underscore\" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<TypeScriptCompile Include="typings\globals\grunt\index.d.ts" />
|
||||
<TypeScriptCompile Include="typings\globals\mocha\index.d.ts" />
|
||||
<TypeScriptCompile Include="typings\globals\node\index.d.ts" />
|
||||
<TypeScriptCompile Include="typings\globals\semaphore\index.d.ts" />
|
||||
<TypeScriptCompile Include="typings\globals\underscore\index.d.ts" />
|
||||
<TypeScriptCompile Include="typings\index.d.ts" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
|
||||
<!--Do not delete the following Import Project. While this appears to do nothing it is a marker for setting TypeScript properties before our import that depends on them.-->
|
||||
<Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\TypeScript\Microsoft.TypeScript.targets" Condition="False" />
|
||||
<Import Project="$(VSToolsPath)\Node.js Tools\Microsoft.NodejsTools.targets" />
|
||||
<ProjectExtensions>
|
||||
<VisualStudio>
|
||||
<FlavorProperties GUID="{349c5851-65df-11da-9384-00065b846f21}">
|
||||
<WebProjectProperties>
|
||||
<UseIIS>False</UseIIS>
|
||||
<AutoAssignPort>True</AutoAssignPort>
|
||||
<DevelopmentServerPort>0</DevelopmentServerPort>
|
||||
<DevelopmentServerVPath>/</DevelopmentServerVPath>
|
||||
<IISUrl>http://localhost:48022/</IISUrl>
|
||||
<NTLMAuthentication>False</NTLMAuthentication>
|
||||
<UseCustomServer>True</UseCustomServer>
|
||||
<CustomServerUrl>http://localhost:1337</CustomServerUrl>
|
||||
<SaveServerSettingsInUserFile>False</SaveServerSettingsInUserFile>
|
||||
</WebProjectProperties>
|
||||
</FlavorProperties>
|
||||
<FlavorProperties GUID="{349c5851-65df-11da-9384-00065b846f21}" User="">
|
||||
<WebProjectProperties>
|
||||
<StartPageUrl>
|
||||
</StartPageUrl>
|
||||
<StartAction>CurrentPage</StartAction>
|
||||
<AspNetDebugging>True</AspNetDebugging>
|
||||
<SilverlightDebugging>False</SilverlightDebugging>
|
||||
<NativeDebugging>False</NativeDebugging>
|
||||
<SQLDebugging>False</SQLDebugging>
|
||||
<ExternalProgram>
|
||||
</ExternalProgram>
|
||||
<StartExternalURL>
|
||||
</StartExternalURL>
|
||||
<StartCmdLineArguments>
|
||||
</StartCmdLineArguments>
|
||||
<StartWorkingDirectory>
|
||||
</StartWorkingDirectory>
|
||||
<EnableENC>False</EnableENC>
|
||||
<AlwaysStartWebServerOnDebug>False</AlwaysStartWebServerOnDebug>
|
||||
</WebProjectProperties>
|
||||
</FlavorProperties>
|
||||
</VisualStudio>
|
||||
</ProjectExtensions>
|
||||
</Project>
|
|
@ -1,22 +0,0 @@
|
|||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 14
|
||||
VisualStudioVersion = 14.0.24720.0
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{9092AA53-FB77-4645-B42D-1CCCA6BD08BD}") = "DocumentDB.Node.master", "DocumentDB.Node.master.njsproj", "{411C2C02-66EF-40C8-A964-AF8FDACF3961}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
Release|Any CPU = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||
{411C2C02-66EF-40C8-A964-AF8FDACF3961}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{411C2C02-66EF-40C8-A964-AF8FDACF3961}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{411C2C02-66EF-40C8-A964-AF8FDACF3961}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{411C2C02-66EF-40C8-A964-AF8FDACF3961}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
EndGlobal
|
|
@ -1,20 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -1,24 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2014 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
module.exports = require('./lib/');
|
|
@ -1,96 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var crypto = require("crypto");
|
||||
|
||||
var AuthHandler = {
|
||||
getAuthorizationHeader: function (documentClient, verb, path, resourceId, resourceType, headers) {
|
||||
if (documentClient.masterKey) {
|
||||
return encodeURIComponent(this.getAuthorizationTokenUsingMasterKey(verb, resourceId, resourceType, headers, documentClient.masterKey));
|
||||
} else if (documentClient.resourceTokens) {
|
||||
return encodeURIComponent(this.getAuthorizationTokenUsingResourceTokens(documentClient.resourceTokens, path, resourceId));
|
||||
}
|
||||
},
|
||||
|
||||
getAuthorizationTokenUsingMasterKey: function (verb, resourceId, resourceType, headers, masterKey) {
|
||||
var key = new Buffer(masterKey, "base64");
|
||||
|
||||
var text = (verb || "").toLowerCase() + "\n" +
|
||||
(resourceType || "").toLowerCase() + "\n" +
|
||||
(resourceId || "") + "\n" +
|
||||
(headers["x-ms-date"] || "").toLowerCase() + "\n" +
|
||||
(headers["date"] || "").toLowerCase() + "\n";
|
||||
|
||||
var body = new Buffer(text, "utf8");
|
||||
|
||||
var signature = crypto.createHmac("sha256", key).update(body).digest("base64");
|
||||
|
||||
var MasterToken = "master";
|
||||
|
||||
var TokenVersion = "1.0";
|
||||
|
||||
return "type=" + MasterToken + "&ver=" + TokenVersion + "&sig=" + signature;
|
||||
},
|
||||
|
||||
getAuthorizationTokenUsingResourceTokens: function (resourceTokens, path, resourceId) {
|
||||
if (resourceTokens && Object.keys(resourceTokens).length > 0) {
|
||||
// For database account access(through getDatabaseAccount API), path and resourceId are "",
|
||||
// so in this case we return the first token to be used for creating the auth header as the service will accept any token in this case
|
||||
if (!path && !resourceId) {
|
||||
return resourceTokens[Object.keys(resourceTokens)[0]];
|
||||
}
|
||||
|
||||
if (resourceId && resourceTokens[resourceId]) {
|
||||
return resourceTokens[resourceId];
|
||||
}
|
||||
|
||||
//minimum valid path /dbs
|
||||
if (!path || path.length < 4) {
|
||||
return null;
|
||||
}
|
||||
|
||||
//remove '/' from left and right of path
|
||||
path = path[0] == '/' ? path.substring(1) : path;
|
||||
path = path[path.length - 1] == '/' ? path.substring(0, path.length - 1) : path;
|
||||
|
||||
var pathSegments = (path && path.split("/")) || [];
|
||||
|
||||
//if it's an incomplete path like /dbs/db1/colls/, start from the paretn resource
|
||||
var index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
|
||||
for (; index > 0; index -= 2) {
|
||||
var id = decodeURI(pathSegments[index]);
|
||||
if (resourceTokens[id]) {
|
||||
return resourceTokens[id];
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = AuthHandler;
|
||||
}
|
|
@ -1,543 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var AuthHandler = require("./auth");
|
||||
var Constants = require("./constants");
|
||||
var Platform = require("./platform");
|
||||
|
||||
//SCRIPT START
|
||||
function initializeProperties(target, members, prefix) {
|
||||
var keys = Object.keys(members);
|
||||
var properties;
|
||||
var i, len;
|
||||
for (i = 0, len = keys.length; i < len; i++) {
|
||||
var key = keys[i];
|
||||
var enumerable = key.charCodeAt(0) !== /*_*/ 95;
|
||||
var member = members[key];
|
||||
if (member && typeof member === "object") {
|
||||
if (member.value !== undefined || typeof member.get === "function" || typeof member.set === "function") {
|
||||
if (member.enumerable === undefined) {
|
||||
member.enumerable = enumerable;
|
||||
}
|
||||
if (prefix && member.setName && typeof member.setName === "function") {
|
||||
member.setName(prefix + "." + key);
|
||||
}
|
||||
properties = properties || {};
|
||||
properties[key] = member;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (!enumerable) {
|
||||
properties = properties || {};
|
||||
properties[key] = { value: member, enumerable: enumerable, configurable: true, writable: true };
|
||||
continue;
|
||||
}
|
||||
target[key] = member;
|
||||
}
|
||||
if (properties) {
|
||||
Object.defineProperties(target, properties);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a new namespace with the specified name under the specified parent namespace.
|
||||
* @param {Object} parentNamespace - The parent namespace.
|
||||
* @param {String} name - The name of the new namespace.
|
||||
* @param {Object} members - The members of the new namespace.
|
||||
* @returns {Function} - The newly-defined namespace.
|
||||
*/
|
||||
function defineWithParent(parentNamespace, name, members) {
|
||||
var currentNamespace = parentNamespace || {};
|
||||
|
||||
if (name) {
|
||||
var namespaceFragments = name.split(".");
|
||||
for (var i = 0, len = namespaceFragments.length; i < len; i++) {
|
||||
var namespaceName = namespaceFragments[i];
|
||||
if (!currentNamespace[namespaceName]) {
|
||||
Object.defineProperty(currentNamespace, namespaceName,
|
||||
{ value: {}, writable: false, enumerable: true, configurable: true }
|
||||
);
|
||||
}
|
||||
currentNamespace = currentNamespace[namespaceName];
|
||||
}
|
||||
}
|
||||
|
||||
if (members) {
|
||||
initializeProperties(currentNamespace, members, name || "<ANONYMOUS>");
|
||||
}
|
||||
|
||||
return currentNamespace;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a new namespace with the specified name.
|
||||
* @param {String} name - The name of the namespace. This could be a dot-separated name for nested namespaces.
|
||||
* @param {Object} members - The members of the new namespace.
|
||||
* @returns {Function} - The newly-defined namespace.
|
||||
*/
|
||||
function define(name, members) {
|
||||
return defineWithParent(undefined, name, members);
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a class using the given constructor and the specified instance members.
|
||||
* @param {Function} constructor - A constructor function that is used to instantiate this class.
|
||||
* @param {Object} instanceMembers - The set of instance fields, properties, and methods to be made available on the class.
|
||||
* @param {Object} staticMembers - The set of static fields, properties, and methods to be made available on the class.
|
||||
* @returns {Function} - The newly-defined class.
|
||||
*/
|
||||
function defineClass(constructor, instanceMembers, staticMembers) {
|
||||
constructor = constructor || function () { };
|
||||
if (instanceMembers) {
|
||||
initializeProperties(constructor.prototype, instanceMembers);
|
||||
}
|
||||
if (staticMembers) {
|
||||
initializeProperties(constructor, staticMembers);
|
||||
}
|
||||
return constructor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a sub-class based on the supplied baseClass parameter, using prototypal inheritance.
|
||||
* @param {Function} baseClass - The class to inherit from.
|
||||
* @param {Function} constructor - A constructor function that is used to instantiate this class.
|
||||
* @param {Object} instanceMembers - The set of instance fields, properties, and methods to be made available on the class.
|
||||
* @param {Object} staticMembers - The set of static fields, properties, and methods to be made available on the class.
|
||||
* @returns {Function} - The newly-defined class.
|
||||
*/
|
||||
function derive(baseClass, constructor, instanceMembers, staticMembers) {
|
||||
if (baseClass) {
|
||||
constructor = constructor || function () { };
|
||||
var basePrototype = baseClass.prototype;
|
||||
constructor.prototype = Object.create(basePrototype);
|
||||
Object.defineProperty(constructor.prototype, "constructor", { value: constructor, writable: true, configurable: true, enumerable: true });
|
||||
if (instanceMembers) {
|
||||
initializeProperties(constructor.prototype, instanceMembers);
|
||||
}
|
||||
if (staticMembers) {
|
||||
initializeProperties(constructor, staticMembers);
|
||||
}
|
||||
return constructor;
|
||||
} else {
|
||||
return defineClass(constructor, instanceMembers, staticMembers);
|
||||
}
|
||||
}
|
||||
|
||||
var Base = {
|
||||
NotImplementedException: "NotImplementedException",
|
||||
|
||||
defineWithParent: defineWithParent,
|
||||
|
||||
define: define,
|
||||
|
||||
defineClass: defineClass,
|
||||
|
||||
derive: derive,
|
||||
|
||||
extend: function (obj, extent) {
|
||||
for (var property in extent) {
|
||||
if (typeof extent[property] !== "function") {
|
||||
obj[property] = extent[property];
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
map: function (list, fn) {
|
||||
var result = [];
|
||||
for (var i = 0, n = list.length; i < n; i++) {
|
||||
result.push(fn(list[i]));
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
jsonStringifyAndEscapeNonASCII: function (arg) {
|
||||
// escapes non-ASCII characters as \uXXXX
|
||||
return JSON.stringify(arg).replace(/[\u0080-\uFFFF]/g, function(m) {
|
||||
return "\\u" + ("0000" + m.charCodeAt(0).toString(16)).slice(-4);
|
||||
});
|
||||
},
|
||||
|
||||
getHeaders: function (documentClient, defaultHeaders, verb, path, resourceId, resourceType, options, partitionKeyRangeId) {
|
||||
|
||||
var headers = Base.extend({}, defaultHeaders);
|
||||
options = options || {};
|
||||
|
||||
if (options.continuation) {
|
||||
headers[Constants.HttpHeaders.Continuation] = options.continuation;
|
||||
}
|
||||
|
||||
if (options.preTriggerInclude) {
|
||||
headers[Constants.HttpHeaders.PreTriggerInclude] = options.preTriggerInclude.constructor === Array ? options.preTriggerInclude.join(",") : options.preTriggerInclude;
|
||||
}
|
||||
|
||||
if (options.postTriggerInclude) {
|
||||
headers[Constants.HttpHeaders.PostTriggerInclude] = options.postTriggerInclude.constructor === Array ? options.postTriggerInclude.join(",") : options.postTriggerInclude;
|
||||
}
|
||||
|
||||
if (options.offerType) {
|
||||
headers[Constants.HttpHeaders.OfferType] = options.offerType;
|
||||
}
|
||||
|
||||
if (options.offerThroughput) {
|
||||
headers[Constants.HttpHeaders.OfferThroughput] = options.offerThroughput;
|
||||
}
|
||||
|
||||
if (options.maxItemCount) {
|
||||
headers[Constants.HttpHeaders.PageSize] = options.maxItemCount;
|
||||
}
|
||||
|
||||
if (options.accessCondition) {
|
||||
if (options.accessCondition.type === "IfMatch") {
|
||||
headers[Constants.HttpHeaders.IfMatch] = options.accessCondition.condition;
|
||||
} else {
|
||||
headers[Constants.HttpHeaders.IfNoneMatch] = options.accessCondition.condition;
|
||||
}
|
||||
}
|
||||
|
||||
if (options.a_im) {
|
||||
headers[Constants.HttpHeaders.A_IM] = options.a_im;
|
||||
}
|
||||
|
||||
if (options.indexingDirective) {
|
||||
headers[Constants.HttpHeaders.IndexingDirective] = options.indexingDirective;
|
||||
}
|
||||
|
||||
// TODO: add consistency level validation.
|
||||
if (options.consistencyLevel) {
|
||||
headers[Constants.HttpHeaders.ConsistencyLevel] = options.consistencyLevel;
|
||||
}
|
||||
|
||||
if (options.resourceTokenExpirySeconds) {
|
||||
headers[Constants.HttpHeaders.ResourceTokenExpiry] = options.resourceTokenExpirySeconds;
|
||||
}
|
||||
|
||||
// TODO: add session token automatic handling in case of session consistency.
|
||||
if (options.sessionToken) {
|
||||
headers[Constants.HttpHeaders.SessionToken] = options.sessionToken;
|
||||
}
|
||||
|
||||
if (options.enableScanInQuery) {
|
||||
headers[Constants.HttpHeaders.EnableScanInQuery] = options.enableScanInQuery;
|
||||
}
|
||||
|
||||
if (options.enableCrossPartitionQuery) {
|
||||
headers[Constants.HttpHeaders.EnableCrossPartitionQuery] = options.enableCrossPartitionQuery;
|
||||
}
|
||||
|
||||
if (options.maxDegreeOfParallelism != undefined) {
|
||||
headers[Constants.HttpHeaders.ParallelizeCrossPartitionQuery] = true;
|
||||
}
|
||||
|
||||
if (options.populateQuotaInfo) {
|
||||
headers[Constants.HttpHeaders.PopulateQuotaInfo] = true;
|
||||
}
|
||||
|
||||
// If the user is not using a partition resolver, we add options.partitionKey to the header for elastic collections
|
||||
if (documentClient.partitionResolver === undefined || documentClient.partitionResolver === null) {
|
||||
if (options.partitionKey !== undefined) {
|
||||
var partitionKey = options.partitionKey;
|
||||
if (partitionKey === null || partitionKey.constructor !== Array) {
|
||||
partitionKey = [partitionKey];
|
||||
}
|
||||
headers[Constants.HttpHeaders.PartitionKey] = this.jsonStringifyAndEscapeNonASCII(partitionKey);
|
||||
}
|
||||
}
|
||||
|
||||
if (documentClient.masterKey) {
|
||||
headers[Constants.HttpHeaders.XDate] = new Date().toUTCString();
|
||||
}
|
||||
|
||||
if (documentClient.masterKey || documentClient.resourceTokens) {
|
||||
headers[Constants.HttpHeaders.Authorization] = AuthHandler.getAuthorizationHeader(documentClient, verb, path, resourceId, resourceType, headers);
|
||||
}
|
||||
|
||||
if (verb === "post" || verb === "put") {
|
||||
if (!headers[Constants.HttpHeaders.ContentType]) {
|
||||
headers[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.Json;
|
||||
}
|
||||
}
|
||||
|
||||
if (!headers[Constants.HttpHeaders.Accept]) {
|
||||
headers[Constants.HttpHeaders.Accept] = Constants.MediaTypes.Json;
|
||||
}
|
||||
|
||||
if (partitionKeyRangeId !== undefined) {
|
||||
headers[Constants.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId;
|
||||
}
|
||||
|
||||
if (options.enableScriptLogging) {
|
||||
headers[Constants.HttpHeaders.EnableScriptLogging] = options.enableScriptLogging;
|
||||
}
|
||||
|
||||
if (options.offerEnableRUPerMinuteThroughput) {
|
||||
headers[Constants.HttpHeaders.OfferIsRUPerMinuteThroughputEnabled] = true;
|
||||
}
|
||||
|
||||
if (options.disableRUPerMinuteUsage) {
|
||||
headers[Constants.HttpHeaders.DisableRUPerMinuteUsage] = true;
|
||||
}
|
||||
|
||||
return headers;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
parseLink: function (resourcePath) {
|
||||
if (resourcePath.length === 0) {
|
||||
/* for DatabaseAccount case, both type and objectBody will be undefined. */
|
||||
return {
|
||||
type: undefined,
|
||||
objectBody: undefined
|
||||
};
|
||||
}
|
||||
|
||||
if (resourcePath[resourcePath.length - 1] !== "/") {
|
||||
resourcePath = resourcePath + "/";
|
||||
}
|
||||
|
||||
if (resourcePath[0] !== "/") {
|
||||
resourcePath = "/" + resourcePath;
|
||||
}
|
||||
|
||||
/*
|
||||
/ The path will be in the form of /[resourceType]/[resourceId]/ .... /[resourceType]//[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/
|
||||
/ or /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/
|
||||
/ The result of split will be in the form of [[resourceType], [resourceId], ..., [resourceType], [resourceId], ""]
|
||||
/ In the first case, to extract the resourceId it will be the element before last ( at length - 2 ) and the type will be the one before it ( at length - 3 )
|
||||
/ In the second case, to extract the resource type it will be the element before last ( at length - 2 )
|
||||
*/
|
||||
var pathParts = resourcePath.split("/");
|
||||
var id, type;
|
||||
if (pathParts.length % 2 === 0) {
|
||||
// request in form /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId].
|
||||
id = pathParts[pathParts.length - 2];
|
||||
type = pathParts[pathParts.length - 3];
|
||||
} else {
|
||||
// request in form /[resourceType]/[resourceId]/ .... /[resourceType]/.
|
||||
id = pathParts[pathParts.length - 3];
|
||||
type = pathParts[pathParts.length - 2];
|
||||
}
|
||||
|
||||
var result = {
|
||||
type: type,
|
||||
objectBody: {
|
||||
id: id,
|
||||
self: resourcePath
|
||||
}
|
||||
};
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
parsePath: function (path) {
|
||||
var pathParts = [];
|
||||
var currentIndex = 0;
|
||||
|
||||
var throwError = function () {
|
||||
throw new Error("Path " + path + " is invalid at index " + currentIndex);
|
||||
};
|
||||
|
||||
var getEscapedToken = function () {
|
||||
var quote = path[currentIndex];
|
||||
var newIndex = ++currentIndex;
|
||||
|
||||
while (true) {
|
||||
newIndex = path.indexOf(quote, newIndex);
|
||||
if (newIndex == -1) {
|
||||
throwError();
|
||||
}
|
||||
|
||||
if (path[newIndex - 1] !== '\\') break;
|
||||
|
||||
++newIndex;
|
||||
}
|
||||
|
||||
var token = path.substr(currentIndex, newIndex - currentIndex);
|
||||
currentIndex = newIndex + 1;
|
||||
return token;
|
||||
};
|
||||
|
||||
var getToken = function () {
|
||||
var newIndex = path.indexOf('/', currentIndex);
|
||||
var token = null;
|
||||
if (newIndex == -1) {
|
||||
token = path.substr(currentIndex);
|
||||
currentIndex = path.length;
|
||||
}
|
||||
else {
|
||||
token = path.substr(currentIndex, newIndex - currentIndex);
|
||||
currentIndex = newIndex;
|
||||
}
|
||||
|
||||
token = token.trim();
|
||||
return token;
|
||||
};
|
||||
|
||||
while (currentIndex < path.length) {
|
||||
if (path[currentIndex] !== '/') {
|
||||
throwError();
|
||||
}
|
||||
|
||||
if (++currentIndex == path.length) break;
|
||||
|
||||
if (path[currentIndex] === '\"' || path[currentIndex] === '\'') {
|
||||
pathParts.push(getEscapedToken());
|
||||
}
|
||||
else {
|
||||
pathParts.push(getToken());
|
||||
}
|
||||
}
|
||||
|
||||
return pathParts;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
getDatabaseLink: function (link) {
|
||||
return link.split('/').slice(0, 2).join('/');
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
getCollectionLink: function (link) {
|
||||
return link.split('/').slice(0, 4).join('/');
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
getAttachmentIdFromMediaId: function (mediaId) {
|
||||
// Replace - with / on the incoming mediaId. This will preserve the / so that we can revert it later.
|
||||
var buffer = new Buffer(mediaId.replace(/-/g, "/"), "base64");
|
||||
var ResoureIdLength = 20;
|
||||
var attachmentId = "";
|
||||
if (buffer.length > ResoureIdLength) {
|
||||
// After the base64 conversion, change the / back to a - to get the proper attachmentId
|
||||
attachmentId = buffer.toString("base64", 0, ResoureIdLength).replace(/\//g, "-");
|
||||
} else {
|
||||
attachmentId = mediaId;
|
||||
}
|
||||
|
||||
return attachmentId;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
getHexaDigit: function () {
|
||||
return Math.floor(Math.random() * 16).toString(16);
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
generateGuidId: function () {
|
||||
var id = "";
|
||||
|
||||
for (var i = 0; i < 8; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (var i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (var i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (var i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (var i = 0; i < 12; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
return id;
|
||||
},
|
||||
|
||||
isLinkNameBased: function (link) {
|
||||
var parts = link.split("/");
|
||||
var firstId = "";
|
||||
var count = 0;
|
||||
// Get the first id from path.
|
||||
for (var i = 0; i < parts.length; ++i) {
|
||||
if (!parts[i]) {
|
||||
// Skip empty string.
|
||||
continue;
|
||||
}
|
||||
++count;
|
||||
if (count === 1 && parts[i].toLowerCase() !== "dbs") {
|
||||
return false;
|
||||
}
|
||||
if (count === 2) {
|
||||
firstId = parts[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!firstId) return false;
|
||||
if (firstId.length !== 8) return true;
|
||||
var decodedDataLength = Platform.getDecodedDataLength(firstId);
|
||||
if (decodedDataLength !== 4) return true;
|
||||
return false;
|
||||
},
|
||||
/** @ignore */
|
||||
_trimSlashes: function (source) {
|
||||
return source.replace(Constants.RegularExpressions.TrimLeftSlashes, "")
|
||||
.replace(Constants.RegularExpressions.TrimRightSlashes, "");
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_isValidCollectionLink: function (link) {
|
||||
if (typeof link !== "string") {
|
||||
return false;
|
||||
}
|
||||
|
||||
var parts = Base._trimSlashes(link).split("/");
|
||||
|
||||
if (parts && parts.length !== 4) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (parts[0] !== "dbs") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (parts[2] !== "colls") {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
};
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = Base;
|
||||
}
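
Note (illustrative, not taken from this commit): among the helpers removed above, `parseLink` normalizes a resource link and reads the resource type and id from the tail of the split path. A hedged TypeScript sketch of that logic follows, with an assumed return shape.

```typescript
// Sketch of the deleted parseLink logic; the ParsedLink shape is an assumption.
interface ParsedLink {
    type?: string;
    objectBody?: { id: string; self: string };
}

function parseLink(resourcePath: string): ParsedLink {
    if (resourcePath.length === 0) {
        // DatabaseAccount case: both type and objectBody stay undefined.
        return { type: undefined, objectBody: undefined };
    }
    if (!resourcePath.endsWith("/")) { resourcePath = resourcePath + "/"; }
    if (!resourcePath.startsWith("/")) { resourcePath = "/" + resourcePath; }

    const parts = resourcePath.split("/");
    // Even part count: the link ends with /[type]/[id]/; odd: it ends with /[type]/ (a feed link).
    const even = parts.length % 2 === 0;
    const id = even ? parts[parts.length - 2] : parts[parts.length - 3];
    const type = even ? parts[parts.length - 3] : parts[parts.length - 2];
    return { type, objectBody: { id, self: resourcePath } };
}
```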
|
The diff for this file is not shown because it is too large.
|
@@ -1,317 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base"),
|
||||
RetryOptions = require("./retryOptions");
|
||||
//SCRIPT START
|
||||
|
||||
var AzureDocuments = Base.defineClass(null, null,
|
||||
{
|
||||
/**
|
||||
* Represents a DatabaseAccount in the Azure Cosmos DB database service. A DatabaseAccount is the container for databases.
|
||||
* @global
|
||||
* @property {string} DatabasesLink - The self-link for Databases in the databaseAccount.
|
||||
* @property {string} MediaLink - The self-link for Media in the databaseAccount.
|
||||
* @property {number} MaxMediaStorageUsageInMB - Attachment content (media) storage quota in MBs ( Retrieved from gateway ).
|
||||
* @property {number} CurrentMediaStorageUsageInMB - <p> Current attachment content (media) usage in MBs (Retrieved from gateway )<br>
|
||||
Value is returned from cached information updated periodically and is not guaranteed to be real time. </p>
|
||||
* @property {object} ConsistencyPolicy - Gets the UserConsistencyPolicy settings.
|
||||
* @property {string} ConsistencyPolicy.defaultConsistencyLevel - The default consistency level and it's of type {@link ConsistencyLevel}.
|
||||
* @property {number} ConsistencyPolicy.maxStalenessPrefix - In bounded staleness consistency, the maximum allowed staleness in terms of difference in sequence numbers (aka version).
|
||||
* @property {number} ConsistencyPolicy.maxStalenessIntervalInSeconds - In bounded staleness consistency, the maximum allowed staleness in terms of time interval.
|
||||
|
||||
* @property {Array} WritableLocations - The list of writable locations for a geo-replicated database account.
|
||||
* @property {Array} ReadableLocations - The list of readable locations for a geo-replicated database account.
|
||||
*/
|
||||
DatabaseAccount: Base.defineClass(function () {
|
||||
this._writableLocations = [];
|
||||
this._readableLocations = [];
|
||||
|
||||
Object.defineProperty(this, "DatabasesLink", {
|
||||
value: "",
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "MediaLink", {
|
||||
value: "",
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "MaxMediaStorageUsageInMB", {
|
||||
value: 0,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "CurrentMediaStorageUsageInMB", {
|
||||
value: 0,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "ConsumedDocumentStorageInMB", {
|
||||
value: 0,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "ReservedDocumentStorageInMB", {
|
||||
value: 0,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "ProvisionedDocumentStorageInMB", {
|
||||
value: 0,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "ConsistencyPolicy", {
|
||||
value: "",
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "WritableLocations", {
|
||||
get: function () {
|
||||
return this._writableLocations;
|
||||
},
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "ReadableLocations", {
|
||||
get: function () {
|
||||
return this._readableLocations;
|
||||
},
|
||||
enumerable: true
|
||||
});
|
||||
}),
|
||||
|
||||
/**
|
||||
* <p>Represents the consistency levels supported for Azure Cosmos DB client operations.<br>
|
||||
* The requested ConsistencyLevel must match or be weaker than that provisioned for the database account.<br>
|
||||
* Consistency levels by order of strength are Strong, BoundedStaleness, Session and Eventual.</p>
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Strong Strong Consistency guarantees that read operations always return the value that was last written.
|
||||
* @property BoundedStaleness Bounded Staleness guarantees that reads are not too out-of-date. This can be configured based on number of operations (MaxStalenessPrefix) or time (MaxStalenessIntervalInSeconds).
|
||||
* @property Session Session Consistency guarantees monotonic reads (you never read old data, then new, then old again), monotonic writes (writes are ordered)
|
||||
and read your writes (your writes are immediately visible to your reads) within any single session.
|
||||
* @property Eventual Eventual Consistency guarantees that reads will return a subset of writes. All writes
|
||||
will eventually be available for reads.
|
||||
* @property ConsistentPrefix ConsistentPrefix Consistency guarantees that reads will return some prefix of all writes with no gaps.
|
||||
All writes will eventually be available for reads.
|
||||
*/
|
||||
ConsistencyLevel: Object.freeze({
|
||||
Strong: "Strong",
|
||||
BoundedStaleness: "BoundedStaleness",
|
||||
Session: "Session",
|
||||
Eventual: "Eventual",
|
||||
ConsistentPrefix: "ConsistentPrefix"
|
||||
}),
|
||||
|
||||
|
||||
/**
|
||||
* Specifies the supported indexing modes.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Consistent <p>Index is updated synchronously with a create or update operation. <br>
|
||||
With consistent indexing, query behavior is the same as the default consistency level for the collection. The index is
|
||||
always kept up to date with the data. </p>
|
||||
* @property Lazy <p>Index is updated asynchronously with respect to a create or update operation. <br>
|
||||
With lazy indexing, queries are eventually consistent. The index is updated when the collection is idle.</p>
|
||||
*/
|
||||
IndexingMode: Object.freeze({
|
||||
Consistent: "consistent",
|
||||
Lazy: "lazy",
|
||||
None: "none"
|
||||
}),
|
||||
|
||||
/**
|
||||
* Specifies the supported Index types.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Hash This is supplied for a path which has no sorting requirement.
|
||||
* This kind of an index has better precision than corresponding range index.
|
||||
* @property Range This is supplied for a path which requires sorting.
|
||||
* @property Spatial This is supplied for a path which requires geospatial indexing.
|
||||
*/
|
||||
|
||||
IndexKind: Object.freeze({
|
||||
Hash: "Hash",
|
||||
Range: "Range",
|
||||
Spatial: "Spatial"
|
||||
}),
|
||||
|
||||
DataType: Object.freeze({
|
||||
Number: "Number",
|
||||
String: "String",
|
||||
Point: "Point",
|
||||
LineString: "LineString",
|
||||
Polygon: "Polygon"
|
||||
}),
|
||||
|
||||
PartitionKind: Object.freeze({
|
||||
Hash: "Hash"
|
||||
}),
|
||||
|
||||
ConnectionMode: Object.freeze({
|
||||
Gateway: 0
|
||||
}),
|
||||
|
||||
QueryCompatibilityMode: Object.freeze({
|
||||
Default: 0,
|
||||
Query: 1,
|
||||
SqlQuery: 2
|
||||
}),
|
||||
|
||||
/**
|
||||
* Enum for media read mode values.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Buffered Content is buffered at the client and not directly streamed from the content store.
|
||||
<p>Use Buffered to reduce the time taken to read and write media files.</p>
|
||||
* @property Streamed Content is directly streamed from the content store without any buffering at the client.
|
||||
<p>Use Streamed to reduce the client memory overhead of reading and writing media files. </p>
|
||||
*/
|
||||
MediaReadMode: Object.freeze({
|
||||
Buffered: "Buffered",
|
||||
Streamed: "Streamed"
|
||||
}),
|
||||
|
||||
/**
|
||||
* Enum for permission mode values.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property None Permission not valid.
|
||||
* @property Read Permission applicable for read operations only.
|
||||
* @property All Permission applicable for all operations.
|
||||
*/
|
||||
PermissionMode: Object.freeze({
|
||||
None: "none",
|
||||
Read: "read",
|
||||
All: "all"
|
||||
}),
|
||||
|
||||
/**
|
||||
* Enum for trigger type values.
|
||||
* Specifies the type of the trigger.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Pre Trigger should be executed before the associated operation(s).
|
||||
* @property Post Trigger should be executed after the associated operation(s).
|
||||
*/
|
||||
TriggerType: Object.freeze({
|
||||
Pre: "pre",
|
||||
Post: "post"
|
||||
}),
|
||||
|
||||
/**
|
||||
* Enum for trigger operation values.
|
||||
* specifies the operations on which a trigger should be executed.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property All All operations.
|
||||
* @property Create Create operations only.
|
||||
* @property Update Update operations only.
|
||||
* @property Delete Delete operations only.
|
||||
* @property Replace Replace operations only.
|
||||
*/
|
||||
TriggerOperation: Object.freeze({
|
||||
All: "all",
|
||||
Create: "create",
|
||||
Update: "update",
|
||||
Delete: "delete",
|
||||
Replace: "replace"
|
||||
}),
|
||||
|
||||
/**
|
||||
* Enum for udf type values.
|
||||
* Specifies the types of user defined functions.
|
||||
* @readonly
|
||||
* @enum {string}
|
||||
* @property Javascript Javascript type.
|
||||
*/
|
||||
UserDefinedFunctionType: Object.freeze({
|
||||
Javascript: "Javascript"
|
||||
}),
|
||||
|
||||
/**
|
||||
* @global
|
||||
* Represents the Connection policy associated with a DocumentClient in the Azure Cosmos DB database service.
|
||||
* @property {string} MediaReadMode - Attachment content (aka media) download mode. Should be one of the values of {@link MediaReadMode}
|
||||
* @property {number} MediaRequestTimeout - Time to wait for response from network peer for attachment content (aka media) operations. Represented in milliseconds.
|
||||
* @property {number} RequestTimeout - Request timeout (time to wait for response from network peer). Represented in milliseconds.
|
||||
* @property {bool} EnableEndpointDiscovery - Flag to enable/disable automatic redirecting of requests based on read/write operations.
|
||||
* @property {Array} PreferredLocations - List of azure regions to be used as preferred locations for read requests.
|
||||
* @property {RetryOptions} RetryOptions - RetryOptions instance which defines several configurable properties used during retry.
|
||||
* @property {bool} DisableSSLVerification - Flag to disable SSL verification for the requests. SSL verification is enabled by default. Don't set this when targeting production endpoints.
|
||||
* This is intended to be used only when targeting the emulator endpoint, to avoid failing your requests with SSL-related errors.
|
||||
* @property {string} ProxyUrl - HTTP/HTTPS proxy URL.
|
||||
*/
|
||||
ConnectionPolicy: Base.defineClass(function() {
|
||||
Object.defineProperty(this, "_defaultRequestTimeout", {
|
||||
value: 60000,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: false // this is the default value, so it could be excluded during JSON.stringify
|
||||
});
|
||||
|
||||
// defaultMediaRequestTimeout is based upon the blob client timeout and the retry policy.
|
||||
Object.defineProperty(this, "_defaultMediaRequestTimeout", {
|
||||
value: 300000,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: false // this is the default value, so it could be excluded during JSON.stringify
|
||||
});
|
||||
|
||||
this.ConnectionMode = AzureDocuments.ConnectionMode.Gateway;
|
||||
this.MediaReadMode = AzureDocuments.MediaReadMode.Buffered;
|
||||
this.MediaRequestTimeout = this._defaultMediaRequestTimeout;
|
||||
this.RequestTimeout = this._defaultRequestTimeout;
|
||||
this.EnableEndpointDiscovery = true;
|
||||
this.PreferredLocations = [];
|
||||
this.RetryOptions = new RetryOptions();
|
||||
this.DisableSSLVerification = false;
|
||||
this.ProxyUrl = "";
|
||||
})
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = AzureDocuments;
|
||||
}
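
Note (a sketch only, assuming the shape of the converted code): the frozen enum-like objects and `ConnectionPolicy` defaults deleted above map naturally onto TypeScript string enums and a class with field initializers. The rendering below is one possibility, not necessarily what the converted SDK ships; the `RetryOptions` field is omitted to keep the sketch self-contained.

```typescript
// Hypothetical TypeScript rendering of the deleted ConsistencyLevel and
// ConnectionPolicy defaults; values copied from the removed file, shapes assumed.
enum ConsistencyLevel {
    Strong = "Strong",
    BoundedStaleness = "BoundedStaleness",
    Session = "Session",
    Eventual = "Eventual",
    ConsistentPrefix = "ConsistentPrefix",
}

class ConnectionPolicy {
    ConnectionMode = 0;               // Gateway
    MediaReadMode = "Buffered";
    MediaRequestTimeout = 300000;     // based on the blob client timeout and retry policy
    RequestTimeout = 60000;
    EnableEndpointDiscovery = true;
    PreferredLocations: string[] = [];
    DisableSSLVerification = false;   // only meant for the emulator endpoint
    ProxyUrl = "";
    // RetryOptions intentionally omitted from this sketch.
}
```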
|
|
@@ -1,81 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base");
|
||||
|
||||
//SCRIPT START
|
||||
/**
|
||||
* This class implements the retry policy for endpoint discovery.
|
||||
* @property {int} _maxRetryAttemptCount - Max number of retry attempts to perform.
|
||||
* @property {int} currentRetryAttemptCount - Current retry attempt count.
|
||||
* @property {object} globalEndpointManager - The GlobalEndpointManager instance.
|
||||
* @property {int} retryAfterInMilliseconds - Retry interval in milliseconds.
|
||||
*/
|
||||
var EndpointDiscoveryRetryPolicy = Base.defineClass(
|
||||
/**
|
||||
* @constructor EndpointDiscoveryRetryPolicy
|
||||
* @param {object} globalEndpointManager - The GlobalEndpointManager instance.
|
||||
*/
|
||||
function (globalEndpointManager) {
|
||||
this._maxRetryAttemptCount = EndpointDiscoveryRetryPolicy.maxRetryAttemptCount;
|
||||
this.currentRetryAttemptCount = 0;
|
||||
this.globalEndpointManager = globalEndpointManager;
|
||||
this.retryAfterInMilliseconds = EndpointDiscoveryRetryPolicy.retryAfterInMilliseconds;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Determines whether the request should be retried or not.
|
||||
* @param {object} err - Error returned by the request.
|
||||
* @param {function} callback - The callback function which takes bool argument which specifies whether the request will be retried or not.
|
||||
*/
|
||||
shouldRetry: function (err, callback) {
|
||||
if (err) {
|
||||
if (this.currentRetryAttemptCount < this._maxRetryAttemptCount && this.globalEndpointManager.enableEndpointDiscovery) {
|
||||
this.currentRetryAttemptCount++;
|
||||
console.log("Write region was changed, refreshing the regions list from database account and will retry the request.");
|
||||
var that = this;
|
||||
this.globalEndpointManager.refreshEndpointList(function (writeEndpoint, readEndpoint) {
|
||||
that.globalEndpointManager.setWriteEndpoint(writeEndpoint);
|
||||
that.globalEndpointManager.setReadEndpoint(readEndpoint);
|
||||
callback(true);
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
return callback(false);
|
||||
}
|
||||
},
|
||||
{
|
||||
maxRetryAttemptCount : 120,
|
||||
retryAfterInMilliseconds : 1000,
|
||||
FORBIDDEN_STATUS_CODE : 403,
|
||||
WRITE_FORBIDDEN_SUB_STATUS_CODE : 3
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = EndpointDiscoveryRetryPolicy;
|
||||
}
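
Note: the commit title mentions Promise support; purely as an illustration (this exact API is an assumption, not taken from the diff), the callback-based `shouldRetry` deleted above could be wrapped in a promise roughly like this.

```typescript
// Hypothetical promise-based variant of the deleted callback API; illustrative only.
interface EndpointManagerLike {
    enableEndpointDiscovery: boolean;
    refreshEndpointList(cb: (writeEndpoint: string, readEndpoint: string) => void): void;
    setWriteEndpoint(endpoint: string): void;
    setReadEndpoint(endpoint: string): void;
}

class EndpointDiscoveryRetryPolicy {
    static readonly maxRetryAttemptCount = 120;
    static readonly retryAfterInMilliseconds = 1000;
    currentRetryAttemptCount = 0;

    constructor(private globalEndpointManager: EndpointManagerLike) {}

    shouldRetry(err: unknown): Promise<boolean> {
        if (!err
            || this.currentRetryAttemptCount >= EndpointDiscoveryRetryPolicy.maxRetryAttemptCount
            || !this.globalEndpointManager.enableEndpointDiscovery) {
            return Promise.resolve(false);
        }
        this.currentRetryAttemptCount++;
        // Refresh the region list, update both endpoints, then signal that a retry should happen.
        return new Promise<boolean>((resolve) => {
            this.globalEndpointManager.refreshEndpointList((writeEndpoint, readEndpoint) => {
                this.globalEndpointManager.setWriteEndpoint(writeEndpoint);
                this.globalEndpointManager.setReadEndpoint(readEndpoint);
                resolve(true);
            });
        });
    }
}
```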
|
|
@@ -1,272 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base")
|
||||
, Constants = require("./constants")
|
||||
, url = require("url");
|
||||
|
||||
//SCRIPT START
|
||||
/**
|
||||
* This internal class implements the logic for endpoint management for geo-replicated
|
||||
database accounts.
|
||||
* @property {object} client - The document client instance.
|
||||
* @property {string} defaultEndpoint - The endpoint used to create the client instance.
|
||||
* @property {bool} enableEndpointDiscovery - Flag to enable/disable automatic redirecting of requests based on read/write operations.
|
||||
* @property {Array} preferredLocations - List of azure regions to be used as preferred locations for read requests.
|
||||
* @property {bool} isEndpointCacheInitialized - Flag to determine whether the endpoint cache is initialized or not.
|
||||
*/
|
||||
var GlobalEndpointManager = Base.defineClass(
|
||||
/**
|
||||
* @constructor GlobalEndpointManager
|
||||
* @param {object} client - The document client instance.
|
||||
*/
|
||||
function (client) {
|
||||
this.client = client;
|
||||
this.defaultEndpoint = client.urlConnection;
|
||||
this._readEndpoint = client.urlConnection;
|
||||
this._writeEndpoint = client.urlConnection;
|
||||
this.enableEndpointDiscovery = client.connectionPolicy.EnableEndpointDiscovery;
|
||||
this.preferredLocations = client.connectionPolicy.PreferredLocations;
|
||||
this.isEndpointCacheInitialized = false;
|
||||
},
|
||||
{
|
||||
/** Gets the current read endpoint from the endpoint cache.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {function} callback - The callback function which takes readEndpoint(string) as an argument.
|
||||
*/
|
||||
getReadEndpoint: function (callback) {
|
||||
if (!this.isEndpointCacheInitialized) {
|
||||
this.refreshEndpointList(function (writeEndpoint, readEndpoint) {
|
||||
callback(readEndpoint);
|
||||
});
|
||||
} else {
|
||||
callback(this._readEndpoint);
|
||||
}
|
||||
},
|
||||
|
||||
/** Sets the current read endpoint.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {string} readEndpoint - The endpoint to be set as readEndpoint.
|
||||
*/
|
||||
setReadEndpoint: function (readEndpoint) {
|
||||
this._readEndpoint = readEndpoint;
|
||||
},
|
||||
|
||||
/** Gets the current write endpoint from the endpoint cache.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {function} callback - The callback function which takes writeEndpoint(string) as an argument.
|
||||
*/
|
||||
getWriteEndpoint: function (callback) {
|
||||
if (!this.isEndpointCacheInitialized) {
|
||||
this.refreshEndpointList(function (writeEndpoint, readEndpoint) {
|
||||
callback(writeEndpoint);
|
||||
});
|
||||
} else {
|
||||
callback(this._writeEndpoint);
|
||||
}
|
||||
},
|
||||
|
||||
/** Sets the current write endpoint.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {string} writeEndpoint - The endpoint to be set as writeEndpoint.
|
||||
*/
|
||||
setWriteEndpoint: function (writeEndpoint) {
|
||||
this._writeEndpoint = writeEndpoint;
|
||||
},
|
||||
|
||||
/** Refreshes the endpoint list by retrieving the writable and readable locations
|
||||
from the geo-replicated database account and then updating the locations cache.
|
||||
We skip the refreshing if EnableEndpointDiscovery is set to False
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {function} callback - The callback function which takes writeEndpoint(string) and readEndpoint(string) as arguments.
|
||||
*/
|
||||
refreshEndpointList: function (callback) {
|
||||
var writableLocations = [];
|
||||
var readableLocations = [];
|
||||
var databaseAccount;
|
||||
|
||||
var that = this;
|
||||
if (this.enableEndpointDiscovery) {
|
||||
this._getDatabaseAccount(function (databaseAccount) {
|
||||
if (databaseAccount) {
|
||||
writableLocations = databaseAccount.WritableLocations;
|
||||
readableLocations = databaseAccount.ReadableLocations;
|
||||
}
|
||||
|
||||
// Read and Write endpoints will be initialized to default endpoint if we were not able to get the database account info
|
||||
that._updateLocationsCache(writableLocations, readableLocations, function (endpoints) {
|
||||
that._writeEndpoint = endpoints[0];
|
||||
that._readEndpoint = endpoints[1];
|
||||
that.isEndpointCacheInitialized = true;
|
||||
callback(that._writeEndpoint, that._readEndpoint);
|
||||
});
|
||||
});
|
||||
} else {
|
||||
callback(that._writeEndpoint, that._readEndpoint);
|
||||
}
|
||||
},
|
||||
|
||||
/** Gets the database account, first by using the default endpoint; if that doesn't return it,
|
||||
uses the endpoints for the preferred locations, in the order they are specified, to get
|
||||
the database account.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {function} callback - The callback function which takes databaseAccount(object) as an argument.
|
||||
*/
|
||||
_getDatabaseAccount: function (callback) {
|
||||
var that = this;
|
||||
var options = { urlConnection: this.defaultEndpoint };
|
||||
this.client.getDatabaseAccount(options, function (err, databaseAccount) {
|
||||
// If for any reason (non-globaldb related) we are not able to get the database account from the above call to getDatabaseAccount,
|
||||
// we try to get this information from any of the preferred locations that the user might have specified (by creating a locational endpoint),
|
||||
// swallowing the error each time, until we get the database account; we return null at the end if we are not able to get that info from any endpoint.
|
||||
|
||||
if (err) {
|
||||
var func = function (defaultEndpoint, preferredLocations, index) {
|
||||
if (index < preferredLocations.length) {
|
||||
var locationalEndpoint = that._getLocationalEndpoint(defaultEndpoint, preferredLocations[index]);
|
||||
var options = { urlConnection: locationalEndpoint };
|
||||
that.client.getDatabaseAccount(options, function (err, databaseAccount) {
|
||||
if (err) {
|
||||
func(defaultEndpoint, preferredLocations, index + 1);
|
||||
} else {
|
||||
return callback(databaseAccount);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
return callback(null);
|
||||
}
|
||||
}
|
||||
func(that.defaultEndpoint, that.preferredLocations, 0);
|
||||
|
||||
} else {
|
||||
return callback(databaseAccount);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/** Gets the locational endpoint using the location name passed to it using the default endpoint.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {string} defaultEndpoint - The default endpoint from which the locational endpoint is derived.
|
||||
* @param {string} locationName - The location name for the azure region like "East US".
|
||||
*/
|
||||
_getLocationalEndpoint: function (defaultEndpoint, locationName) {
|
||||
// For defaultEndpoint like 'https://contoso.documents.azure.com:443/' parse it to generate URL format
|
||||
// This defaultEndpoint should be the global endpoint (and cannot be a locational endpoint); we agreed to document that.
|
||||
var endpointUrl = url.parse(defaultEndpoint, true, true);
|
||||
|
||||
// hostname attribute in endpointUrl will return 'contoso.documents.azure.com'
|
||||
if (endpointUrl.hostname) {
|
||||
var hostnameParts = (endpointUrl.hostname).toString().toLowerCase().split(".");
|
||||
if (hostnameParts) {
|
||||
// globalDatabaseAccountName will return 'contoso'
|
||||
var globalDatabaseAccountName = hostnameParts[0];
|
||||
|
||||
// Prepare the locationalDatabaseAccountName as contoso-EastUS for locationName 'East US'
|
||||
var locationalDatabaseAccountName = globalDatabaseAccountName + "-" + locationName.replace(" ", "");
|
||||
|
||||
// Replace 'contoso' with 'contoso-EastUS' and return locationalEndpoint as https://contoso-EastUS.documents.azure.com:443/
|
||||
var locationalEndpoint = defaultEndpoint.toLowerCase().replace(globalDatabaseAccountName, locationalDatabaseAccountName);
|
||||
return locationalEndpoint;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
/** Updates the read and write endpoints from the passed-in readable and writable locations.
|
||||
* @memberof GlobalEndpointManager
|
||||
* @instance
|
||||
* @param {Array} writableLocations - The list of writable locations for the geo-enabled database account.
|
||||
* @param {Array} readableLocations - The list of readable locations for the geo-enabled database account.
|
||||
* @param {function} callback - The function to be called as callback after executing this method.
|
||||
*/
|
||||
_updateLocationsCache: function (writableLocations, readableLocations, callback) {
|
||||
var writeEndpoint;
|
||||
var readEndpoint;
|
||||
// Use the default endpoint as Read and Write endpoints if EnableEndpointDiscovery
|
||||
// is set to False.
|
||||
if (!this.enableEndpointDiscovery) {
|
||||
writeEndpoint = this.defaultEndpoint;
|
||||
readEndpoint = this.defaultEndpoint;
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
}
|
||||
|
||||
// Use the default endpoint as Write endpoint if there are no writable locations, or
|
||||
// first writable location as Write endpoint if there are writable locations
|
||||
if (writableLocations.length === 0) {
|
||||
writeEndpoint = this.defaultEndpoint;
|
||||
} else {
|
||||
writeEndpoint = writableLocations[0][Constants.DatabaseAccountEndpoint];
|
||||
}
|
||||
|
||||
// Use the Write endpoint as Read endpoint if there are no readable locations
|
||||
if (readableLocations.length === 0) {
|
||||
readEndpoint = writeEndpoint;
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
} else {
|
||||
// Use the writable location as Read endpoint if there are no preferred locations or
|
||||
// none of the preferred locations are in read or write locations
|
||||
readEndpoint = writeEndpoint;
|
||||
|
||||
if (!this.preferredLocations) {
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
}
|
||||
|
||||
for (var i= 0; i < this.preferredLocations.length; i++) {
|
||||
var preferredLocation = this.preferredLocations[i];
|
||||
// Use the first readable location as Read endpoint from the preferred locations
|
||||
for (var j = 0; j < readableLocations.length; j++) {
|
||||
var readLocation = readableLocations[j];
|
||||
if (readLocation[Constants.Name] === preferredLocation) {
|
||||
readEndpoint = readLocation[Constants.DatabaseAccountEndpoint];
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
}
|
||||
}
|
||||
// Else, use the first writable location as Read endpoint from the preferred locations
|
||||
for (var k = 0; k < writableLocations.length; k++) {
|
||||
var writeLocation = writableLocations[k];
|
||||
if (writeLocation[Constants.Name] === preferredLocation) {
|
||||
readEndpoint = writeLocation[Constants.DatabaseAccountEndpoint];
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return callback([writeEndpoint, readEndpoint]);
|
||||
}
|
||||
}
|
||||
});
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = GlobalEndpointManager;
|
||||
}
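
Note (illustrative only): the key derivation in the deleted `_getLocationalEndpoint` turns `https://contoso.documents.azure.com:443/` plus the location `East US` into `https://contoso-EastUS.documents.azure.com:443/`. A standalone TypeScript sketch of that step, with an assumed function name:

```typescript
import * as url from "url";

// Sketch of the deleted _getLocationalEndpoint: derive a per-region endpoint
// from the global endpoint and a location name such as "East US".
function getLocationalEndpoint(defaultEndpoint: string, locationName: string): string | null {
    const endpointUrl = url.parse(defaultEndpoint, true, true);
    if (!endpointUrl.hostname) {
        return null;
    }
    // "contoso" from "contoso.documents.azure.com"
    const globalDatabaseAccountName = endpointUrl.hostname.toLowerCase().split(".")[0];
    // "contoso-EastUS" for the location name "East US"
    const locationalDatabaseAccountName = globalDatabaseAccountName + "-" + locationName.replace(" ", "");
    // e.g. "https://contoso-EastUS.documents.azure.com:443/"
    return defaultEndpoint.toLowerCase().replace(globalDatabaseAccountName, locationalDatabaseAccountName);
}
```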
|
|
@@ -1,106 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base");
|
||||
var MurmurHash = require('./murmurHash.js').MurmurHash;
|
||||
|
||||
//SCRIPT START
|
||||
var ConsistentHashRing = Base.defineClass(
|
||||
/**
|
||||
* Initializes a new instance of the ConsistentHashRing
|
||||
* @param {string[]} nodes - Array of collection links
|
||||
* @param {object} options - Options to initialize the ConsistentHashRing
|
||||
* @param {function} options.computeHash - Function to compute the hash for a given link or partition key
|
||||
* @param {number} options.numberOfVirtualNodesPerCollection - Number of points in the ring to assign to each collection link
|
||||
*/
|
||||
function (nodes, options) {
|
||||
ConsistentHashRing._throwIfInvalidNodes(nodes);
|
||||
|
||||
options = options || {};
|
||||
options.numberOfVirtualNodesPerCollection = options.numberOfVirtualNodesPerCollection || 128;
|
||||
options.computeHash = options.computeHash || MurmurHash.hash;
|
||||
|
||||
this._computeHash = options.computeHash;
|
||||
this._partitions = ConsistentHashRing._constructPartitions(nodes, options.numberOfVirtualNodesPerCollection, options.computeHash);
|
||||
}, {
|
||||
getNode: function (key) {
|
||||
var hash = this._computeHash(key);
|
||||
var partition = ConsistentHashRing._search(this._partitions, hash);
|
||||
return this._partitions[partition].node;
|
||||
}
|
||||
},{
|
||||
/** @ignore */
|
||||
_constructPartitions: function (nodes, partitionsPerNode, computeHashFunction) {
|
||||
var partitions = new Array();
|
||||
nodes.forEach(function (node) {
|
||||
var hashValue = computeHashFunction(node);
|
||||
for (var j = 0; j < partitionsPerNode; j++) {
|
||||
partitions.push({
|
||||
hashValue: hashValue,
|
||||
node: node
|
||||
});
|
||||
|
||||
hashValue = computeHashFunction(hashValue);
|
||||
}
|
||||
});
|
||||
|
||||
partitions.sort(function (x, y) {
|
||||
return ConsistentHashRing._compareHashes(x.hashValue, y.hashValue);
|
||||
});
|
||||
return partitions;
|
||||
},
|
||||
/** @ignore */
|
||||
_compareHashes: function (x, y) {
|
||||
if (x < y) return -1;
|
||||
if (x > y) return 1;
|
||||
return 0;
|
||||
},
|
||||
/** @ignore */
|
||||
_search: function (partitions, hashValue) {
|
||||
for (var i = 0; i < partitions.length - 1; i++) {
|
||||
if (hashValue >= partitions[i].hashValue && hashValue < partitions[i + 1].hashValue) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
|
||||
return partitions.length - 1;
|
||||
},
|
||||
/** @ignore */
|
||||
_throwIfInvalidNodes: function (nodes) {
|
||||
if (Array.isArray(nodes)) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Error("Invalid argument: 'nodes' has to be an array.");
|
||||
}
|
||||
}
|
||||
|
||||
);
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.ConsistentHashRing = ConsistentHashRing;
|
||||
}
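
Note (illustrative only): the ring lookup deleted above is a linear scan over partitions sorted by hash; the key lands in the partition whose hash range contains it, otherwise it wraps to the last partition. A minimal TypeScript sketch with assumed names:

```typescript
// Sketch of the deleted ConsistentHashRing._search; Partition is an assumed shape.
interface Partition {
    hashValue: number;
    node: string;   // collection link assigned to this point on the ring
}

function searchRing(partitions: Partition[], hashValue: number): number {
    for (let i = 0; i < partitions.length - 1; i++) {
        if (hashValue >= partitions[i].hashValue && hashValue < partitions[i + 1].hashValue) {
            return i;
        }
    }
    // Wrap around: anything past the last point belongs to the last partition.
    return partitions.length - 1;
}
```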
|
|
@@ -1,122 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require('../base');
|
||||
var ConsistentHashRing = require('./consistentHashRing.js').ConsistentHashRing;
|
||||
|
||||
//SCRIPT START
|
||||
var HashPartitionResolver = Base.defineClass(
|
||||
/**
|
||||
* HashPartitionResolver implements partitioning based on the value of a hash function,
|
||||
* allowing you to evenly distribute requests and data across a number of partitions for
|
||||
* the Azure Cosmos DB database service.
|
||||
* @class HashPartitionResolver
|
||||
* @param {string | function} partitionKeyExtractor - If partitionKeyExtractor is a string, it should be the name of the property in the document to execute the hashing on.
|
||||
* If partitionKeyExtractor is a function, it should be a function to extract the partition key from an object.
|
||||
**/
|
||||
function (partitionKeyExtractor, collectionLinks, options) {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKeyExtractor(partitionKeyExtractor);
|
||||
HashPartitionResolver._throwIfInvalidCollectionLinks(collectionLinks);
|
||||
this.partitionKeyExtractor = partitionKeyExtractor;
|
||||
|
||||
options = options || {};
|
||||
this.consistentHashRing = new ConsistentHashRing(collectionLinks, options);
|
||||
this.collectionLinks = collectionLinks;
|
||||
}, {
|
||||
/**
|
||||
* Extracts the partition key from the specified document using the partitionKeyExtractor
|
||||
* @memberof HashPartitionResolver
|
||||
* @instance
|
||||
* @param {object} document - The document from which to extract the partition key.
|
||||
* @returns {object}
|
||||
**/
|
||||
getPartitionKey: function (document) {
|
||||
return (typeof this.partitionKeyExtractor === "string")
|
||||
? document[this.partitionKeyExtractor]
|
||||
: this.partitionKeyExtractor(document);
|
||||
},
|
||||
/**
|
||||
* Given a partition key, returns a list of collection links to read from.
|
||||
* @memberof HashPartitionResolver
|
||||
* @instance
|
||||
* @param {any} partitionKey - The partition key used to determine the target collection for query
|
||||
**/
|
||||
resolveForRead: function (partitionKey) {
|
||||
if (partitionKey === undefined || partitionKey === null) {
|
||||
return this.collectionLinks;
|
||||
}
|
||||
|
||||
return [this._resolve(partitionKey)];
|
||||
},
|
||||
/**
|
||||
* Given a partition key, returns the correct collection link for creating a document.
|
||||
* @memberof HashPartitionResolver
|
||||
* @instance
|
||||
* @param {any} partitionKey - The partition key used to determine the target collection for create
|
||||
* @returns {string} - The target collection link that will be used for document creation.
|
||||
**/
|
||||
resolveForCreate: function (partitionKey) {
|
||||
return this._resolve(partitionKey);
|
||||
},
|
||||
/** @ignore */
|
||||
_resolve: function (partitionKey) {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKey(partitionKey);
|
||||
return this.consistentHashRing.getNode(partitionKey);
|
||||
}
|
||||
}, {
|
||||
/** @ignore */
|
||||
_throwIfInvalidPartitionKeyExtractor: function (partitionKeyExtractor) {
|
||||
if (partitionKeyExtractor === undefined || partitionKeyExtractor === null) {
|
||||
throw new Error("partitionKeyExtractor cannot be null or undefined");
|
||||
}
|
||||
|
||||
if (typeof partitionKeyExtractor !== "string" && typeof partitionKeyExtractor !== "function") {
|
||||
throw new Error("partitionKeyExtractor must be either a 'string' or a 'function'");
|
||||
}
|
||||
},
|
||||
/** @ignore */
|
||||
_throwIfInvalidPartitionKey: function (partitionKey) {
|
||||
var partitionKeyType = typeof partitionKey;
|
||||
if (partitionKeyType !== "string") {
|
||||
throw new Error("partitionKey must be a 'string'");
|
||||
}
|
||||
},
|
||||
/** @ignore */
|
||||
_throwIfInvalidCollectionLinks: function (collectionLinks) {
|
||||
if (!Array.isArray(collectionLinks)) {
|
||||
throw new Error("collectionLinks must be an array.");
|
||||
}
|
||||
|
||||
if (collectionLinks.some(function (collectionLink) { return !Base._isValidCollectionLink(collectionLink); })) {
|
||||
throw new Error("All elements of collectionLinks must be collection links.");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.HashPartitionResolver = HashPartitionResolver;
|
||||
}
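
Note (illustrative only): the extractor dispatch in the deleted `getPartitionKey` treats a string extractor as a document property name and a function extractor as a callback. A typed sketch of that dispatch, with assumed names:

```typescript
// Sketch of the deleted HashPartitionResolver.getPartitionKey dispatch.
type PartitionKeyExtractor<T> = keyof T | ((doc: T) => unknown);

function getPartitionKey<T>(doc: T, extractor: PartitionKeyExtractor<T>): unknown {
    // A function extracts the key itself; a string names the document property to read.
    return typeof extractor === "function" ? extractor(doc) : doc[extractor];
}
```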
|
|
@@ -1,170 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base");
|
||||
|
||||
//SCRIPT START
|
||||
var MurmurHash = Base.defineClass(
|
||||
undefined,
|
||||
undefined,
|
||||
{
|
||||
/**
|
||||
* Hashes a string, an unsigned 32-bit integer, or a Buffer into a new unsigned 32-bit integer that represents the output hash.
|
||||
* @param {string|number|Buffer} key - The preimage of the hash
|
||||
* @param {number} seed - Optional value used to initialize the hash generator
|
||||
* @returns {number} - The unsigned 32-bit integer hash of the key
|
||||
*/
|
||||
hash: function (key, seed) {
|
||||
key = key || '';
|
||||
seed = seed || 0;
|
||||
|
||||
MurmurHash._throwIfInvalidKey(key);
|
||||
MurmurHash._throwIfInvalidSeed(seed);
|
||||
|
||||
var buffer;
|
||||
if (typeof key === "string") {
|
||||
buffer = MurmurHash._getBufferFromString(key);
|
||||
}
|
||||
else if (typeof key === "number") {
|
||||
buffer = MurmurHash._getBufferFromNumber(key);
|
||||
}
|
||||
else {
|
||||
buffer = key;
|
||||
}
|
||||
|
||||
return MurmurHash._hashBytes(buffer, seed);
|
||||
},
|
||||
/** @ignore */
|
||||
_throwIfInvalidKey: function (key) {
|
||||
if (key instanceof Buffer) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof key === "string") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof key === "number") {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Error("Invalid argument: 'key' has to be a Buffer, string, or number.");
|
||||
},
|
||||
/** @ignore */
|
||||
_throwIfInvalidSeed: function (seed) {
|
||||
if (isNaN(seed)) {
|
||||
throw new Error("Invalid argument: 'seed' is not and cannot be converted to a number.");
|
||||
}
|
||||
},
|
||||
/** @ignore */
|
||||
_getBufferFromString: function (key) {
|
||||
var buffer = new Buffer(key);
|
||||
return buffer;
|
||||
},
|
||||
/** @ignore */
|
||||
_getBufferFromNumber: function (i) {
|
||||
i = i >>> 0;
|
||||
|
||||
var buffer = new Uint8Array([
|
||||
i >>> 0,
|
||||
i >>> 8,
|
||||
i >>> 16,
|
||||
i >>> 24
|
||||
]);
|
||||
|
||||
return buffer;
|
||||
},
|
||||
/** @ignore */
|
||||
_hashBytes: function (bytes, seed) {
|
||||
var c1 = 0xcc9e2d51;
|
||||
var c2 = 0x1b873593;
|
||||
|
||||
var h1 = seed;
|
||||
var reader = new Uint32Array(bytes);
|
||||
{
|
||||
for (var i = 0; i < bytes.length - 3; i += 4) {
|
||||
var k1 = MurmurHash._readUInt32(reader, i);
|
||||
|
||||
k1 = MurmurHash._multiply(k1, c1);
|
||||
k1 = MurmurHash._rotateLeft(k1, 15);
|
||||
k1 = MurmurHash._multiply(k1, c2);
|
||||
|
||||
h1 ^= k1;
|
||||
h1 = MurmurHash._rotateLeft(h1, 13);
|
||||
h1 = MurmurHash._multiply(h1, 5) + 0xe6546b64;
|
||||
}
|
||||
}
|
||||
|
||||
var k = 0;
|
||||
switch (bytes.length & 3) {
|
||||
case 3:
|
||||
k ^= reader[i + 2] << 16;
|
||||
k ^= reader[i + 1] << 8;
|
||||
k ^= reader[i];
|
||||
break;
|
||||
|
||||
case 2:
|
||||
k ^= reader[i + 1] << 8;
|
||||
k ^= reader[i];
|
||||
break;
|
||||
|
||||
case 1:
|
||||
k ^= reader[i];
|
||||
break;
|
||||
}
|
||||
|
||||
k = MurmurHash._multiply(k, c1);
|
||||
k = MurmurHash._rotateLeft(k, 15);
|
||||
k = MurmurHash._multiply(k, c2);
|
||||
|
||||
h1 ^= k;
|
||||
h1 ^= bytes.length;
|
||||
h1 ^= h1 >>> 16;
|
||||
h1 = MurmurHash._multiply(h1, 0x85ebca6b);
|
||||
h1 ^= h1 >>> 13;
|
||||
h1 = MurmurHash._multiply(h1, 0xc2b2ae35);
|
||||
h1 ^= h1 >>> 16;
|
||||
|
||||
return h1 >>> 0;
|
||||
},
|
||||
/** @ignore */
|
||||
_rotateLeft: function (n, numBits) {
|
||||
return (n << numBits) | (n >>> (32 - numBits));
|
||||
},
|
||||
/** @ignore */
|
||||
_multiply: function (m, n) {
|
||||
return ((m & 0xffff) * n) + ((((m >>> 16) * n) & 0xffff) << 16);
|
||||
},
|
||||
/** @ignore */
|
||||
_readUInt32: function (uintArray, i) {
|
||||
return (uintArray[i]) | (uintArray[i + 1] << 8) | (uintArray[i + 2] << 16) | (uintArray[i + 3] << 24) >>> 0;
|
||||
}
|
||||
});
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.MurmurHash = MurmurHash;
|
||||
}
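A minimal sketch of the hashing contract above; the require path is an assumption, since only hashPartitionResolver.js is referenced by name elsewhere in this commit.

var MurmurHash = require("./murmurHash").MurmurHash; // assumed path

// Strings, unsigned 32-bit integers, and Buffers are all accepted as keys.
var h1 = MurmurHash.hash("partitionKey1");        // seed defaults to 0
var h2 = MurmurHash.hash(374, 0);                 // numbers are serialized little-endian before hashing
var h3 = MurmurHash.hash(new Buffer([1, 2, 3]));  // raw bytes are hashed as-is

// The result is always an unsigned 32-bit integer.
console.log(h1 === (h1 >>> 0)); // true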
|
|
@ -1,99 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict"
|
||||
|
||||
var Base = require("./base"),
|
||||
Constants = require("./constants");
|
||||
|
||||
var Regexes = Constants.RegularExpressions,
|
||||
ResourceTypes = Constants.ResourceTypes;
|
||||
|
||||
|
||||
//SCRIPT START
|
||||
var Helper = Base.defineClass(
|
||||
|
||||
/**************************CONSTRUCTORS**************************/
|
||||
undefined,
|
||||
|
||||
/************************INSTANCE MEMBERS************************/
|
||||
undefined,
|
||||
|
||||
/*************************STATIC METHODS*************************/
|
||||
{
|
||||
isStringNullOrEmpty: function (inputString) {
|
||||
//checks whether string is null, undefined, empty or only contains space
|
||||
return !inputString || /^\s*$/.test(inputString);
|
||||
},
|
||||
|
||||
trimSlashFromLeftAndRight: function (inputString) {
|
||||
if (typeof inputString != 'string') {
|
||||
throw "invalid input: input is not string";
|
||||
}
|
||||
|
||||
return inputString.replace(Regexes.TrimLeftSlashes, "").replace(Regexes.TrimRightSlashes, "");
|
||||
},
|
||||
|
||||
validateResourceId: function (resourceId) {
|
||||
// if resourceId is not a string or is empty throw an error
|
||||
if (typeof resourceId !== 'string' || this.isStringNullOrEmpty(resourceId)) {
|
||||
throw "Resource Id must be a string and cannot be undefined, null or empty";
|
||||
}
|
||||
|
||||
// if resourceId ends with a space throw an error
|
||||
if (resourceId[resourceId.length - 1] == " ") {
|
||||
throw "Resource Id cannot end with space";
|
||||
}
|
||||
|
||||
// if resource id contains illegal characters throw an error
|
||||
if (Regexes.IllegalResourceIdCharacters.test(resourceId)) {
|
||||
throw "Illegal characters ['/', '\\', '?', '#'] cannot be used in resourceId";
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
},
|
||||
|
||||
getResourceIdFromPath: function(resourcePath) {
|
||||
if (!resourcePath || typeof resourcePath !== "string") {
|
||||
return null;
|
||||
}
|
||||
|
||||
var trimmedPath = this.trimSlashFromLeftAndRight(resourcePath);
|
||||
var pathSegments = trimmedPath.split('/');
|
||||
|
||||
//number of segments of a path must always be even
|
||||
if (pathSegments.length % 2 !== 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return pathSegments[pathSegments.length - 1];
|
||||
}
|
||||
}
|
||||
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.Helper = Helper;
|
||||
}
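A short sketch of the helper contract above, with an assumed module path:

var Helper = require("./helper").Helper; // assumed path

console.log(Helper.isStringNullOrEmpty("   "));                      // true
console.log(Helper.trimSlashFromLeftAndRight("/dbs/myDb/"));         // "dbs/myDb"
console.log(Helper.validateResourceId("myDb"));                      // true (throws on illegal ids)
console.log(Helper.getResourceIdFromPath("/dbs/myDb/colls/myColl")); // "myColl"
console.log(Helper.getResourceIdFromPath("/dbs/myDb/colls"));        // null (odd segment count)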
|
|
@ -1,41 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Client = require("./documentclient")
|
||||
, Hash = require("./hash/hashPartitionResolver")
|
||||
, Range = require("./range")
|
||||
, UriFactory = require("./uriFactory");
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.DocumentClient = Client.DocumentClient;
|
||||
exports.DocumentBase = Client.DocumentBase;
|
||||
exports.Base = Client.Base;
|
||||
exports.Constants = Client.Constants;
|
||||
exports.RetryOptions = Client.RetryOptions;
|
||||
exports.Range = Range.Range;
|
||||
exports.RangePartitionResolver = Range.RangePartitionResolver;
|
||||
exports.HashPartitionResolver = Hash.HashPartitionResolver;
|
||||
exports.UriFactory = UriFactory.UriFactory;
|
||||
}
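For reference, everything the package exposes flows through this entry point. A minimal sketch; the DocumentClient constructor arguments are an assumption, as they are not shown in this file.

var lib = require("./index"); // assumed entry point path

// Assumption: DocumentClient is constructed from an endpoint URL and an auth object with a master key.
var client = new lib.DocumentClient("https://localhost:8081", { masterKey: "<master key>" });

// The partition resolvers, Range helpers, and UriFactory are re-exported alongside the client.
console.log(Object.keys(lib));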
|
|
@ -1,76 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Constants = require("./constants");
|
||||
var os = require("os");
|
||||
var util = require("util");
|
||||
var semaphore = require("semaphore");
|
||||
var Platform = {
|
||||
/** @ignore */
|
||||
getPlatformDefaultHeaders: function () {
|
||||
var defaultHeaders = {};
|
||||
defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
|
||||
return defaultHeaders;
|
||||
},
|
||||
/** @ignore */
|
||||
getDecodedDataLength: function (encodedData) {
|
||||
var buffer = new Buffer(encodedData, "base64");
|
||||
return buffer.length;
|
||||
},
|
||||
/** @ignore */
|
||||
getUserAgent: function () {
|
||||
// gets the user agent in the following format
|
||||
// "{OSName}/{OSVersion} Nodejs/{NodejsVersion} documentdb-nodejs-sdk/{SDKVersion}"
|
||||
// for example:
|
||||
// "linux/3.4.0+ Nodejs/v0.10.25 documentdb-nodejs-sdk/1.10.0"
|
||||
// "win32/10.0.14393 Nodejs/v4.4.7 documentdb-nodejs-sdk/1.10.0"
|
||||
var osName = Platform._getSafeUserAgentSegmentInfo(os.platform());
|
||||
var osVersion = Platform._getSafeUserAgentSegmentInfo(os.release());
|
||||
var nodejsVersion = Platform._getSafeUserAgentSegmentInfo(process.version);
|
||||
|
||||
var userAgent = util.format("%s/%s Nodejs/%s %s/%s", osName, osVersion,
|
||||
nodejsVersion,
|
||||
Constants.SDKName, Constants.SDKVersion);
|
||||
|
||||
return userAgent;
|
||||
},
|
||||
/** @ignore */
|
||||
_getSafeUserAgentSegmentInfo: function (s) {
|
||||
// catch null, undefined, etc
|
||||
if (typeof (s) !== 'string') {
|
||||
s = "unknown";
|
||||
}
|
||||
// remove all white spaces
|
||||
s = s.replace(/\s+/g, '');
|
||||
if (!s) {
|
||||
s = "unknown";
|
||||
}
|
||||
return s;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = Platform;
|
||||
}
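A quick sketch of the helpers above, with an assumed module path:

var Platform = require("./platform"); // assumed path

// "{OSName}/{OSVersion} Nodejs/{NodejsVersion} {SDKName}/{SDKVersion}", spaces stripped per segment
console.log(Platform.getUserAgent());

// Length of the decoded payload, not of the base64 string itself
console.log(Platform.getDecodedDataLength("aGVsbG8=")); // 5 ("hello" is five bytes)

// The default headers only carry the user agent
console.log(Platform.getPlatformDefaultHeaders());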
|
|
@ -1,236 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, DocumentProducer = require("./documentProducer")
|
||||
, OrderByDocumentProducerComparator = require("./orderByDocumentProducerComparator");
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var AverageAggregator = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an aggregator for AVG operator.
|
||||
* @constructor AverageAggregator
|
||||
* @ignore
|
||||
*/
|
||||
function () {
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Add the provided item to aggregation result.
|
||||
* @memberof AverageAggregator
|
||||
* @instance
|
||||
* @param other
|
||||
*/
|
||||
aggregate: function (other) {
|
||||
if (other == null || other.sum == null) {
|
||||
return;
|
||||
}
|
||||
if (this.sum == null) {
|
||||
this.sum = 0.0;
|
||||
this.count = 0;
|
||||
}
|
||||
this.sum += other.sum;
|
||||
this.count += other.count;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the aggregation result.
|
||||
* @memberof AverageAggregator
|
||||
* @instance
|
||||
*/
|
||||
getResult: function () {
|
||||
if (this.sum == null || this.count <= 0) {
|
||||
return undefined;
|
||||
}
|
||||
return this.sum / this.count;
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
var CountAggregator = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an aggregator for COUNT operator.
|
||||
* @constructor CountAggregator
|
||||
* @ignore
|
||||
*/
|
||||
function () {
|
||||
this.value = 0;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Add the provided item to aggregation result.
|
||||
* @memberof CountAggregator
|
||||
* @instance
|
||||
* @param other
|
||||
*/
|
||||
aggregate: function (other) {
|
||||
this.value += other;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the aggregation result.
|
||||
* @memberof CountAggregator
|
||||
* @instance
|
||||
*/
|
||||
getResult: function () {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
var MinAggregator = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an aggregator for MIN operator.
|
||||
* @constructor MinAggregator
|
||||
* @ignore
|
||||
*/
|
||||
function () {
|
||||
this.value = undefined;
|
||||
this.comparer = new OrderByDocumentProducerComparator("Ascending");
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Add the provided item to aggregation result.
|
||||
* @memberof MinAggregator
|
||||
* @instance
|
||||
* @param other
|
||||
*/
|
||||
aggregate: function (other) {
|
||||
if (this.value == undefined) {
|
||||
this.value = other;
|
||||
}
|
||||
else {
|
||||
var otherType = other == null ? 'NoValue' : typeof (other);
|
||||
if (this.comparer.compareValue(other, otherType, this.value, typeof (this.value)) < 0) {
|
||||
this.value = other;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the aggregation result.
|
||||
* @memberof MinAggregator
|
||||
* @instance
|
||||
*/
|
||||
getResult: function () {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
var MaxAggregator = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an aggregator for MAX operator.
|
||||
* @constructor MaxAggregator
|
||||
* @ignore
|
||||
*/
|
||||
function () {
|
||||
this.value = undefined;
|
||||
this.comparer = new OrderByDocumentProducerComparator("Ascending");
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Add the provided item to aggregation result.
|
||||
* @memberof MaxAggregator
|
||||
* @instance
|
||||
* @param other
|
||||
*/
|
||||
aggregate: function (other) {
|
||||
if (this.value == undefined) {
|
||||
this.value = other;
|
||||
}
|
||||
else if (this.comparer.compareValue(other, typeof (other), this.value, typeof (this.value)) > 0) {
|
||||
this.value = other;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the aggregation result.
|
||||
* @memberof MaxAggregator
|
||||
* @instance
|
||||
*/
|
||||
getResult: function () {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
var SumAggregator = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an aggregator for SUM operator.
|
||||
* @constructor SumAggregator
|
||||
* @ignore
|
||||
*/
|
||||
function () {
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Add the provided item to aggregation result.
|
||||
* @memberof SumAggregator
|
||||
* @instance
|
||||
* @param other
|
||||
*/
|
||||
aggregate: function (other) {
|
||||
if (other == undefined) {
|
||||
return;
|
||||
}
|
||||
if (this.sum == undefined) {
|
||||
this.sum = other;
|
||||
}
|
||||
else {
|
||||
this.sum += other;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the aggregation result.
|
||||
* @memberof SumAggregator
|
||||
* @instance
|
||||
*/
|
||||
getResult: function () {
|
||||
return this.sum;
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.AverageAggregator = AverageAggregator;
|
||||
exports.CountAggregator = CountAggregator;
|
||||
exports.MinAggregator = MinAggregator;
|
||||
exports.MaxAggregator = MaxAggregator;
|
||||
exports.SumAggregator = SumAggregator;
|
||||
}
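A minimal sketch of how these aggregators compose partial per-partition results (the relative path matches the require in endpointComponent below):

var aggregators = require("./aggregators");

// SUM, COUNT, MIN, and MAX consume plain values...
var sum = new aggregators.SumAggregator();
[1, 2, 3].forEach(function (v) { sum.aggregate(v); });
console.log(sum.getResult()); // 6

// ...while AVG consumes partial { sum, count } results emitted by each partition
// and only produces a value once at least one non-empty partial has been seen.
var avg = new aggregators.AverageAggregator();
avg.aggregate({ sum: 10, count: 4 });
avg.aggregate({ sum: 2, count: 1 });
console.log(avg.getResult()); // 12 / 5 = 2.4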
|
|
@ -1,168 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, Constants = require("../constants");
|
||||
|
||||
//SCRIPT START
|
||||
var DefaultQueryExecutionContext = Base.defineClass(
|
||||
/**
|
||||
* Provides the basic Query Execution Context. This wraps the internal logic of query execution using the provided fetch functions
|
||||
* @constructor DefaultQueryExecutionContext
|
||||
* @param {DocumentClient} documentclient - The DocumentClient instance used to issue the requests.
|
||||
* @param {SqlQuerySpec | string} query - A SQL query.
|
||||
* @param {FeedOptions} [options] - Represents the feed options.
|
||||
* @param {callback | callback[]} fetchFunctions - A function to retrieve each page of data. An array of functions may be used to query more than one partition.
|
||||
* @ignore
|
||||
*/
|
||||
function(documentclient, query, options, fetchFunctions){
|
||||
this.documentclient = documentclient;
|
||||
this.query = query;
|
||||
this.resources = [];
|
||||
this.currentIndex = 0;
|
||||
this.currentPartitionIndex = 0;
|
||||
this.fetchFunctions = (Array.isArray(fetchFunctions)) ? fetchFunctions : [fetchFunctions];
|
||||
this.options = options || {};
|
||||
this.continuation = this.options.continuation || null;
|
||||
this.state = DefaultQueryExecutionContext.STATES.start;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Execute a provided callback on the next element in the execution context.
|
||||
* @memberof DefaultQueryExecutionContext
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
var that = this;
|
||||
this.current(function (err, resources, headers) {
|
||||
++that.currentIndex;
|
||||
callback(err, resources, headers);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the execution context.
|
||||
* @memberof DefaultQueryExecutionContext
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function(callback) {
|
||||
var that = this;
|
||||
if (this.currentIndex < this.resources.length) {
|
||||
return callback(undefined, this.resources[this.currentIndex], undefined);
|
||||
}
|
||||
|
||||
if (this._canFetchMore()) {
|
||||
this.fetchMore(function (err, resources, headers) {
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
|
||||
that.resources = resources;
|
||||
if (that.resources.length === 0) {
|
||||
if (!that.continuation && that.currentPartitionIndex >= that.fetchFunctions.length) {
|
||||
that.state = DefaultQueryExecutionContext.STATES.ended;
|
||||
callback(undefined, undefined, headers);
|
||||
} else {
|
||||
that.current(callback);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
callback(undefined, that.resources[that.currentIndex], headers);
|
||||
});
|
||||
} else {
|
||||
this.state = DefaultQueryExecutionContext.STATES.ended;
|
||||
callback(undefined, undefined, undefined);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process based on the value of the continuation token or the elements remaining on the current batch in the execution context.
|
||||
* @memberof DefaultQueryExecutionContext
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the DefaultQueryExecutionContext.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return this.state === DefaultQueryExecutionContext.STATES.start || this.continuation !== undefined || this.currentIndex < this.resources.length || this.currentPartitionIndex < this.fetchFunctions.length;
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetches the next batch of the feed and pass them as an array to a callback
|
||||
* @memberof DefaultQueryExecutionContext
|
||||
* @instance
|
||||
* @param {callback} callback - Function execute on the feed response, takes two parameters error, resourcesList
|
||||
*/
|
||||
fetchMore: function (callback) {
|
||||
if (this.currentPartitionIndex >= this.fetchFunctions.length) {
|
||||
return callback(undefined, undefined, undefined);
|
||||
}
|
||||
var that = this;
|
||||
// Keep the original continuation and restore the value after the fetchFunction call
|
||||
var originalContinuation = this.options.continuation;
|
||||
this.options.continuation = this.continuation;
|
||||
|
||||
// Return undefined if there are no more results
|
||||
if (this.currentPartitionIndex >= that.fetchFunctions.length) {
|
||||
return callback(undefined, undefined, undefined);
|
||||
}
|
||||
|
||||
var fetchFunction = this.fetchFunctions[this.currentPartitionIndex];
|
||||
fetchFunction(this.options, function(err, resources, responseHeaders){
|
||||
if(err) {
|
||||
that.state = DefaultQueryExecutionContext.STATES.ended;
|
||||
return callback(err, undefined, responseHeaders);
|
||||
}
|
||||
|
||||
that.continuation = responseHeaders[Constants.HttpHeaders.Continuation];
|
||||
if (!that.continuation) {
|
||||
++that.currentPartitionIndex;
|
||||
}
|
||||
|
||||
that.state = DefaultQueryExecutionContext.STATES.inProgress;
|
||||
that.currentIndex = 0;
|
||||
that.options.continuation = originalContinuation;
|
||||
callback(undefined, resources, responseHeaders);
|
||||
});
|
||||
},
|
||||
|
||||
_canFetchMore: function () {
|
||||
var res = (this.state === DefaultQueryExecutionContext.STATES.start
|
||||
|| (this.continuation && this.state === DefaultQueryExecutionContext.STATES.inProgress)
|
||||
|| (this.currentPartitionIndex < this.fetchFunctions.length
|
||||
&& this.state === DefaultQueryExecutionContext.STATES.inProgress));
|
||||
return res;
|
||||
}
|
||||
}, {
|
||||
|
||||
STATES: Object.freeze({ start: "start", inProgress: "inProgress", ended: "ended" })
|
||||
}
|
||||
);
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = DefaultQueryExecutionContext;
|
||||
}
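A minimal sketch of driving the context with a hand-rolled fetch function; the single-page fetch below is an illustrative stand-in for the real documentclient.queryFeed-backed fetch functions.

var DefaultQueryExecutionContext = require("./defaultQueryExecutionContext");

// One page, no continuation header, so the context ends after a single fetch.
function fetchFunction(options, callback) {
    callback(undefined, [{ id: "doc1" }, { id: "doc2" }], {});
}

var context = new DefaultQueryExecutionContext(
    null /* documentclient is only stored, not used by this sketch */,
    "SELECT * FROM root r", {}, fetchFunction);

(function drain() {
    context.nextItem(function (err, item) {
        if (err) { throw err; }
        if (item === undefined) { return; } // feed exhausted
        console.log(item.id);
        drain();
    });
})();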
|
|
@ -1,331 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, DefaultQueryExecutionContext = require("./defaultQueryExecutionContext")
|
||||
, HttpHeaders = require("../constants").HttpHeaders
|
||||
, HeaderUtils = require("./headerUtils")
|
||||
, StatusCodes = require("../statusCodes").StatusCodes
|
||||
, SubStatusCodes = require("../statusCodes").SubStatusCodes
|
||||
, assert = require("assert")
|
||||
|
||||
//SCRIPT START
|
||||
var DocumentProducer = Base.defineClass(
|
||||
/**
|
||||
* Provides the Target Partition Range Query Execution Context.
|
||||
* @constructor DocumentProducer
|
||||
* @param {DocumentClient} documentclient - The DocumentClient instance used to issue the requests.
|
||||
* @param {String} collectionLink - Represents collection link
|
||||
* @param {SqlQuerySpec | string} query - A SQL query.
|
||||
* @param {object} targetPartitionKeyRange - Query Target Partition key Range
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, collectionLink, query, targetPartitionKeyRange, options) {
|
||||
this.documentclient = documentclient;
|
||||
this.collectionLink = collectionLink;
|
||||
this.query = query;
|
||||
this.targetPartitionKeyRange = targetPartitionKeyRange;
|
||||
this.fetchResults = [];
|
||||
|
||||
this.state = DocumentProducer.STATES.started;
|
||||
this.allFetched = false;
|
||||
this.err = undefined;
|
||||
|
||||
this.previousContinuationToken = undefined;
|
||||
this.continuationToken = undefined;
|
||||
this._respHeaders = HeaderUtils.getInitialHeader();
|
||||
|
||||
var isNameBased = Base.isLinkNameBased(collectionLink);
|
||||
var path = this.documentclient.getPathFromLink(collectionLink, "docs", isNameBased);
|
||||
var id = this.documentclient.getIdFromLink(collectionLink, isNameBased);
|
||||
|
||||
var that = this;
|
||||
var fetchFunction = function (options, callback) {
|
||||
that.documentclient.queryFeed.call(documentclient,
|
||||
documentclient,
|
||||
path,
|
||||
"docs",
|
||||
id,
|
||||
function (result) { return result.Documents; },
|
||||
function (parent, body) { return body; },
|
||||
query,
|
||||
options,
|
||||
callback,
|
||||
that.targetPartitionKeyRange["id"]);
|
||||
};
|
||||
this.internalExecutionContext = new DefaultQueryExecutionContext(documentclient, query, options, fetchFunction);
|
||||
this.state = DocumentProducer.STATES.inProgress;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Synchronously gives the contiguous buffered results (stops at the first non-result) if any
|
||||
* @returns {Object} - buffered current items if any
|
||||
* @ignore
|
||||
*/
|
||||
peekBufferedItems: function () {
|
||||
var bufferedResults = [];
|
||||
for (var i = 0, done = false; i < this.fetchResults.length && !done; i++) {
|
||||
var fetchResult = this.fetchResults[i];
|
||||
switch (fetchResult.fetchResultType) {
|
||||
case FetchResultType.Done:
|
||||
done = true;
|
||||
break;
|
||||
case FetchResultType.Exception:
|
||||
done = true;
|
||||
break;
|
||||
case FetchResultType.Result:
|
||||
bufferedResults.push(fetchResult.feedResponse);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return bufferedResults;
|
||||
},
|
||||
|
||||
hasMoreResults: function () {
|
||||
return this.internalExecutionContext.hasMoreResults() || this.fetchResults.length != 0;
|
||||
},
|
||||
|
||||
gotSplit: function () {
|
||||
var fetchResult = this.fetchResults[0];
|
||||
if (fetchResult.fetchResultType == FetchResultType.Exception) {
|
||||
if (this._needPartitionKeyRangeCacheRefresh(fetchResult.error)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
/**
|
||||
* Synchronously gives the buffered items if any and moves inner indices.
|
||||
* @returns {Object} - buffered current items if any
|
||||
* @ignore
|
||||
*/
|
||||
consumeBufferedItems: function () {
|
||||
var res = this._getBufferedResults();
|
||||
this.fetchResults = [];
|
||||
this._updateStates(undefined, this.continuationToken === null || this.continuationToken === undefined);
|
||||
return res;
|
||||
},
|
||||
|
||||
_getAndResetActiveResponseHeaders: function () {
|
||||
var ret = this._respHeaders;
|
||||
this._respHeaders = HeaderUtils.getInitialHeader();
|
||||
return ret;
|
||||
},
|
||||
|
||||
_updateStates: function (err, allFetched) {
|
||||
if (err) {
|
||||
this.state = DocumentProducer.STATES.ended;
|
||||
this.err = err;
|
||||
return;
|
||||
}
|
||||
if (allFetched) {
|
||||
this.allFetched = true;
|
||||
}
|
||||
if (this.allFetched && this.peekBufferedItems().length === 0) {
|
||||
this.state = DocumentProducer.STATES.ended;
|
||||
}
|
||||
if (this.internalExecutionContext.continuation === this.continuationToken) {
|
||||
// nothing changed
|
||||
return;
|
||||
}
|
||||
this.previousContinuationToken = this.continuationToken;
|
||||
this.continuationToken = this.internalExecutionContext.continuation;
|
||||
},
|
||||
|
||||
_needPartitionKeyRangeCacheRefresh: function (error) {
|
||||
return (error.code === StatusCodes.Gone) && ('substatus' in error) && (error['substatus'] === SubStatusCodes.PartitionKeyRangeGone);
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetches and buffers the next page of results and executes the given callback
|
||||
* @memberof DocumentProducer
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for next page of result.
|
||||
* the function takes three parameters error, resources, headerResponse.
|
||||
*/
|
||||
bufferMore: function (callback) {
|
||||
var that = this;
|
||||
if (that.err) {
|
||||
return callback(that.err);
|
||||
}
|
||||
|
||||
this.internalExecutionContext.fetchMore(function (err, resources, headerResponse) {
|
||||
if (err) {
|
||||
if (that._needPartitionKeyRangeCacheRefresh(err)) {
|
||||
// Split just happened
|
||||
// Buffer the error so the execution context can still get the feedResponses in the itemBuffer
|
||||
var bufferedError = new FetchResult(undefined, err);
|
||||
that.fetchResults.push(bufferedError);
|
||||
// Putting a dummy result so that the rest of code flows
|
||||
return callback(undefined, [bufferedError], headerResponse);
|
||||
}
|
||||
else {
|
||||
that._updateStates(err, resources === undefined);
|
||||
return callback(err, undefined, headerResponse);
|
||||
}
|
||||
}
|
||||
|
||||
that._updateStates(undefined, resources === undefined);
|
||||
if (resources != undefined) {
|
||||
// some more results
|
||||
resources.forEach(function (element) {
|
||||
that.fetchResults.push(new FetchResult(element, undefined));
|
||||
});
|
||||
}
|
||||
|
||||
return callback(undefined, resources, headerResponse);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the target partition key range associated with this document producer
|
||||
* @returns {Object} - the target partition key range
|
||||
* @ignore
|
||||
*/
|
||||
getTargetParitionKeyRange: function () {
|
||||
return this.targetPartitionKeyRange;
|
||||
},
|
||||
|
||||
/**
|
||||
* Execute a provided function on the next element in the DocumentProducer.
|
||||
* @memberof DocumentProducer
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
var that = this;
|
||||
if (that.err) {
|
||||
that._updateStates(that.err, undefined);
|
||||
return callback(that.err);
|
||||
}
|
||||
|
||||
this.current(function (err, item, headers) {
|
||||
if (err) {
|
||||
that._updateStates(err, item === undefined);
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
|
||||
var fetchResult = that.fetchResults.shift();
|
||||
that._updateStates(undefined, item === undefined);
|
||||
assert.equal(fetchResult.feedResponse, item);
|
||||
switch (fetchResult.fetchResultType) {
|
||||
case FetchResultType.Done:
|
||||
return callback(undefined, undefined, headers);
|
||||
case FetchResultType.Exception:
|
||||
return callback(fetchResult.error, undefined, headers);
|
||||
case FetchResultType.Result:
|
||||
return callback(undefined, fetchResult.feedResponse, headers);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the DocumentProducer.
|
||||
* @memberof DocumentProducer
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function (callback) {
|
||||
// If something is buffered just give that
|
||||
if (this.fetchResults.length > 0) {
|
||||
var fetchResult = this.fetchResults[0];
|
||||
//Need to unwrap fetch results
|
||||
switch (fetchResult.fetchResultType) {
|
||||
case FetchResultType.Done:
|
||||
return callback(undefined, undefined, this._getAndResetActiveResponseHeaders());
|
||||
case FetchResultType.Exception:
|
||||
return callback(fetchResult.error, undefined, this._getAndResetActiveResponseHeaders());
|
||||
case FetchResultType.Result:
|
||||
return callback(undefined, fetchResult.feedResponse, this._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
}
|
||||
|
||||
// If there aren't any more items left to fetch then let the user know.
|
||||
if (this.allFetched) {
|
||||
return callback(undefined, undefined, this._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
// If there are no more buffered items and there are still items to be fetched then buffer more
|
||||
var that = this;
|
||||
this.bufferMore(function (err, items, headers) {
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
|
||||
if (items === undefined) {
|
||||
return callback(undefined, undefined, headers);
|
||||
}
|
||||
HeaderUtils.mergeHeaders(that._respHeaders, headers);
|
||||
|
||||
that.current(callback);
|
||||
});
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
// Static Members
|
||||
STATES: Object.freeze({ started: "started", inProgress: "inProgress", ended: "ended" })
|
||||
}
|
||||
);
|
||||
|
||||
var FetchResultType = {
|
||||
"Done": 0,
|
||||
"Exception": 1,
|
||||
"Result": 2
|
||||
};
|
||||
|
||||
var FetchResult = Base.defineClass(
|
||||
/**
|
||||
* Wraps fetch results for the document producer.
|
||||
* This allows the document producer to buffer exceptions so that actual results don't get flushed during splits.
|
||||
* @constructor FetchResult
|
||||
* @param {object} feedResponse - The response the document producer got back on a successful fetch
|
||||
* @param {object} error - The exception meant to be buffered on an unsuccessful fetch
|
||||
* @ignore
|
||||
*/
|
||||
function (feedResponse, error) {
|
||||
if (feedResponse) {
|
||||
this.feedResponse = feedResponse;
|
||||
this.fetchResultType = FetchResultType.Result;
|
||||
} else {
|
||||
this.error = error;
|
||||
this.fetchResultType = FetchResultType.Exception;
|
||||
}
|
||||
},
|
||||
{
|
||||
},
|
||||
{
|
||||
DoneResult : {
|
||||
fetchResultType: FetchResultType.Done
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = DocumentProducer;
|
||||
}
|
|
@ -1,308 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, aggregators = require('./aggregators');
|
||||
|
||||
var AverageAggregator = aggregators.AverageAggregator
|
||||
, CountAggregator = aggregators.CountAggregator
|
||||
, MaxAggregator = aggregators.MaxAggregator
|
||||
, MinAggregator = aggregators.MinAggregator
|
||||
, SumAggregator = aggregators.SumAggregator;
|
||||
|
||||
//SCRIPT START
|
||||
var OrderByEndpointComponent = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents an endpoint in handling an order by query. For each processed orderby result it returns the 'payload' item of the result
|
||||
* @constructor OrderByEndpointComponent
|
||||
* @param {object} executionContext - Underlying Execution Context
|
||||
* @ignore
|
||||
*/
|
||||
function (executionContext) {
|
||||
this.executionContext = executionContext;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Execute a provided function on the next element in the OrderByEndpointComponent.
|
||||
* @memberof OrderByEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
this.executionContext.nextItem(function (err, item, headers) {
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
if (item === undefined) {
|
||||
return callback(undefined, undefined, headers);
|
||||
}
|
||||
callback(undefined, item["payload"], headers);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the OrderByEndpointComponent.
|
||||
* @memberof OrderByEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function(callback) {
|
||||
this.executionContext.current(function (err, item, headers) {
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
if (item === undefined) {
|
||||
return callback(undefined, undefined, headers);
|
||||
}
|
||||
callback(undefined, item["payload"], headers);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process.
|
||||
* @memberof OrderByEndpointComponent
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the OrderByEndpointComponent.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return this.executionContext.hasMoreResults();
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
var TopEndpointComponent = Base.defineClass(
|
||||
/**
|
||||
* Represents an endpoint in handling a top query. It only returns as many results as the top argument specified.
|
||||
* @constructor TopEndpointComponent
|
||||
* @param { object } executionContext - Underlying Execution Context
|
||||
* @ignore
|
||||
*/
|
||||
function (executionContext, topCount) {
|
||||
this.executionContext = executionContext;
|
||||
this.topCount = topCount;
|
||||
},
|
||||
{
|
||||
|
||||
/**
|
||||
* Execute a provided function on the next element in the TopEndpointComponent.
|
||||
* @memberof TopEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
if (this.topCount <= 0) {
|
||||
return callback(undefined, undefined, undefined);
|
||||
}
|
||||
this.topCount--;
|
||||
this.executionContext.nextItem(function (err, item, headers) {
|
||||
callback(err, item, headers);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the TopEndpointComponent.
|
||||
* @memberof TopEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function (callback) {
|
||||
if (this.topCount <= 0) {
|
||||
return callback(undefined, undefined);
|
||||
}
|
||||
this.executionContext.current(function (err, item, headers) {
|
||||
return callback(err, item, headers);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process.
|
||||
* @memberof TopEndpointComponent
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the TopEndpointComponent.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return (this.topCount > 0 && this.executionContext.hasMoreResults());
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
var AggregateEndpointComponent = Base.defineClass(
|
||||
/**
|
||||
* Represents an endpoint in handling aggregate queries.
|
||||
* @constructor AggregateEndpointComponent
|
||||
* @param { object } executionContext - Underlying Execution Context
|
||||
* @ignore
|
||||
*/
|
||||
function (executionContext, aggregateOperators) {
|
||||
this.executionContext = executionContext;
|
||||
this.localAggregators = [];
|
||||
var that = this;
|
||||
aggregateOperators.forEach(function (aggregateOperator) {
|
||||
switch (aggregateOperator) {
|
||||
case 'Average':
|
||||
that.localAggregators.push(new AverageAggregator());
|
||||
break;
|
||||
case 'Count':
|
||||
that.localAggregators.push(new CountAggregator());
|
||||
break;
|
||||
case 'Max':
|
||||
that.localAggregators.push(new MaxAggregator());
|
||||
break;
|
||||
case 'Min':
|
||||
that.localAggregators.push(new MinAggregator());
|
||||
break;
|
||||
case 'Sum':
|
||||
that.localAggregators.push(new SumAggregator());
|
||||
break;
|
||||
}
|
||||
});
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Populate the aggregated values
|
||||
* @ignore
|
||||
*/
|
||||
_getAggregateResult: function (callback) {
|
||||
this.toArrayTempResources = [];
|
||||
this.aggregateValues = [];
|
||||
this.aggregateValuesIndex = -1;
|
||||
var that = this;
|
||||
|
||||
this._getQueryResults(function (err, resources) {
|
||||
if (err) {
|
||||
return callback(err, undefined);
|
||||
}
|
||||
|
||||
resources.forEach(function (resource) {
|
||||
that.localAggregators.forEach(function (aggregator) {
|
||||
var itemValue = undefined;
|
||||
// Get the value of the first property if it exists
|
||||
if (resource && Object.keys(resource).length > 0) {
|
||||
var key = Object.keys(resource)[0];
|
||||
itemValue = resource[key];
|
||||
}
|
||||
aggregator.aggregate(itemValue);
|
||||
});
|
||||
});
|
||||
|
||||
// Get the aggregated results
|
||||
that.localAggregators.forEach(function (aggregator) {
|
||||
that.aggregateValues.push(aggregator.getResult());
|
||||
});
|
||||
|
||||
return callback(undefined, that.aggregateValues);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the results of queries from all partitions
|
||||
* @ignore
|
||||
*/
|
||||
_getQueryResults: function (callback) {
|
||||
var that = this;
|
||||
|
||||
this.executionContext.nextItem(function (err, item) {
|
||||
if (err) {
|
||||
return callback(err, undefined);
|
||||
}
|
||||
|
||||
if (item === undefined) {
|
||||
// no more results
|
||||
return callback(undefined, that.toArrayTempResources);
|
||||
}
|
||||
|
||||
that.toArrayTempResources = that.toArrayTempResources.concat(item);
|
||||
return that._getQueryResults(callback);
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
/**
|
||||
* Execute a provided function on the next element in the AggregateEndpointComponent.
|
||||
* @memberof AggregateEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
var that = this;
|
||||
var _nextItem = function (err, resources) {
|
||||
if (err || that.aggregateValues.length <= 0) {
|
||||
return callback(undefined, undefined);
|
||||
}
|
||||
|
||||
var resource = that.aggregateValuesIndex < that.aggregateValues.length
|
||||
? that.aggregateValues[++that.aggregateValuesIndex]
|
||||
: undefined;
|
||||
|
||||
return callback(undefined, resource);
|
||||
};
|
||||
|
||||
if (that.aggregateValues == undefined) {
|
||||
that._getAggregateResult(function (err, resources) {
|
||||
return _nextItem(err, resources);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return _nextItem(undefined, that.aggregateValues);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the AggregateEndpointComponent.
|
||||
* @memberof AggregateEndpointComponent
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function (callback) {
|
||||
var that = this;
|
||||
if (that.aggregateValues == undefined) {
|
||||
that._getAggregateResult(function (err, resources) {
|
||||
return callback(undefined, that.aggregateValues[that.aggregateValuesIndex]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return callback(undefined, that.aggregateValues[that.aggregateValuesIndex]);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process.
|
||||
* @memberof AggregateEndpointComponent
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the AggregateEndpointComponent.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return this.aggregateValues != null && this.aggregateValuesIndex < this.aggregateValues.length - 1;
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.OrderByEndpointComponent = OrderByEndpointComponent;
|
||||
exports.TopEndpointComponent = TopEndpointComponent;
|
||||
exports.AggregateEndpointComponent = AggregateEndpointComponent;
|
||||
}
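A minimal sketch of the TOP component; the in-memory execution context below is purely an illustrative stand-in for the real parallel execution context, and the module file name is an assumption.

var endpointComponent = require("./endpointComponent"); // assumed file name

// Anything exposing nextItem / current / hasMoreResults can sit underneath a component.
function ArrayExecutionContext(items) { this.items = items; this.index = 0; }
ArrayExecutionContext.prototype.nextItem = function (callback) {
    callback(undefined, this.items[this.index++], undefined);
};
ArrayExecutionContext.prototype.current = function (callback) {
    callback(undefined, this.items[this.index], undefined);
};
ArrayExecutionContext.prototype.hasMoreResults = function () {
    return this.index < this.items.length;
};

// TOP 2 over five buffered items: only the first two flow through.
var top = new endpointComponent.TopEndpointComponent(new ArrayExecutionContext([1, 2, 3, 4, 5]), 2);
top.nextItem(function (err, item) { console.log(item); }); // 1
top.nextItem(function (err, item) { console.log(item); }); // 2
top.nextItem(function (err, item) { console.log(item); }); // undefined, the top count is exhausted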
|
|
@ -1,78 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, Constants = require("../constants")
|
||||
, assert = require("assert")
|
||||
, util = require("util");
|
||||
|
||||
//SCRIPT START
|
||||
var HeaderUtils = Base.defineClass(
|
||||
undefined, undefined,
|
||||
{
|
||||
getRequestChargeIfAny: function (headers) {
|
||||
if (typeof (headers) == 'number') {
|
||||
return headers;
|
||||
} else if (typeof (headers) == 'string') {
|
||||
return parseFloat(headers);
|
||||
}
|
||||
|
||||
if (headers) {
|
||||
var rc = headers[Constants.HttpHeaders.RequestCharge];
|
||||
if (rc) {
|
||||
return parseFloat(rc);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
},
|
||||
|
||||
getInitialHeader: function () {
|
||||
var headers = {};
|
||||
headers[Constants.HttpHeaders.RequestCharge] = 0;
|
||||
return headers;
|
||||
},
|
||||
|
||||
mergeHeaders: function (headers, toBeMergedHeaders) {
|
||||
if (headers[Constants.HttpHeaders.RequestCharge] == undefined) {
|
||||
headers[Constants.HttpHeaders.RequestCharge] = 0;
|
||||
}
|
||||
if (!toBeMergedHeaders) {
|
||||
return;
|
||||
}
|
||||
headers[Constants.HttpHeaders.RequestCharge] += this.getRequestChargeIfAny(toBeMergedHeaders);
|
||||
if (toBeMergedHeaders[Constants.HttpHeaders.IsRUPerMinuteUsed]) {
|
||||
headers[Constants.HttpHeaders.IsRUPerMinuteUsed] = toBeMergedHeaders[Constants.HttpHeaders.IsRUPerMinuteUsed];
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = HeaderUtils;
|
||||
}
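A small sketch of the request-charge accounting above; the header name comes from the Constants module this file already requires.

var Constants = require("../constants");
var HeaderUtils = require("./headerUtils");

var requestCharge = Constants.HttpHeaders.RequestCharge;

var merged = HeaderUtils.getInitialHeader();  // starts the charge at 0
var page1 = {}; page1[requestCharge] = "2.5";
var page2 = {}; page2[requestCharge] = "1.25";

HeaderUtils.mergeHeaders(merged, page1);
HeaderUtils.mergeHeaders(merged, page2);

console.log(HeaderUtils.getRequestChargeIfAny(merged)); // 3.75, the summed charge across pages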
|
|
@ -1,166 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, DocumentProducer = require("./documentProducer")
|
||||
, assert = require("assert")
|
||||
, util = require("util");
|
||||
|
||||
//SCRIPT START
|
||||
var OrderByDocumentProducerComparator = Base.defineClass(
|
||||
function (sortOrder) {
|
||||
this.sortOrder = sortOrder;
|
||||
this.targetPartitionKeyRangeDocProdComparator = function () {
|
||||
return function (docProd1, docProd2) {
|
||||
var a = docProd1.getTargetParitionKeyRange()['minInclusive'];
|
||||
var b = docProd2.getTargetParitionKeyRange()['minInclusive'];
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
};
|
||||
};
|
||||
|
||||
this._typeOrdComparator = Object.freeze({
|
||||
NoValue: {
|
||||
ord: 0
|
||||
},
|
||||
undefined: {
|
||||
ord: 1
|
||||
},
|
||||
boolean: {
|
||||
ord: 2,
|
||||
compFunc: function (a, b) {
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
}
|
||||
},
|
||||
number: {
|
||||
ord: 4,
|
||||
compFunc: function (a, b) {
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
}
|
||||
},
|
||||
string: {
|
||||
ord: 5,
|
||||
compFunc: function (a, b) {
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
{
|
||||
compare: function (docProd1, docProd2) {
|
||||
// Need to check for split, since we don't want to dereference "item" of undefined / exception
|
||||
if (docProd1.gotSplit()) {
|
||||
return -1;
|
||||
}
|
||||
if (docProd2.gotSplit()) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
var orderByItemsRes1 = this.getOrderByItems(docProd1.peekBufferedItems()[0]);
|
||||
var orderByItemsRes2 = this.getOrderByItems(docProd2.peekBufferedItems()[0]);
|
||||
|
||||
// validate order by items and types
|
||||
// TODO: once V1 order by on different types is fixed this needs to change
|
||||
this.validateOrderByItems(orderByItemsRes1, orderByItemsRes2);
|
||||
|
||||
// no async call in the for loop
|
||||
for (var i = 0; i < orderByItemsRes1.length; i++) {
|
||||
// compares the orderby items one by one
|
||||
var compRes = this.compareOrderByItem(orderByItemsRes1[i], orderByItemsRes2[i]);
|
||||
if (compRes !== 0) {
|
||||
if (this.sortOrder[i] === 'Ascending') {
|
||||
return compRes;
|
||||
} else if (this.sortOrder[i] === 'Descending') {
|
||||
return -compRes;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.targetPartitionKeyRangeDocProdComparator()(docProd1, docProd2);
|
||||
},
|
||||
|
||||
compareValue: function (item1, type1, item2, type2) {
|
||||
var type1Ord = this._typeOrdComparator[type1].ord;
|
||||
var type2Ord = this._typeOrdComparator[type2].ord;
|
||||
var typeCmp = type1Ord - type2Ord;
|
||||
|
||||
if (typeCmp !== 0) {
|
||||
// if the types are different, use type ordinal
|
||||
return typeCmp;
|
||||
}
|
||||
|
||||
// both are of the same type
|
||||
if ((type1Ord === this._typeOrdComparator['undefined'].ord) || (type1Ord === this._typeOrdComparator['NoValue'].ord)) {
|
||||
// if both types are undefined or NoValue they are equal
|
||||
return 0;
|
||||
}
|
||||
|
||||
var compFunc = this._typeOrdComparator[type1].compFunc;
|
||||
assert.notEqual(compFunc, undefined, "cannot find the comparison function");
|
||||
// same type and the type is defined, so compare the items
|
||||
return compFunc(item1, item2);
|
||||
},
|
||||
|
||||
compareOrderByItem: function (orderByItem1, orderByItem2) {
|
||||
var type1 = this.getType(orderByItem1);
|
||||
var type2 = this.getType(orderByItem2);
|
||||
return this.compareValue(orderByItem1['item'], type1, orderByItem2['item'], type2);
|
||||
},
|
||||
|
||||
validateOrderByItems: function (res1, res2) {
|
||||
this._throwIf(res1.length != res2.length, util.format("Expected %s, but got %s.", type1, type2));
|
||||
this._throwIf(res1.length != this.sortOrder.length, 'orderByItems cannot have a different size than sort orders.');
|
||||
|
||||
for (var i = 0; i < this.sortOrder.length; i++) {
|
||||
var type1 = this.getType(res1[i]);
|
||||
var type2 = this.getType(res2[i]);
|
||||
this._throwIf(type1 !== type2, util.format("Expected %s, but got %s.", type1, type2));
|
||||
}
|
||||
},
|
||||
|
||||
getType: function (orderByItem) {
|
||||
if (!('item' in orderByItem)) {
|
||||
return 'NoValue';
|
||||
}
|
||||
var type = typeof (orderByItem['item']);
|
||||
this._throwIf(!(type in this._typeOrdComparator), util.format("unrecognizable type %s", type));
|
||||
return type;
|
||||
},
|
||||
|
||||
getOrderByItems: function (res) {
|
||||
return res['orderByItems'];
|
||||
},
|
||||
|
||||
_throwIf: function (condition, msg) {
|
||||
if (condition) {
|
||||
throw Error(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = OrderByDocumentProducerComparator;
|
||||
}
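
For reference, the type-ordinal rule the comparator above encodes can be illustrated with a small standalone sketch (plain objects stand in for orderby items; this mirrors the scheme, it is not the SDK class itself):

```js
// Type ordinals copied from the comparator above: missing item < undefined < boolean < number < string.
var typeOrd = { NoValue: 0, undefined: 1, boolean: 2, number: 4, string: 5 };

function getType(orderByItem) {
    return ("item" in orderByItem) ? typeof orderByItem.item : "NoValue";
}

function compareOrderByItem(a, b) {
    var t1 = getType(a), t2 = getType(b);
    if (typeOrd[t1] !== typeOrd[t2]) { return typeOrd[t1] - typeOrd[t2]; } // different types: ordinal wins
    if (t1 === "NoValue" || t1 === "undefined") { return 0; }              // nothing comparable: equal
    return a.item === b.item ? 0 : (a.item > b.item ? 1 : -1);             // same type: compare by value
}

console.log(compareOrderByItem({ item: true }, { item: 42 }) < 0); // true - booleans order before numbers
console.log(compareOrderByItem({ item: 2 }, { item: 10 }));        // -1   - same type compares by value
console.log(compareOrderByItem({}, { item: "a" }) < 0);            // true - a missing item ("NoValue") orders first
```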
|
|
@ -1,76 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, ParallelQueryExecutionContextBase = require('./parallelQueryExecutionContextBase')
|
||||
, OrderByDocumentProducerComparator = require('./orderByDocumentProducerComparator')
|
||||
, assert = require('assert');
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var OrderByQueryExecutionContext = Base.derive(
|
||||
ParallelQueryExecutionContextBase,
|
||||
/**
|
||||
* Provides the OrderByQueryExecutionContext.
|
||||
* This class is capable of handling orderby queries and derives from ParallelQueryExecutionContextBase.
|
||||
*
|
||||
* When handling a parallelized query, it instantiates one instance of
|
||||
* DocumentProducer per target partition key range and aggregates the result of each.
|
||||
*
|
||||
* @constructor ParallelQueryExecutionContext
|
||||
* @param {DocumentClient} documentclient - The service endpoint to use to create the client.
|
||||
* @param {string} collectionLink - The Collection Link
|
||||
* @param {FeedOptions} [options] - Represents the feed options.
|
||||
* @param {object} partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
|
||||
// Calling on base class constructor
|
||||
ParallelQueryExecutionContextBase.call(this, documentclient, collectionLink, query, options, partitionedQueryExecutionInfo);
|
||||
this._orderByComparator = new OrderByDocumentProducerComparator(this.sortOrders);
|
||||
},
|
||||
{
|
||||
// Instance members are inherited
|
||||
|
||||
// Overriding documentProducerComparator for OrderByQueryExecutionContexts
|
||||
/**
|
||||
* Provides a Comparator for document producers which respects orderby sort order.
|
||||
* @returns {object} - Comparator Function
|
||||
* @ignore
|
||||
*/
|
||||
documentProducerComparator: function (docProd1, docProd2) {
|
||||
return this._orderByComparator.compare(docProd1, docProd2);
|
||||
},
|
||||
},
|
||||
{
|
||||
// Static members are inherited
|
||||
}
|
||||
);
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = OrderByQueryExecutionContext;
|
||||
}
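
The comparator above is what ParallelQueryExecutionContextBase (later in this diff) plugs into its priorityqueuejs queue of DocumentProducers. A toy k-way merge over plain arrays sketches that interaction, in particular why the base class swaps the arguments, since priorityqueuejs pops the largest element first (assumes `npm install priorityqueuejs`; the producers here are plain objects, not real DocumentProducers):

```js
var PriorityQueue = require("priorityqueuejs");

// Three pre-sorted "partitions" stand in for DocumentProducers.
var producers = [
    { items: [1, 4, 9], idx: 0 },
    { items: [2, 3, 8], idx: 0 },
    { items: [5, 6, 7], idx: 0 }
];

// Smaller head item should come out first; the (b, a) swap turns the max-queue into a min-merge,
// exactly as in the base class's orderByPQ.
var compare = function (p1, p2) { return p1.items[p1.idx] - p2.items[p2.idx]; };
var pq = new PriorityQueue(function (a, b) { return compare(b, a); });
producers.forEach(function (p) { pq.enq(p); });

var merged = [];
while (pq.size() > 0) {
    var top = pq.deq();              // producer with the smallest head item
    merged.push(top.items[top.idx]);
    top.idx += 1;
    if (top.idx < top.items.length) { pq.enq(top); } // still has items: back into the queue
}
console.log(merged); // [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ]
```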
|
|
@ -1,116 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, ParallelQueryExecutionContextBase = require('./parallelQueryExecutionContextBase')
|
||||
, Constants = require("../constants")
|
||||
, InMemoryCollectionRoutingMap = require("../routing/inMemoryCollectionRoutingMap")
|
||||
, HeaderUtils = require("./headerUtils")
|
||||
, assert = require('assert');
|
||||
|
||||
var _PartitionKeyRange = InMemoryCollectionRoutingMap._PartitionKeyRange;
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var ParallelQueryExecutionContext = Base.derive(
|
||||
ParallelQueryExecutionContextBase,
|
||||
/**
|
||||
* Provides the ParallelQueryExecutionContext.
|
||||
* This class is capable of handling parallelized queries and derives from ParallelQueryExecutionContextBase.
|
||||
*
|
||||
* @constructor ParallelQueryExecutionContext
|
||||
* @param {DocumentClient} documentclient - The service endpoint to use to create the client.
|
||||
* @param {string} collectionLink - The Collection Link
|
||||
* @param {FeedOptions} [options] - Represents the feed options.
|
||||
* @param {object} partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
|
||||
// Calling on base class constructor
|
||||
ParallelQueryExecutionContextBase.call(this, documentclient, collectionLink, query, options, partitionedQueryExecutionInfo);
|
||||
},
|
||||
{
|
||||
// Instance members are inherited
|
||||
|
||||
// Overriding documentProducerComparator for ParallelQueryExecutionContexts
|
||||
/**
|
||||
* Provides a Comparator for document producers using the min value of the corresponding target partition.
|
||||
* @returns {object} - Comparator Function
|
||||
* @ignore
|
||||
*/
|
||||
documentProducerComparator: function (docProd1, docProd2) {
|
||||
var a = docProd1.getTargetParitionKeyRange()['minInclusive'];
|
||||
var b = docProd2.getTargetParitionKeyRange()['minInclusive'];
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
},
|
||||
|
||||
_buildContinuationTokenFrom: function (documentProducer) {
|
||||
// given the document producer constructs the continuation token
|
||||
if (documentProducer.allFetched && documentProducer.peekBufferedItems().length == 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
var min = documentProducer.targetPartitionKeyRange[_PartitionKeyRange.MinInclusive];
|
||||
var max = documentProducer.targetPartitionKeyRange[_PartitionKeyRange.MaxExclusive];
|
||||
var range = {
|
||||
'min': min,
|
||||
'max': max,
|
||||
'id': documentProducer.targetPartitionKeyRange.id
|
||||
};
|
||||
|
||||
var withNullDefault = function (token) {
|
||||
if (token) {
|
||||
return token;
|
||||
} else if (token === null || token === undefined) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
var documentProducerContinuationToken = undefined;
|
||||
|
||||
if (documentProducer.peekBufferedItems().length > 0) {
|
||||
// has unused buffered item so use the previous continuation token
|
||||
documentProducerContinuationToken = documentProducer.previousContinuationToken;
|
||||
} else {
|
||||
documentProducerContinuationToken = documentProducer.continuationToken;
|
||||
}
|
||||
|
||||
return {
|
||||
'token': withNullDefault(documentProducerContinuationToken),
|
||||
'range': range
|
||||
};
|
||||
},
|
||||
},
|
||||
{
|
||||
// Static members are inherited
|
||||
}
|
||||
);
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = ParallelQueryExecutionContext;
|
||||
}
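
_buildContinuationTokenFrom above produces a composite continuation token, i.e. the backend continuation plus the partition key range it belongs to. A hedged sketch of that shape and how a caller would round-trip it through FeedOptions (all field values are made up for illustration):

```js
// Hypothetical composite continuation token; real token/range values come from the service.
var compositeContinuationToken = {
    token: "+RID:fakeRid==#RT:1#TRC:10",                  // backend continuation for that partition
    range: { min: "", max: "05C1DFFFFFFFFC", id: "0" }    // partition key range the token applies to
};

// Round-trip through the feed options as a JSON string, which is what
// getPartitionKeyRangesForContinuation in the base class parses back out.
var options = { maxItemCount: 10, continuation: JSON.stringify(compositeContinuationToken) };
console.log(JSON.parse(options.continuation).range.id); // "0"
```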
|
|
@ -1,562 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, Constants = require("../constants")
|
||||
, PriorityQueue = require("priorityqueuejs")
|
||||
, SmartRoutingMapProvider = require("../routing/smartRoutingMapProvider")
|
||||
, InMemoryCollectionRoutingMap = require("../routing/inMemoryCollectionRoutingMap")
|
||||
, DocumentProducer = require("./documentProducer")
|
||||
, PartitionedQueryExecutionContextInfoParser = require("./partitionedQueryExecutionContextInfoParser")
|
||||
, bs = require("binary-search-bounds")
|
||||
, HeaderUtils = require("./headerUtils")
|
||||
, semaphore = require("semaphore")
|
||||
, StatusCodes = require("../statusCodes").StatusCodes
|
||||
, SubStatusCodes = require("../statusCodes").SubStatusCodes
|
||||
, assert = require('assert')
, util = require("util"); // util.format is used for the error messages built in nextItem
|
||||
|
||||
var QueryRange = InMemoryCollectionRoutingMap.QueryRange;
|
||||
var _PartitionKeyRange = InMemoryCollectionRoutingMap._PartitionKeyRange;
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var ParallelQueryExecutionContextBase = Base.defineClass(
|
||||
/**
|
||||
* Provides the ParallelQueryExecutionContextBase.
|
||||
* This is the base class that ParallelQueryExecutionContext and OrderByQueryExecutionContext will derive from.
|
||||
*
|
||||
* When handling a parallelized query, it instantiates one instance of
|
||||
* DocumentProducer per target partition key range and aggregates the result of each.
|
||||
*
|
||||
* @constructor ParallelQueryExecutionContextBase
|
||||
* @param {DocumentClient} documentclient - The service endpoint to use to create the client.
|
||||
* @param {string} collectionLink - The Collection Link
|
||||
* @param {FeedOptions} [options] - Represents the feed options.
|
||||
* @param {object} partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
|
||||
this.documentclient = documentclient;
|
||||
this.collectionLink = collectionLink;
|
||||
this.query = query;
|
||||
this.options = options;
|
||||
this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo;
|
||||
|
||||
this.err = undefined;
|
||||
this.state = ParallelQueryExecutionContextBase.STATES.started;
|
||||
this.routingProvider = new SmartRoutingMapProvider(this.documentclient);
|
||||
this.sortOrders = PartitionedQueryExecutionContextInfoParser.parseOrderBy(this.partitionedQueryExecutionInfo);
|
||||
this.state = ParallelQueryExecutionContextBase.STATES.started;
|
||||
|
||||
if (options === undefined || options["maxItemCount"] === undefined) {
|
||||
this.pageSize = ParallelQueryExecutionContextBase.DEFAULT_PAGE_SIZE;
|
||||
this.options["maxItemCount"] = this.pageSize;
|
||||
} else {
|
||||
this.pageSize = options["maxItemCount"];
|
||||
}
|
||||
|
||||
this.requestContinuation = options ? options.continuation : null;
|
||||
// response headers of undergoing operation
|
||||
this._respHeaders = HeaderUtils.getInitialHeader();
|
||||
var that = this;
|
||||
|
||||
// Make priority queue for documentProducers
|
||||
// The comparator is supplied by the derived class
|
||||
this.orderByPQ = new PriorityQueue(function (a, b) { return that.documentProducerComparator(b, a); });
|
||||
// Creating the documentProducers
|
||||
this.sem = new semaphore(1);
|
||||
// Creating callback for semaphore
|
||||
var createDocumentProducersAndFillUpPriorityQueueFunc = function () {
|
||||
// ensure the lock is released after finishing up
|
||||
that._onTargetPartitionRanges(function (err, targetPartitionRanges) {
|
||||
if (err) {
|
||||
that.err = err;
|
||||
// release the lock
|
||||
that.sem.leave();
|
||||
return;
|
||||
}
|
||||
|
||||
that.waitingForInternalExecutionContexts = targetPartitionRanges.length;
|
||||
// default to 1 if none is provided.
|
||||
var maxDegreeOfParallelism = options.maxDegreeOfParallelism || 1;
|
||||
if (maxDegreeOfParallelism > 0) {
|
||||
// at most you will need 1 documentProducer for each partition
|
||||
maxDegreeOfParallelism = Math.min(maxDegreeOfParallelism, targetPartitionRanges.length)
|
||||
} else {
|
||||
// if user provided a negative number then we automatically pick 1 documentProducer per partition
|
||||
maxDegreeOfParallelism = targetPartitionRanges.length;
|
||||
}
|
||||
|
||||
var parallelismSem = semaphore(maxDegreeOfParallelism);
|
||||
var filteredPartitionKeyRanges = [];
|
||||
// The document producers generated from filteredPartitionKeyRanges
|
||||
var targetPartitionQueryExecutionContextList = [];
|
||||
|
||||
if (that.requestContinuation) {
|
||||
// Need to create the first documentProducer with the suppliedCompositeContinuationToken
|
||||
try {
|
||||
var suppliedCompositeContinuationToken = JSON.parse(that.requestContinuation);
|
||||
filteredPartitionKeyRanges = that.getPartitionKeyRangesForContinuation(
|
||||
suppliedCompositeContinuationToken, targetPartitionRanges
|
||||
);
|
||||
if (filteredPartitionKeyRanges.length > 0) {
|
||||
targetPartitionQueryExecutionContextList.push(
|
||||
that._createTargetPartitionQueryExecutionContext(
|
||||
filteredPartitionKeyRanges[0], suppliedCompositeContinuationToken.token
|
||||
)
|
||||
);
|
||||
// Slicing the first element off, since we already made a documentProducer for it
|
||||
filteredPartitionKeyRanges = filteredPartitionKeyRanges.slice(1);
|
||||
}
|
||||
} catch (e) {
|
||||
that.err = e;
|
||||
that.sem.leave();
|
||||
}
|
||||
} else {
|
||||
filteredPartitionKeyRanges = targetPartitionRanges;
|
||||
}
|
||||
|
||||
// Create one documentProducer for each partitionTargetRange
|
||||
filteredPartitionKeyRanges.forEach(
|
||||
function (partitionTargetRange) {
|
||||
// no async callback
|
||||
targetPartitionQueryExecutionContextList.push(
|
||||
that._createTargetPartitionQueryExecutionContext(partitionTargetRange)
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
// Fill up our priority queue with documentProducers
|
||||
targetPartitionQueryExecutionContextList.forEach(
|
||||
function (documentProducer) {
|
||||
// has async callback
|
||||
var throttledFunc = function () {
|
||||
documentProducer.current(function (err, document, headers) {
|
||||
try {
|
||||
that._mergeWithActiveResponseHeaders(headers);
|
||||
if (err) {
|
||||
that.err = err;
|
||||
return;
|
||||
}
|
||||
|
||||
if (document == undefined) {
|
||||
// no results on this one
|
||||
return;
|
||||
}
|
||||
// if there are matching results in the target ex range add it to the priority queue
|
||||
try {
|
||||
that.orderByPQ.enq(documentProducer);
|
||||
} catch (e) {
|
||||
that.err = e;
|
||||
}
|
||||
} finally {
|
||||
parallelismSem.leave();
|
||||
that._decrementInitiationLock();
|
||||
}
|
||||
});
|
||||
}
|
||||
parallelismSem.take(throttledFunc);
|
||||
}
|
||||
);
|
||||
});
|
||||
};
|
||||
this.sem.take(createDocumentProducersAndFillUpPriorityQueueFunc);
|
||||
},
|
||||
|
||||
{
|
||||
getPartitionKeyRangesForContinuation: function (suppliedCompositeContinuationToken, partitionKeyRanges) {
|
||||
|
||||
var startRange = {};
|
||||
startRange[_PartitionKeyRange.MinInclusive] = suppliedCompositeContinuationToken.range.min;
|
||||
startRange[_PartitionKeyRange.MaxExclusive] = suppliedCompositeContinuationToken.range.max;
|
||||
|
||||
var vbCompareFunction = function (x, y) {
|
||||
if (x[_PartitionKeyRange.MinInclusive] > y[_PartitionKeyRange.MinInclusive]) return 1;
|
||||
if (x[_PartitionKeyRange.MinInclusive] < y[_PartitionKeyRange.MinInclusive]) return -1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
var minIndex = bs.le(partitionKeyRanges, startRange, vbCompareFunction);
|
||||
// that's an error
|
||||
|
||||
if (minIndex > 0) {
|
||||
throw new Error("BadRequestException: InvalidContinuationToken");
|
||||
}
|
||||
|
||||
// return slice of the partition key ranges
|
||||
return partitionKeyRanges.slice(minIndex, partitionKeyRanges.length - minIndex);
|
||||
},
|
||||
|
||||
_decrementInitiationLock: function () {
|
||||
// decrements waitingForInternalExecutionContexts
|
||||
// if waitingForInternalExecutionContexts reaches 0 releases the semaphore and changes the state
|
||||
this.waitingForInternalExecutionContexts = this.waitingForInternalExecutionContexts - 1;
|
||||
if (this.waitingForInternalExecutionContexts === 0) {
|
||||
this.sem.leave();
|
||||
if (this.orderByPQ.size() === 0) {
|
||||
this.state = ParallelQueryExecutionContextBase.STATES.inProgress;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
_mergeWithActiveResponseHeaders: function (headers) {
|
||||
HeaderUtils.mergeHeaders(this._respHeaders, headers);
|
||||
},
|
||||
|
||||
_getAndResetActiveResponseHeaders: function () {
|
||||
var ret = this._respHeaders;
|
||||
this._respHeaders = HeaderUtils.getInitialHeader();
|
||||
return ret;
|
||||
},
|
||||
|
||||
_onTargetPartitionRanges: function (callback) {
|
||||
// invokes the callback when the target partition ranges are ready
|
||||
var parsedRanges = PartitionedQueryExecutionContextInfoParser.parseQueryRanges(this.partitionedQueryExecutionInfo);
|
||||
var queryRanges = parsedRanges.map(function (item) { return QueryRange.parseFromDict(item); });
|
||||
return this.routingProvider.getOverlappingRanges(callback, this.collectionLink, queryRanges);
|
||||
},
|
||||
|
||||
/**
|
||||
* Gets the replacement ranges for a partitionkeyrange that has been split
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
*/
|
||||
_getReplacementPartitionKeyRanges: function (callback, documentProducer) {
|
||||
var routingMapProvider = this.documentclient.partitionKeyDefinitionCache;
|
||||
var partitionKeyRange = documentProducer.targetPartitionKeyRange;
|
||||
// Download the new routing map
|
||||
this.routingProvider = new SmartRoutingMapProvider(this.documentclient);
|
||||
// Get the queryRange that relates to this partitionKeyRange
|
||||
var queryRange = QueryRange.parsePartitionKeyRange(partitionKeyRange);
|
||||
this.routingProvider.getOverlappingRanges(callback, this.collectionLink, [queryRange]);
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes the current document producer from the priqueue,
|
||||
* replaces that document producer with child document producers,
|
||||
* then re-executes the originFunction with the corrected executionContext
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
*/
|
||||
_repairExecutionContext: function (originFunction) {
|
||||
// Get the replacement ranges
|
||||
var that = this;
|
||||
// Removing the invalid documentProducer from the orderByPQ
|
||||
var parentDocumentProducer = that.orderByPQ.deq();
|
||||
var afterReplacementRanges = function (err, replacementPartitionKeyRanges) {
|
||||
if (err) {
|
||||
that.err = err;
|
||||
return;
|
||||
}
|
||||
var replacementDocumentProducers = [];
|
||||
// Create the replacement documentProducers
|
||||
replacementPartitionKeyRanges.forEach(function (partitionKeyRange) {
|
||||
// Create replacement document producers with the parent's continuationToken
|
||||
var replacementDocumentProducer = that._createTargetPartitionQueryExecutionContext(
|
||||
partitionKeyRange,
|
||||
parentDocumentProducer.continuationToken);
|
||||
replacementDocumentProducers.push(replacementDocumentProducer);
|
||||
});
|
||||
// We need to check whether the documentProducers even have anything left to fetch before enqueueing them
|
||||
var checkAndEnqueueDocumentProducer = function (documentProducerToCheck, checkNextDocumentProducerCallback) {
|
||||
documentProducerToCheck.current(function (err, afterItem, headers) {
|
||||
if (err) {
|
||||
// Something actually bad happened
|
||||
that.err = err;
|
||||
return;
|
||||
} else if (afterItem === undefined) {
|
||||
// no more results left in this document producer, so we don't enqueue it
|
||||
} else {
|
||||
// Safe to put document producer back in the queue
|
||||
that.orderByPQ.enq(documentProducerToCheck);
|
||||
}
|
||||
|
||||
checkNextDocumentProducerCallback();
|
||||
});
|
||||
};
|
||||
var checkAndEnqueueDocumentProducers = function(replacementDocumentProducers) {
|
||||
if (replacementDocumentProducers.length > 0) {
|
||||
// We still have a replacementDocumentProducer to check
|
||||
var replacementDocumentProducer = replacementDocumentProducers.shift();
|
||||
checkAndEnqueueDocumentProducer(
|
||||
replacementDocumentProducer,
|
||||
function() { checkAndEnqueueDocumentProducers(replacementDocumentProducers); }
|
||||
);
|
||||
} else {
|
||||
// re-executes the originFunction with the corrected executionContext
|
||||
return originFunction();
|
||||
}
|
||||
}
|
||||
// Invoke the recursive function to get the ball rolling
|
||||
checkAndEnqueueDocumentProducers(replacementDocumentProducers);
|
||||
};
|
||||
this._getReplacementPartitionKeyRanges(afterReplacementRanges, parentDocumentProducer);
|
||||
},
|
||||
|
||||
_needPartitionKeyRangeCacheRefresh: function (error) {
|
||||
return (error.code === StatusCodes.Gone) && ('substatus' in error) && (error['substatus'] === SubStatusCodes.PartitionKeyRangeGone);
|
||||
},
|
||||
|
||||
/**
|
||||
* Checks to see if the executionContext needs to be repaired.
|
||||
* if so it repairs the execution context and executes the ifCallback,
|
||||
* else it continues with the current execution context and executes the elseCallback
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
*/
|
||||
_repairExecutionContextIfNeeded: function (ifCallback, elseCallback) {
|
||||
var that = this;
|
||||
var documentProducer = that.orderByPQ.peek();
|
||||
// Check if split happened
|
||||
documentProducer.current(function (err, element) {
|
||||
if (err) {
|
||||
if (that._needPartitionKeyRangeCacheRefresh(err)) {
|
||||
// Split has happened so we need to repair the execution context before continuing
|
||||
return that._repairExecutionContext(ifCallback);
|
||||
} else {
|
||||
// Something actually bad happened ...
|
||||
that.err = err;
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
// Just continue with the original execution context
|
||||
return elseCallback();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Execute a provided function on the next element in the ParallelQueryExecutionContextBase.
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
if (this.err) {
|
||||
// if there is a prior error return error
|
||||
return callback(this.err, undefined);
|
||||
}
|
||||
|
||||
var that = this;
|
||||
this.sem.take(function () {
|
||||
// NOTE: lock must be released before invoking quitting
|
||||
if (that.err) {
|
||||
// release the lock before invoking callback
|
||||
that.sem.leave();
|
||||
// if there is a prior error return error
|
||||
return callback(that.err, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
if (that.orderByPQ.size() === 0) {
|
||||
// there are no more results
|
||||
that.state = ParallelQueryExecutionContextBase.STATES.ended;
|
||||
// release the lock before invoking callback
|
||||
that.sem.leave();
|
||||
return callback(undefined, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
var ifCallback = function () {
|
||||
// Release the semaphore to avoid deadlock
|
||||
that.sem.leave();
|
||||
// Re-execute the function
|
||||
return that.nextItem(callback);
|
||||
};
|
||||
var elseCallback = function () {
|
||||
try {
|
||||
var documentProducer = that.orderByPQ.deq();
|
||||
} catch (e) {
|
||||
// if comparing elements of the priority queue throws exception
|
||||
// set that error and return error
|
||||
that.err = e;
|
||||
// release the lock before invoking callback
|
||||
that.sem.leave();
|
||||
return callback(that.err, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
documentProducer.nextItem(function (err, item, headers) {
|
||||
that._mergeWithActiveResponseHeaders(headers);
|
||||
if (err) {
|
||||
// this should never happen
|
||||
// because the documentProducer already has buffered an item
|
||||
// assert err === undefined
|
||||
that.err =
|
||||
new Error(
|
||||
util.format(
|
||||
"Extracted DocumentProducer from the priority queue fails to get the buffered item. Due to %s",
|
||||
JSON.stringify(err)));
|
||||
// release the lock before invoking callback
|
||||
that.sem.leave();
|
||||
return callback(that.err, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
if (item === undefined) {
|
||||
// this should never happen
|
||||
// because the documentProducer already has buffered an item
|
||||
// assert item !== undefined
|
||||
that.err =
|
||||
new Error(
|
||||
util.format(
|
||||
"Extracted DocumentProducer from the priority queue doesn't have any buffered item!"));
|
||||
// release the lock before invoking callback
|
||||
that.sem.leave();
|
||||
return callback(that.err, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
// we need to put back the document producer to the queue if it has more elements.
|
||||
// the lock will be released after we know document producer must be put back in the queue or not
|
||||
documentProducer.current(function (err, afterItem, headers) {
|
||||
try {
|
||||
that._mergeWithActiveResponseHeaders(headers);
|
||||
if (err) {
|
||||
if (that._needPartitionKeyRangeCacheRefresh(err)) {
|
||||
// We want the document producer enqueued
|
||||
// So that later parts of the code can repair the execution context
|
||||
that.orderByPQ.enq(documentProducer);
|
||||
return;
|
||||
} else {
|
||||
// Something actually bad happened
|
||||
that.err = err;
|
||||
return;
|
||||
}
|
||||
} else if (afterItem === undefined) {
|
||||
// no more results are left in this document producer
|
||||
return;
|
||||
} else {
|
||||
try {
|
||||
var headItem = documentProducer.fetchResults[0];
|
||||
assert.notStrictEqual(headItem, undefined,
|
||||
'Extracted DocumentProducer from PQ is invalid state with no result!');
|
||||
that.orderByPQ.enq(documentProducer);
|
||||
} catch (e) {
|
||||
// if comparing elements in priority queue throws exception
|
||||
// set error
|
||||
that.err = e;
|
||||
}
|
||||
return;
|
||||
}
|
||||
} finally {
|
||||
// release the lock before returning
|
||||
that.sem.leave();
|
||||
}
|
||||
});
|
||||
|
||||
// invoke the callback on the item
|
||||
return callback(undefined, item, that._getAndResetActiveResponseHeaders());
|
||||
});
|
||||
}
|
||||
that._repairExecutionContextIfNeeded(ifCallback, elseCallback);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the ParallelQueryExecutionContextBase.
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function (callback) {
|
||||
if (this.err) {
|
||||
return callback(this.err, undefined, this._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
var that = this;
|
||||
this.sem.take(function () {
|
||||
try {
|
||||
if (that.err) {
|
||||
return callback(that.err, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
if (that.orderByPQ.size() === 0) {
|
||||
return callback(undefined, undefined, that._getAndResetActiveResponseHeaders());
|
||||
}
|
||||
|
||||
var ifCallback = function () {
|
||||
// Re-execute the function
|
||||
return that.current(callback);
|
||||
};
|
||||
|
||||
var elseCallback = function () {
|
||||
var documentProducer = that.orderByPQ.peek();
|
||||
documentProducer.current(callback);
|
||||
};
|
||||
|
||||
that._repairExecutionContextIfNeeded(ifCallback, elseCallback);
|
||||
} finally {
|
||||
that.sem.leave();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process based on the value of the continuation token or the elements remaining on the current batch in the QueryIterator.
|
||||
* @memberof ParallelQueryExecutionContextBase
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the ParallelQueryExecutionContextBase.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return !(this.state === ParallelQueryExecutionContextBase.STATES.ended || this.err !== undefined);
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates document producers
|
||||
*/
|
||||
_createTargetPartitionQueryExecutionContext: function (partitionKeyTargetRange, continuationToken) {
|
||||
// creates target partition range Query Execution Context
|
||||
var rewrittenQuery = PartitionedQueryExecutionContextInfoParser.parseRewrittenQuery(this.partitionedQueryExecutionInfo);
|
||||
var query = this.query;
|
||||
if (typeof (query) === 'string') {
|
||||
query = { 'query': query };
|
||||
}
|
||||
|
||||
var formatPlaceHolder = "{documentdb-formattableorderbyquery-filter}";
|
||||
if (rewrittenQuery) {
|
||||
query = JSON.parse(JSON.stringify(query));
|
||||
// We hardcode the formattable filter to true for now
|
||||
rewrittenQuery = rewrittenQuery.replace(formatPlaceHolder, "true");
|
||||
query['query'] = rewrittenQuery;
|
||||
}
|
||||
|
||||
var options = JSON.parse(JSON.stringify(this.options));
|
||||
if (continuationToken) {
|
||||
options.continuation = continuationToken;
|
||||
} else {
|
||||
options.continuation = undefined;
|
||||
}
|
||||
|
||||
return new DocumentProducer(this.documentclient, this.collectionLink, query, partitionKeyTargetRange, options);
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
STATES: Object.freeze({ started: "started", inProgress: "inProgress", ended: "ended" }),
|
||||
DEFAULT_PAGE_SIZE: 10
|
||||
}
|
||||
);
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = ParallelQueryExecutionContextBase;
|
||||
}
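
The nextItem / current / hasMoreResults contract documented above is callback based and signals completion with an undefined item. A minimal consumption sketch follows (the fake context below only exists to make the sketch runnable; a real caller would receive a ParallelQueryExecutionContext or OrderByQueryExecutionContext instance instead):

```js
function drainExecutionContext(executionContext, done) {
    executionContext.nextItem(function (err, item, headers) {
        if (err) { return done(err); }
        if (item === undefined) { return done(undefined); }   // context has ended
        console.log("item:", JSON.stringify(item), "headers:", JSON.stringify(headers));
        // recurse on the next tick so large result sets do not grow the call stack
        setImmediate(function () { drainExecutionContext(executionContext, done); });
    });
}

// Trivial stand-in that honors the same callback shape:
var fakeContext = (function () {
    var buffered = [{ id: "1" }, { id: "2" }];
    return {
        nextItem: function (callback) { setImmediate(callback, undefined, buffered.shift(), {}); },
        hasMoreResults: function () { return buffered.length > 0; }
    };
})();

drainExecutionContext(fakeContext, function (err) { console.log("done", err || ""); });
```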
|
|
@ -1,79 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, assert = require("assert")
|
||||
, util = require("util");
|
||||
|
||||
//SCRIPT START
|
||||
var PartitionedQueryContants = {
|
||||
QueryInfoPath : 'queryInfo',
|
||||
TopPath: ['queryInfo', 'top'],
|
||||
OrderByPath: ['queryInfo', 'orderBy'],
|
||||
AggregatePath: ['queryInfo', 'aggregates'],
|
||||
QueryRangesPath : 'queryRanges',
|
||||
RewrittenQueryPath: ['queryInfo', 'rewrittenQuery']
|
||||
};
|
||||
|
||||
var PartitionedQueryExecutionContextInfoParser = Base.defineClass(
|
||||
undefined, undefined,
|
||||
{
|
||||
parseRewrittenQuery: function (partitionedQueryExecutionInfo) {
|
||||
return this._extract(partitionedQueryExecutionInfo, PartitionedQueryContants.RewrittenQueryPath);
|
||||
},
|
||||
parseQueryRanges: function (partitionedQueryExecutionInfo) {
|
||||
return this._extract(partitionedQueryExecutionInfo, PartitionedQueryContants.QueryRangesPath);
|
||||
},
|
||||
parseOrderBy: function (partitionedQueryExecutionInfo) {
|
||||
return this._extract(partitionedQueryExecutionInfo, PartitionedQueryContants.OrderByPath);
|
||||
},
|
||||
parseAggregates: function (partitionedQueryExecutionInfo) {
|
||||
return this._extract(partitionedQueryExecutionInfo, PartitionedQueryContants.AggregatePath);
|
||||
},
|
||||
parseTop: function (partitionedQueryExecutionInfo) {
|
||||
return this._extract(partitionedQueryExecutionInfo, PartitionedQueryContants.TopPath);
|
||||
},
|
||||
_extract: function (partitionedQueryExecutionInfo, path) {
|
||||
var item = partitionedQueryExecutionInfo;
|
||||
if (typeof (path) === 'string') {
|
||||
return item[path];
|
||||
}
|
||||
assert.ok(Array.isArray(path),
|
||||
util.format("%s is expected to be an array", JSON.stringify(path)));
|
||||
for (var index = 0; index < path.length; index++) {
|
||||
item = item[path[index]];
|
||||
if (item === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
return item;
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = PartitionedQueryExecutionContextInfoParser;
|
||||
}
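
The parser above just walks property paths into the partitionedQueryExecutionInfo object returned by the gateway. A sketch of the shape those paths imply, with a stripped-down _extract (the concrete values are illustrative, not a real gateway response):

```js
var partitionedQueryExecutionInfo = {
    queryInfo: {
        top: 10,
        orderBy: ["Ascending"],
        aggregates: [],
        rewrittenQuery: "SELECT r._rid, [{\"item\": r.ts}] AS orderByItems, r AS payload " +
                        "FROM r WHERE ({documentdb-formattableorderbyquery-filter}) ORDER BY r.ts"
    },
    queryRanges: [{ min: "", max: "FF", isMinInclusive: true, isMaxInclusive: false }]
};

// Same behavior as _extract: a string path is a direct property, an array path is walked step by step.
function extract(info, path) {
    if (typeof path === "string") { return info[path]; }
    return path.reduce(function (item, key) {
        return item === undefined ? undefined : item[key];
    }, info);
}

console.log(extract(partitionedQueryExecutionInfo, ["queryInfo", "top"]));       // 10
console.log(extract(partitionedQueryExecutionInfo, "queryRanges").length);       // 1
console.log(extract(partitionedQueryExecutionInfo, ["queryInfo", "missing"]));   // undefined
```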
|
|
@ -1,163 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, endpointComponent = require('./endpointComponent')
|
||||
, assert = require("assert")
|
||||
, PartitionedQueryExecutionContextInfoParser = require("./partitionedQueryExecutionContextInfoParser")
|
||||
, HeaderUtils = require("./headerUtils");
|
||||
|
||||
var ParallelQueryExecutionContext = require("./parallelQueryExecutionContext")
|
||||
, OrderByQueryExecutionContext = require("./orderByQueryExecutionContext");
|
||||
|
||||
var AggregateEndpointComponent = endpointComponent.AggregateEndpointComponent
|
||||
, OrderByEndpointComponent = endpointComponent.OrderByEndpointComponent
|
||||
, TopEndpointComponent = endpointComponent.TopEndpointComponent;
|
||||
|
||||
|
||||
//SCRIPT START
|
||||
var PipelinedQueryExecutionContext = Base.defineClass(
|
||||
/**
|
||||
* Provides the PipelinedQueryExecutionContext. It pipelines top and orderby execution contexts if necessary.
|
||||
* @constructor PipelinedQueryExecutionContext
|
||||
* @param {object} documentclient - The documentclient object.
* @param {string} collectionLink - The Collection Link
* @param {SqlQuerySpec | string} query - A SQL query.
* @param {FeedOptions} options - Represents the feed options.
* @param {object} partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
|
||||
this.documentclient = documentclient;
|
||||
this.collectionLink = collectionLink;
|
||||
this.query = query;
|
||||
this.options = options;
|
||||
this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo;
|
||||
this.endpoint = null;
|
||||
this.pageSize = options["maxItemCount"];
|
||||
if (this.pageSize === undefined) {
|
||||
this.pageSize = PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE;
|
||||
}
|
||||
|
||||
// Pick between parallel vs order by execution context
|
||||
var sortOrders = PartitionedQueryExecutionContextInfoParser.parseOrderBy(partitionedQueryExecutionInfo);
|
||||
if (Array.isArray(sortOrders) && sortOrders.length > 0) {
|
||||
// Need to wrap orderby execution context in endpoint component, since the data is nested as a "payload" property.
|
||||
this.endpoint = new OrderByEndpointComponent(
|
||||
new OrderByQueryExecutionContext(
|
||||
this.documentclient,
|
||||
this.collectionLink,
|
||||
this.query,
|
||||
this.options,
|
||||
this.partitionedQueryExecutionInfo));
|
||||
} else {
|
||||
this.endpoint = new ParallelQueryExecutionContext(
|
||||
this.documentclient,
|
||||
this.collectionLink,
|
||||
this.query,
|
||||
this.options,
|
||||
this.partitionedQueryExecutionInfo);
|
||||
}
|
||||
|
||||
// If aggregate then add that to the pipeline
|
||||
var aggregates = PartitionedQueryExecutionContextInfoParser.parseAggregates(partitionedQueryExecutionInfo);
|
||||
if (Array.isArray(aggregates) && aggregates.length > 0) {
|
||||
this.endpoint = new AggregateEndpointComponent(this.endpoint, aggregates);
|
||||
}
|
||||
|
||||
// If top then add that to the pipeline
|
||||
var top = PartitionedQueryExecutionContextInfoParser.parseTop(partitionedQueryExecutionInfo);
|
||||
if (typeof (top) === 'number') {
|
||||
this.endpoint = new TopEndpointComponent(this.endpoint, top);
|
||||
}
|
||||
},
|
||||
{
|
||||
nextItem: function (callback) {
|
||||
return this.endpoint.nextItem(callback);
|
||||
},
|
||||
|
||||
current: function (callback) {
|
||||
return this.endpoint.current(callback);
|
||||
},
|
||||
|
||||
hasMoreResults: function (callback) {
|
||||
return this.endpoint.hasMoreResults(callback);
|
||||
},
|
||||
|
||||
fetchMore: function (callback) {
|
||||
// if the wrapped endpoint has different implementation for fetchMore use that
|
||||
// otherwise use the default implementation
|
||||
if (typeof this.endpoint.fetchMore === 'function') {
|
||||
return this.endpoint.fetchMore(callback);
|
||||
} else {
|
||||
this._fetchBuffer = [];
|
||||
this._fetchMoreRespHeaders = HeaderUtils.getInitialHeader();
|
||||
return this._fetchMoreImplementation(callback);
|
||||
}
|
||||
},
|
||||
|
||||
_fetchMoreImplementation: function (callback) {
|
||||
var that = this;
|
||||
this.endpoint.nextItem(function (err, item, headers) {
|
||||
HeaderUtils.mergeHeaders(that._fetchMoreRespHeaders, headers);
|
||||
if (err) {
|
||||
return callback(err, undefined, that._fetchMoreRespHeaders);
|
||||
}
|
||||
|
||||
if (item === undefined) {
|
||||
// no more results
|
||||
if (that._fetchBuffer.length === 0) {
|
||||
return callback(undefined, undefined, that._fetchMoreRespHeaders);
|
||||
} else {
|
||||
// Just give what we have
|
||||
var temp = that._fetchBuffer;
|
||||
that._fetchBuffer = [];
|
||||
return callback(undefined, temp, that._fetchMoreRespHeaders);
|
||||
}
|
||||
} else {
|
||||
// append the result
|
||||
that._fetchBuffer.push(item);
|
||||
if (that._fetchBuffer.length >= that.pageSize) {
|
||||
// fetched enough results
|
||||
var temp = that._fetchBuffer.slice(0, that.pageSize);
|
||||
that._fetchBuffer = that._fetchBuffer.splice(that.pageSize);
|
||||
return callback(undefined, temp, that._fetchMoreRespHeaders);
|
||||
} else {
|
||||
// recursively fetch more
|
||||
that._fetchMoreImplementation(callback);
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
},
|
||||
{
|
||||
DEFAULT_PAGE_SIZE: 10
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = PipelinedQueryExecutionContext;
|
||||
}
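
The constructor above decides the inner execution context from the sort orders and then optionally wraps it in aggregate and top endpoint components. A conceptual sketch of that wrapping order (plain tagged objects stand in for the real classes):

```js
function buildPipeline(queryInfo) {
    // OrderBy results arrive nested under a "payload" property, hence the extra endpoint component.
    var endpoint = (Array.isArray(queryInfo.orderBy) && queryInfo.orderBy.length > 0)
        ? { kind: "OrderByEndpointComponent", inner: { kind: "OrderByQueryExecutionContext" } }
        : { kind: "ParallelQueryExecutionContext" };

    if (Array.isArray(queryInfo.aggregates) && queryInfo.aggregates.length > 0) {
        endpoint = { kind: "AggregateEndpointComponent", inner: endpoint };
    }
    if (typeof queryInfo.top === "number") {
        endpoint = { kind: "TopEndpointComponent", inner: endpoint };
    }
    return endpoint;
}

var pipeline = buildPipeline({ orderBy: ["Ascending"], top: 5 });
console.log(pipeline.kind, "->", pipeline.inner.kind, "->", pipeline.inner.inner.kind);
// TopEndpointComponent -> OrderByEndpointComponent -> OrderByQueryExecutionContext
```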
|
|
@ -1,166 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, DefaultQueryExecutionContext = require("./defaultQueryExecutionContext")
|
||||
, PipelinedQueryExecutionContext = require("./pipelinedQueryExecutionContext")
|
||||
, StatusCodes = require("../statusCodes").StatusCodes
|
||||
, SubStatusCodes = require("../statusCodes").SubStatusCodes
|
||||
, assert = require("assert")
|
||||
|
||||
//SCRIPT START
|
||||
var ProxyQueryExecutionContext = Base.defineClass(
|
||||
/**
|
||||
* Represents a ProxyQueryExecutionContext Object. If the query is a partitioned query that can be parallelized, it switches the execution context.
|
||||
* @constructor ProxyQueryExecutionContext
|
||||
* @param {object} documentclient - The documentclient object.
|
||||
* @param {SqlQuerySpec | string} query - A SQL query.
|
||||
* @param {FeedOptions} options - Represents the feed options.
|
||||
* @param {callback | callback[]} fetchFunctions - A function to retrieve each page of data. An array of functions may be used to query more than one partition.
|
||||
* @param {string} [resourceLink] - collectionLink for parallelized query execution.
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient, query, options, fetchFunctions, resourceLink) {
|
||||
this.documentclient = documentclient;
|
||||
this.query = query;
|
||||
this.fetchFunctions = fetchFunctions;
|
||||
// clone options
|
||||
this.options = JSON.parse(JSON.stringify(options || {}));
|
||||
this.resourceLink = resourceLink;
|
||||
this.queryExecutionContext = new DefaultQueryExecutionContext(this.documentclient, this.query, this.options, this.fetchFunctions);
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Execute a provided function on the next element in the ProxyQueryExecutionContext.
|
||||
* @memberof ProxyQueryExecutionContext
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
var that = this;
|
||||
this.queryExecutionContext.nextItem(function (err, resources, headers) {
|
||||
if (err) {
|
||||
if (that._hasPartitionedExecutionInfo(err)) {
|
||||
// if the error carries partitioned execution info, switch the execution context
|
||||
var partitionedExecutionInfo = that._getParitionedExecutionInfo(err);
|
||||
that.queryExecutionContext = that._createPipelinedExecutionContext(partitionedExecutionInfo);
|
||||
return that.nextItem(callback);
|
||||
} else {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
} else {
|
||||
callback(undefined, resources, headers);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_createPipelinedExecutionContext: function (partitionedExecutionInfo) {
|
||||
assert.notStrictEqual(this.resourceLink, undefined, "for top/orderby resourceLink is required.");
|
||||
assert.ok(!Array.isArray(this.resourceLink) || this.resourceLink.length === 1,
|
||||
"for top/orderby exactly one collectionLink is required");
|
||||
|
||||
var collectionLink = undefined;
|
||||
if (Array.isArray(this.resourceLink)) {
|
||||
collectionLink = this.resourceLink[0];
|
||||
} else {
|
||||
collectionLink = this.resourceLink;
|
||||
}
|
||||
|
||||
return new PipelinedQueryExecutionContext(
|
||||
this.documentclient,
|
||||
collectionLink,
|
||||
this.query,
|
||||
this.options,
|
||||
partitionedExecutionInfo);
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the ProxyQueryExecutionContext.
|
||||
* @memberof ProxyQueryExecutionContext
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function (callback) {
|
||||
var that = this;
|
||||
this.queryExecutionContext.current(function (err, resources, headers) {
|
||||
if (err) {
|
||||
if (that._hasPartitionedExecutionInfo(err)) {
|
||||
// if the error carries partitioned execution info, switch the execution context
|
||||
var partitionedExecutionInfo = that._getParitionedExecutionInfo(err);
|
||||
that.queryExecutionContext = that._createPipelinedExecutionContext(partitionedExecutionInfo);
|
||||
return that.current(callback);
|
||||
} else {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
} else {
|
||||
callback(undefined, resources, headers);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if there are still remaining resources to process.
|
||||
* @memberof ProxyQueryExecutionContext
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the ProxyQueryExecutionContext.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return this.queryExecutionContext.hasMoreResults();
|
||||
},
|
||||
|
||||
fetchMore: function (callback) {
|
||||
var that = this;
|
||||
|
||||
this.queryExecutionContext.fetchMore(function (err, resources, headers) {
|
||||
if (err) {
|
||||
if (that._hasPartitionedExecutionInfo(err)) {
|
||||
// if the error carries partitioned execution info, switch the execution context
|
||||
var partitionedExecutionInfo = that._getParitionedExecutionInfo(err);
|
||||
that.queryExecutionContext = that._createPipelinedExecutionContext(partitionedExecutionInfo);
|
||||
return that.queryExecutionContext.fetchMore(callback);
|
||||
} else {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
} else {
|
||||
callback(undefined, resources, headers);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_hasPartitionedExecutionInfo: function (error) {
|
||||
return (error.code === StatusCodes.BadRequest) && ('substatus' in error) && (error['substatus'] === SubStatusCodes.CrossPartitionQueryNotServable);
|
||||
},
|
||||
|
||||
_getParitionedExecutionInfo: function (error) {
|
||||
|
||||
return JSON.parse(JSON.parse(error.body).additionalErrorInfo);
|
||||
},
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = ProxyQueryExecutionContext;
|
||||
}
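
The proxy switches contexts when the gateway rejects a cross-partition query and ships the query plan inside the error. A hedged sketch of the error shape the two private helpers above react to (the numeric status and substatus values are written out for illustration; the real code reads them from ../statusCodes):

```js
var BAD_REQUEST = 400;                            // StatusCodes.BadRequest
var CROSS_PARTITION_QUERY_NOT_SERVABLE = 1004;    // assumed value of SubStatusCodes.CrossPartitionQueryNotServable

var gatewayError = {
    code: BAD_REQUEST,
    substatus: CROSS_PARTITION_QUERY_NOT_SERVABLE,
    body: JSON.stringify({
        message: "Cross partition query can not be served by the gateway.",
        additionalErrorInfo: JSON.stringify({
            queryInfo: { orderBy: ["Ascending"], rewrittenQuery: "SELECT ..." },
            queryRanges: []
        })
    })
};

function hasPartitionedExecutionInfo(error) {
    return error.code === BAD_REQUEST &&
        ("substatus" in error) &&
        error.substatus === CROSS_PARTITION_QUERY_NOT_SERVABLE;
}

if (hasPartitionedExecutionInfo(gatewayError)) {
    // the plan travels doubly JSON-encoded in additionalErrorInfo, hence the nested parse
    var partitionedExecutionInfo = JSON.parse(JSON.parse(gatewayError.body).additionalErrorInfo);
    console.log(partitionedExecutionInfo.queryInfo.orderBy); // [ 'Ascending' ]
}
```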
|
|
@ -1,195 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base"),
|
||||
Constants = require("./constants"),
|
||||
ProxyQueryExecutionContext = require("./queryExecutionContext/proxyQueryExecutionContext");
|
||||
|
||||
//SCRIPT START
|
||||
var QueryIterator = Base.defineClass(
|
||||
/**
|
||||
* Represents a QueryIterator Object, an implementation of feed or query response that enables traversal of and iteration over the response
|
||||
* in the Azure Cosmos DB database service.
|
||||
* @class QueryIterator
|
||||
* @param {object} documentclient - The documentclient object.
|
||||
* @param {SqlQuerySpec | string} query - A SQL query.
|
||||
* @param {FeedOptions} options - Represents the feed options.
|
||||
* @param {callback | callback[]} fetchFunctions - A function to retrieve each page of data. An array of functions may be used to query more than one partition.
|
||||
* @param {string} [resourceLink] - An optional parameter that represents the resourceLink (will be used in orderby/top/parallel query)
|
||||
*/
|
||||
function (documentclient, query, options, fetchFunctions, resourceLink) {
|
||||
|
||||
this.documentclient = documentclient;
|
||||
this.query = query;
|
||||
this.fetchFunctions = fetchFunctions;
|
||||
this.options = options;
|
||||
this.resourceLink = resourceLink;
|
||||
this.queryExecutionContext = this._createQueryExecutionContext();
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Execute a provided function once per feed element.
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
* Note: the last element the callback will be called on will be undefined.
|
||||
* If the callback explicitly returned false, the loop gets stopped.
|
||||
*/
|
||||
forEach: function(callback) {
|
||||
this.reset();
|
||||
this._forEachImplementation(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Execute a provided function on the next element in the QueryIterator.
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
|
||||
*/
|
||||
nextItem: function (callback) {
|
||||
this.queryExecutionContext.nextItem(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the current element on the QueryIterator.
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
|
||||
*/
|
||||
current: function(callback) {
|
||||
this.queryExecutionContext.current(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* @deprecated Instead check if callback(undefined, undefined) is invoked by nextItem(callback) or current(callback)
|
||||
*
|
||||
* Determine if there are still remaining resources to processs based on the value of the continuation token or the elements remaining on the current batch in the QueryIterator.
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @returns {Boolean} true if there are other elements to process in the QueryIterator.
|
||||
*/
|
||||
hasMoreResults: function () {
|
||||
return this.queryExecutionContext.hasMoreResults();
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve all the elements of the feed and pass them as an array to a function
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @param {callback} callback - Function execute on the feed response, takes two parameters error, resourcesList
|
||||
*/
|
||||
toArray: function (callback) {
|
||||
this.reset();
|
||||
this.toArrayTempResources = [];
|
||||
this._toArrayImplementation(callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieve the next batch of the feed and pass them as an array to a function
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
* @param {callback} callback - Function execute on the feed response, takes two parameters error, resourcesList
|
||||
*/
|
||||
executeNext: function(callback) {
|
||||
this.queryExecutionContext.fetchMore(function(err, resources, responseHeaders) {
|
||||
if (err) {
|
||||
return callback(err, undefined, responseHeaders);
|
||||
}
|
||||
|
||||
callback(undefined, resources, responseHeaders);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Reset the QueryIterator to the beginning and clear all the resources inside it
|
||||
* @memberof QueryIterator
|
||||
* @instance
|
||||
*/
|
||||
reset: function() {
|
||||
this.queryExecutionContext = this._createQueryExecutionContext();
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_toArrayImplementation: function(callback) {
|
||||
var that = this;
|
||||
|
||||
this.queryExecutionContext.nextItem(function (err, resource, headers) {
|
||||
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
// concatenate the results and fetch more
|
||||
that.toArrayLastResHeaders = headers;
|
||||
|
||||
if (resource === undefined) {
|
||||
|
||||
// no more results
|
||||
return callback(undefined, that.toArrayTempResources, that.toArrayLastResHeaders);
|
||||
}
|
||||
|
||||
that.toArrayTempResources.push(resource);
|
||||
|
||||
setImmediate(function () {
|
||||
that._toArrayImplementation(callback);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_forEachImplementation: function (callback) {
|
||||
var that = this;
|
||||
this.queryExecutionContext.nextItem(function (err, resource, headers) {
|
||||
if (err) {
|
||||
return callback(err, undefined, headers);
|
||||
}
|
||||
|
||||
if (resource === undefined) {
|
||||
// no more results; this is the last iteration
|
||||
return callback(undefined, undefined, headers);
|
||||
}
|
||||
|
||||
if (callback(undefined, resource, headers) === false) {
|
||||
// callback instructed to stop further iteration
|
||||
return;
|
||||
}
|
||||
|
||||
// recursively call itself to iterate to the remaining elements
|
||||
setImmediate(function () {
|
||||
that._forEachImplementation(callback);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_createQueryExecutionContext: function () {
|
||||
return new ProxyQueryExecutionContext(this.documentclient, this.query, this.options, this.fetchFunctions, this.resourceLink);
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = QueryIterator;
|
||||
}
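
// Usage sketch (not part of the original file): a QueryIterator is normally obtained
// from a DocumentClient query method rather than constructed directly; the client and
// collection link below are placeholders.
//
// var iterator = client.queryDocuments(collectionLink, "SELECT * FROM root r");
//
// // Drain the whole feed into an array:
// iterator.toArray(function (err, docs, headers) {
//     if (err) return console.error(err);
//     console.log("fetched " + docs.length + " documents");
// });
//
// // Or page through it manually while results remain:
// iterator.executeNext(function (err, page, headers) {
//     if (err) return console.error(err);
//     console.log("page size: " + page.length + ", more: " + iterator.hasMoreResults());
// });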
|
|
@ -1,271 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base");
|
||||
|
||||
//SCRIPT START
|
||||
var Range = Base.defineClass(
|
||||
/**
|
||||
* Represents a range object used by the RangePartitionResolver in the Azure Cosmos DB database service.
|
||||
* @class Range
|
||||
* @param {object} options - The Range constructor options.
|
||||
* @param {any} options.low - The low value in the range.
|
||||
* @param {any} options.high - The high value in the range.
|
||||
**/
|
||||
function(options) {
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
if (options === null) {
|
||||
throw new Error("Invalid argument: 'options' is null");
|
||||
}
|
||||
if (typeof options !== "object") {
|
||||
throw new Error("Invalid argument: 'options' is not an object");
|
||||
}
|
||||
if (options.high === undefined) {
|
||||
options.high = options.low;
|
||||
}
|
||||
this.low = options.low;
|
||||
this.high = options.high;
|
||||
Object.freeze(this);
|
||||
},
|
||||
{
|
||||
/** @ignore */
|
||||
_compare: function (x, y, compareFunction) {
|
||||
// Same semantics as Array.sort
|
||||
// http://www.ecma-international.org/ecma-262/6.0/#sec-sortcompare
|
||||
if (x === undefined && y === undefined)
|
||||
return 0;
|
||||
if (x === undefined)
|
||||
return 1;
|
||||
if (y === undefined)
|
||||
return -1;
|
||||
if (compareFunction !== undefined) {
|
||||
var v = Number(compareFunction(x, y));
|
||||
if (isNaN(v))
|
||||
return 0;
|
||||
return v;
|
||||
}
|
||||
var xString = String(x);
|
||||
var yString = String(y);
|
||||
if (xString < yString)
|
||||
return -1;
|
||||
if (xString > yString)
|
||||
return 1;
|
||||
return 0;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_contains: function (other, compareFunction) {
|
||||
if (Range._isRange(other)) {
|
||||
return this._containsRange(other, compareFunction);
|
||||
}
|
||||
else {
|
||||
return this._containsPoint(other, compareFunction);
|
||||
}
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_containsPoint: function (point, compareFunction) {
|
||||
if (this._compare(point, this.low, compareFunction) >= 0 && this._compare(point, this.high, compareFunction) <= 0) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_containsRange: function (other, compareFunction) {
|
||||
if (this._compare(other.low, this.low, compareFunction) >= 0 && this._compare(other.high, this.high, compareFunction) <= 0) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_intersect: function (other, compareFunction) {
|
||||
if (other === undefined || other === null) {
|
||||
throw new Error("Invalid Argument: 'other' is undefined or null");
|
||||
}
|
||||
var maxLow = this._compare(this.low, other.low, compareFunction) >= 0 ? this.low : other.low;
|
||||
var minHigh = this._compare(this.high, other.high, compareFunction) <= 0 ? this.high : other.high;
|
||||
if (this._compare(maxLow, minHigh, compareFunction) <= 0) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_toString: function () {
|
||||
return String(this.low) + "," + String(this.high);
|
||||
}
|
||||
},
|
||||
{
|
||||
/** @ignore */
|
||||
_isRange: function (pointOrRange) {
|
||||
if (pointOrRange === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (pointOrRange === null) {
|
||||
return false;
|
||||
}
|
||||
if (typeof pointOrRange !== "object") {
|
||||
return false;
|
||||
}
|
||||
return ("low" in pointOrRange && "high" in pointOrRange);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
var RangePartitionResolver = Base.defineClass(
|
||||
/**
|
||||
* RangePartitionResolver implements partitioning using a partition map of ranges of values to a collection link in the Azure Cosmos DB database service.
|
||||
* @class RangePartitionResolver
|
||||
* @param {string | function} partitionKeyExtractor - If partitionKeyExtractor is a string, it should be the name of the property in the document to execute the hashing on.
|
||||
* If partitionKeyExtractor is a function, it should be a function to extract the partition key from an object.
|
||||
* @param {Array} partitionKeyMap - The map from Range to collection link that is used for partitioning requests.
|
||||
* @param {function} compareFunction - Optional function that accepts two arguments x and y and returns a negative value if x < y, zero if x = y, or a positive value if x > y.
|
||||
**/
|
||||
function(partitionKeyExtractor, partitionKeyMap, compareFunction) {
|
||||
if (partitionKeyExtractor === undefined || partitionKeyExtractor === null) {
|
||||
throw new Error("partitionKeyExtractor cannot be null or undefined");
|
||||
}
|
||||
if (typeof partitionKeyExtractor !== "string" && typeof partitionKeyExtractor !== "function") {
|
||||
throw new Error("partitionKeyExtractor must be either a 'string' or a 'function'");
|
||||
}
|
||||
if (partitionKeyMap === undefined || partitionKeyMap === null) {
|
||||
throw new Error("partitionKeyMap cannot be null or undefined");
|
||||
}
|
||||
if (!(Array.isArray(partitionKeyMap))) {
|
||||
throw new Error("partitionKeyMap has to be an Array");
|
||||
}
|
||||
var allMapEntriesAreValid = partitionKeyMap.every(function (mapEntry) {
|
||||
if ((mapEntry === undefined) || mapEntry === null) {
|
||||
return false;
|
||||
}
|
||||
if (mapEntry.range === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (!(mapEntry.range instanceof Range)) {
|
||||
return false;
|
||||
}
|
||||
if (mapEntry.link === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (typeof mapEntry.link !== "string") {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (!allMapEntriesAreValid) {
|
||||
throw new Error("All partitionKeyMap entries have to be a tuple {range: Range, link: string }");
|
||||
}
|
||||
if (compareFunction !== undefined && typeof compareFunction !== "function") {
|
||||
throw new Error("Invalid argument: 'compareFunction' is not a function");
|
||||
}
|
||||
|
||||
this.partitionKeyExtractor = partitionKeyExtractor;
|
||||
this.partitionKeyMap = partitionKeyMap;
|
||||
this.compareFunction = compareFunction;
|
||||
}, {
|
||||
/**
|
||||
* Extracts the partition key from the specified document using the partitionKeyExtractor
|
||||
* @memberof RangePartitionResolver
|
||||
* @instance
|
||||
* @param {object} document - The document from which to extract the partition key.
|
||||
* @returns {any} The partition key extracted from the document.
|
||||
**/
|
||||
getPartitionKey: function (document) {
|
||||
if (typeof this.partitionKeyExtractor === "string") {
|
||||
return document[this.partitionKeyExtractor];
|
||||
}
|
||||
if (typeof this.partitionKeyExtractor === "function") {
|
||||
return this.partitionKeyExtractor(document);
|
||||
}
|
||||
throw new Error("Unable to extract partition key from document. Ensure PartitionKeyExtractor is a valid function or property name.");
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a partition key, returns the correct collection link for creating a document using the range partition map.
|
||||
* @memberof RangePartitionResolver
|
||||
* @instance
|
||||
* @param {any} partitionKey - The partition key used to determine the target collection for create
|
||||
* @returns {string} - The target collection link that will be used for document creation.
|
||||
**/
|
||||
resolveForCreate: function (partitionKey) {
|
||||
var range = new Range({ low: partitionKey });
|
||||
var mapEntry = this._getFirstContainingMapEntryOrNull(range);
|
||||
if (mapEntry !== undefined && mapEntry !== null) {
|
||||
return mapEntry.link;
|
||||
}
|
||||
throw new Error("Invalid operation: A containing range for '" + range._toString() + "' doesn't exist in the partition map.");
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a partition key, returns a list of collection links to read from using the range partition map.
|
||||
* @memberof RangePartitionResolver
|
||||
* @instance
|
||||
* @param {any} partitionKey - The partition key used to determine the target collection for query
|
||||
* @returns {string[]} - The list of target collection links.
|
||||
**/
|
||||
resolveForRead: function (partitionKey) {
|
||||
if (partitionKey === undefined || partitionKey === null) {
|
||||
return this.partitionKeyMap.map(function (i) { return i.link; });
|
||||
}
|
||||
else {
|
||||
return this._getIntersectingMapEntries(partitionKey).map(function (i) { return i.link; });
|
||||
}
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_getFirstContainingMapEntryOrNull: function (point) {
|
||||
var _this = this;
|
||||
var containingMapEntries = this.partitionKeyMap.filter(function (p) { return p.range !== undefined && p.range._contains(point, _this.compareFunction); });
|
||||
if (containingMapEntries && containingMapEntries.length > 0) {
|
||||
return containingMapEntries[0];
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
/** @ignore */
|
||||
_getIntersectingMapEntries: function (partitionKey) {
|
||||
var _this = this;
|
||||
var partitionKeys = (partitionKey instanceof Array) ? partitionKey : [partitionKey];
|
||||
var ranges = partitionKeys.map(function (p) { return Range._isRange(p) ? p : new Range({ low: p }); });
|
||||
var result = new Array();
|
||||
ranges.forEach(function (range) {
|
||||
result = result.concat(_this.partitionKeyMap.filter(function (entry) {
|
||||
return entry.range._intersect(range, _this.compareFunction);
|
||||
}));
|
||||
});
|
||||
return result;
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.Range = Range;
|
||||
exports.RangePartitionResolver = RangePartitionResolver;
|
||||
}
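
// Usage sketch (not part of the original file): the collection links below are
// placeholders. Documents whose "key" property falls in ["A", "M"] are created in
// collA, the rest in collB; reads without a partition key fan out to both links.
//
// var resolver = new RangePartitionResolver("key", [
//     { range: new Range({ low: "A", high: "M" }), link: "dbs/db/colls/collA" },
//     { range: new Range({ low: "N", high: "Z" }), link: "dbs/db/colls/collB" }
// ]);
// resolver.getPartitionKey({ key: "H", id: "doc1" });  //=> "H"
// resolver.resolveForCreate("H");                      //=> "dbs/db/colls/collA"
// resolver.resolveForRead(undefined);                  //=> both collection links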
|
|
@ -1,209 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Documents = require("./documents")
|
||||
, Constants = require("./constants")
|
||||
, https = require("https")
|
||||
, url = require("url")
|
||||
, querystring = require("querystring")
|
||||
, RetryUtility = require("./retryUtility");
|
||||
|
||||
//----------------------------------------------------------------------------
|
||||
// Utility methods
|
||||
//
|
||||
|
||||
function javaScriptFriendlyJSONStringify(s) {
|
||||
// Two line terminators (Line Separator and Paragraph Separator) do not need to be escaped in JSON,
|
||||
// but they do need to be escaped in JavaScript.
|
||||
return JSON.stringify(s).
|
||||
replace(/\u2028/g, '\\u2028').
|
||||
replace(/\u2029/g, '\\u2029');
|
||||
}
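
// Worked example (not part of the original file): JSON.stringify leaves U+2028 and
// U+2029 unescaped, which breaks when the output is later evaluated as JavaScript,
// so they are re-escaped here.
//
// javaScriptFriendlyJSONStringify({ note: "line\u2028break" });
// //=> '{"note":"line\\u2028break"}'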
|
||||
|
||||
function bodyFromData(data) {
|
||||
if (data.pipe) return data;
|
||||
if (Buffer.isBuffer(data)) return data;
|
||||
if (typeof data === "string") return data;
|
||||
if (typeof data === "object") return javaScriptFriendlyJSONStringify(data);
|
||||
return undefined;
|
||||
}
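
// Behavior sketch (not part of the original file):
// bodyFromData("raw text")          //=> "raw text"
// bodyFromData(someReadableStream)  //=> the stream itself (anything with .pipe)
// bodyFromData(aBuffer)             //=> the same Buffer
// bodyFromData({ id: "doc1" })      //=> '{"id":"doc1"}'
// bodyFromData(42)                  //=> undefined (rejected by the caller)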
|
||||
|
||||
function parse(urlString) { return url.parse(urlString); }
|
||||
|
||||
function createRequestObject(connectionPolicy, requestOptions, callback) {
|
||||
function onTimeout() {
|
||||
httpsRequest.abort();
|
||||
}
|
||||
|
||||
var isMedia = (requestOptions.path.indexOf("//media") === 0);
|
||||
|
||||
var httpsRequest = https.request(requestOptions, function (response) {
|
||||
// In case of media response, return the stream to the user and the user will need to handle reading the stream.
|
||||
if (isMedia && connectionPolicy.MediaReadMode === Documents.MediaReadMode.Streamed) {
|
||||
return callback(undefined, response, response.headers);
|
||||
}
|
||||
|
||||
var data = "";
|
||||
|
||||
//if the requested data is text (not attachment/media) set the encoding to UTF-8
|
||||
if (!isMedia) {
|
||||
response.setEncoding("utf8");
|
||||
}
|
||||
|
||||
response.on("data", function (chunk) {
|
||||
data += chunk;
|
||||
});
|
||||
response.on("end", function () {
|
||||
if (response.statusCode >= 400) {
|
||||
return callback(getErrorBody(response, data), undefined, response.headers);
|
||||
}
|
||||
|
||||
var result;
|
||||
try {
|
||||
if (isMedia) {
|
||||
result = data;
|
||||
} else {
|
||||
result = data.length > 0 ? JSON.parse(data) : undefined;
|
||||
}
|
||||
} catch (exception) {
|
||||
return callback(exception);
|
||||
}
|
||||
|
||||
callback(undefined, result, response.headers);
|
||||
});
|
||||
});
|
||||
|
||||
httpsRequest.once("socket", function (socket) {
|
||||
if (isMedia) {
|
||||
socket.setTimeout(connectionPolicy.MediaRequestTimeout);
|
||||
} else {
|
||||
socket.setTimeout(connectionPolicy.RequestTimeout);
|
||||
}
|
||||
|
||||
socket.once("timeout", onTimeout);
|
||||
|
||||
httpsRequest.once("response", function () {
|
||||
socket.removeListener("timeout", onTimeout);
|
||||
});
|
||||
});
|
||||
|
||||
httpsRequest.once("error", callback);
|
||||
return httpsRequest;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs the error body from the response and the data returned from the request.
|
||||
* @param {object} response - response object returned from the execution of a request.
|
||||
* @param {object} data - the data body returned from the execution of a request.
|
||||
*/
|
||||
function getErrorBody(response, data) {
|
||||
var errorBody = { code: response.statusCode, body: data };
|
||||
|
||||
if (Constants.HttpHeaders.ActivityId in response.headers) {
|
||||
errorBody.activityId = response.headers[Constants.HttpHeaders.ActivityId];
|
||||
}
|
||||
|
||||
if (Constants.HttpHeaders.SubStatus in response.headers) {
|
||||
errorBody.substatus = parseInt(response.headers[Constants.HttpHeaders.SubStatus]);
|
||||
}
|
||||
|
||||
if (Constants.HttpHeaders.RetryAfterInMilliseconds in response.headers) {
|
||||
errorBody.retryAfterInMilliseconds = parseInt(response.headers[Constants.HttpHeaders.RetryAfterInMilliseconds]);
|
||||
}
|
||||
|
||||
return errorBody;
|
||||
}
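
// Shape sketch (not part of the original file) of the error body built above for a
// throttled (HTTP 429) response; the concrete values are placeholders:
// {
//     code: 429,
//     body: "<raw response body>",
//     activityId: "<value of the activity-id response header>",
//     substatus: 0,                    // only when the sub-status header is present
//     retryAfterInMilliseconds: 100    // only when the retry-after header is present
// }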
|
||||
|
||||
var RequestHandler = {
|
||||
_createRequestObjectStub: function (connectionPolicy, requestOptions, callback) {
|
||||
return createRequestObject(connectionPolicy, requestOptions, callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates the request object, call the passed callback when the response is retrieved.
|
||||
* @param {object} globalEndpointManager - an instance of GlobalEndpointManager class.
|
||||
* @param {object} connectionPolicy - an instance of ConnectionPolicy that has the connection configs.
|
||||
* @param {object} requestAgent - the https agent used for sending the request.
|
||||
* @param {string} method - the http request method ( 'get', 'post', 'put', .. etc ).
|
||||
* @param {String} url - The base url for the endpoint.
|
||||
* @param {string} path - the path of the requested resource.
|
||||
* @param {Object} data - the request body. It can be either string, buffer, stream or undefined.
|
||||
* @param {Object} queryParams - query parameters for the request.
|
||||
* @param {Object} headers - specific headers for the request.
|
||||
* @param {function} callback - the callback that will be called when the response is retrieved and processed.
|
||||
*/
|
||||
request: function (globalEndpointManager, connectionPolicy, requestAgent, method, url, request, data, queryParams, headers, callback) {
|
||||
var path = request.path == undefined ? request : request.path;
|
||||
var body;
|
||||
|
||||
if (data) {
|
||||
body = bodyFromData(data);
|
||||
if (!body) return callback({ message: "parameter data must be a javascript object, string, Buffer, or stream" });
|
||||
}
|
||||
|
||||
var buffer;
|
||||
var stream;
|
||||
if (body) {
|
||||
if (Buffer.isBuffer(body)) {
|
||||
buffer = body;
|
||||
} else if (body.pipe) {
|
||||
// it is a stream
|
||||
stream = body;
|
||||
} else if (typeof body === "string") {
|
||||
buffer = new Buffer(body, "utf8");
|
||||
} else {
|
||||
return callback({ message: "body must be string, Buffer, or stream" });
|
||||
}
|
||||
}
|
||||
|
||||
var requestOptions = parse(url);
|
||||
requestOptions.method = method;
|
||||
requestOptions.path = path;
|
||||
requestOptions.headers = headers;
|
||||
requestOptions.agent = requestAgent;
|
||||
requestOptions.secureProtocol = "TLSv1_client_method";
|
||||
|
||||
if (connectionPolicy.DisableSSLVerification === true) {
|
||||
requestOptions.rejectUnauthorized = false;
|
||||
}
|
||||
|
||||
if (queryParams) {
|
||||
requestOptions.path += "?" + querystring.stringify(queryParams);
|
||||
}
|
||||
|
||||
if (buffer) {
|
||||
requestOptions.headers[Constants.HttpHeaders.ContentLength] = buffer.length;
|
||||
RetryUtility.execute(globalEndpointManager, { buffer: buffer, stream: null }, this._createRequestObjectStub, connectionPolicy, requestOptions, request, callback);
|
||||
} else if (stream) {
|
||||
RetryUtility.execute(globalEndpointManager, { buffer: null, stream: stream }, this._createRequestObjectStub, connectionPolicy, requestOptions, request, callback);
|
||||
} else {
|
||||
RetryUtility.execute(globalEndpointManager, { buffer: null, stream: null }, this._createRequestObjectStub, connectionPolicy, requestOptions, request, callback);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = RequestHandler;
|
||||
}
|
|
@ -1,423 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base")
|
||||
, BigInt = require("big-integer")
|
||||
, Int64BE = require("int64-buffer").Int64BE;
|
||||
|
||||
//SCRIPT START
|
||||
var ResourceId = Base.defineClass(
|
||||
|
||||
function () {
|
||||
this.offer = '0';
|
||||
this.database = '0';
|
||||
this.documentCollection = '0';
|
||||
this.storedProcedure = '0';
|
||||
this.trigger = '0';
|
||||
this.userDefinedFunction = '0';
|
||||
this.document = '0';
|
||||
this.partitionKeyRange = '0';
|
||||
this.user = '0';
|
||||
this.conflict = '0';
|
||||
this.permission = '0';
|
||||
this.attachment = '0';
|
||||
this.length = 20;
|
||||
this.offer_id_length = 3;
|
||||
this.DocumentByte = 0;
|
||||
this.StoredProcedureByte = 8;
|
||||
this.TriggerByte = 7;
|
||||
this.UserDefinedFunctionByte = 6;
|
||||
this.ConflictByte = 4;
|
||||
this.PartitionKeyRangeByte = 5;
|
||||
|
||||
},
|
||||
{
|
||||
parse: function (id) {
|
||||
var pair = this.tryParse(id);
|
||||
|
||||
if (!pair[0]) {
|
||||
throw (new Error("invalid resource id " + id));
|
||||
}
|
||||
return pair[1];
|
||||
},
|
||||
|
||||
newDatabaseId: function (dbId) {
|
||||
var resourceId = new ResourceId();
|
||||
resourceId.database = dbId;
|
||||
return resourceId;
|
||||
},
|
||||
|
||||
newDocumentCollectionId: function (databaseId, collectionId) {
|
||||
var dbId = this.parse(databaseId);
|
||||
|
||||
var collectionResourceId = new ResourceId();
|
||||
collectionResourceId.database = dbId.database;
|
||||
collectionResourceId.documentCollection = collectionId;
|
||||
|
||||
return collectionResourceId;
|
||||
},
|
||||
|
||||
newUserId: function (databaseId, userId) {
|
||||
var dbId = this.parse(databaseId);
|
||||
|
||||
var userResourceId = new ResourceId();
|
||||
userResourceId.database = dbId.database;
|
||||
userResourceId.user = userId;
|
||||
|
||||
return userResourceId;
|
||||
},
|
||||
|
||||
newPermissionId: function (userId, permissionId) {
|
||||
var usrId = this.parse(userId);
|
||||
|
||||
var permissionResourceId = new ResourceId();
|
||||
permissionResourceId.database = usrId.database;
|
||||
permissionResourceId.user = usrId.user;
|
||||
permissionResourceId.permission = permissionId;
|
||||
return permissionResourceId;
|
||||
},
|
||||
|
||||
newAttachmentId: function (documentId, attachmentId) {
|
||||
var docId = this.parse(documentId);
|
||||
|
||||
var attachmentResourceId = new ResourceId();
|
||||
attachmentResourceId.database = docId.database;
|
||||
attachmentResourceId.documentCollection = docId.documentCollection;
|
||||
attachmentResourceId.document = docId.document;
|
||||
attachmentResourceId.attachment = attachmentId;
|
||||
|
||||
return attachmentResourceId;
|
||||
},
|
||||
|
||||
tryParse: function (id) {
|
||||
var rid = undefined;
|
||||
if (!id)
|
||||
return [false, undefined];
|
||||
|
||||
var pair = this.verify(id);
|
||||
|
||||
if (!pair[0])
|
||||
return [false, undefined];
|
||||
|
||||
var buffer = pair[1];
|
||||
|
||||
var intArray = new Int8Array(buffer);
|
||||
|
||||
if (buffer.length % 4 != 0 && buffer.length != this.offer_id_length)
|
||||
return [false, undefined];
|
||||
|
||||
var rid = new ResourceId();
|
||||
|
||||
//if length < 4 bytes, the resource is an offer
|
||||
if (buffer.length == this.offer_id_length) {
|
||||
rid.offer = 0;
|
||||
|
||||
for (var index = 0; index < this.offer_id_length; index++) {
|
||||
rid.offer = rid.offer | (intArray[index] << (index * 8));
|
||||
}
|
||||
|
||||
rid.offer = rid.offer.toString();
|
||||
return [true, rid];
|
||||
}
|
||||
|
||||
//first 4 bytes represent the database
|
||||
if (buffer.length >= 4)
|
||||
rid.database = buffer.readIntBE(0, 4).toString();
|
||||
|
||||
if (buffer.length >= 8) {
|
||||
var isCollection = (intArray[4] & (128)) > 0;
|
||||
|
||||
if (isCollection) {
|
||||
//5th - 8th bytes represent the collection
|
||||
|
||||
rid.documentCollection = buffer.readIntBE(4, 4).toString();
|
||||
var newBuff = new Buffer(4);
|
||||
|
||||
if (buffer.length >= 16) {
|
||||
|
||||
//9th - 16th bytes represent one of document, trigger, sproc, udf, conflict, pkrange
|
||||
var subCollectionResource = this.bigNumberReadIntBE(buffer, 8, 8).toString();
|
||||
|
||||
if ((intArray[15] >> 4) == this.DocumentByte) {
|
||||
rid.document = subCollectionResource;
|
||||
|
||||
//17th - 20th bytes represent the attachment
|
||||
if (buffer.length == 20)
|
||||
rid.attachment = buffer.readIntBE(16, 4).toString();
|
||||
} else if (Math.abs(intArray[15] >> 4) == this.StoredProcedureByte)
|
||||
rid.storedProcedure = subCollectionResource;
|
||||
else if ((intArray[15] >> 4) == this.TriggerByte)
|
||||
rid.trigger = subCollectionResource;
|
||||
else if ((intArray[15] >> 4) == this.UserDefinedFunctionByte)
|
||||
rid.userDefinedFunction = subCollectionResource;
|
||||
else if ((intArray[15] >> 4) == this.ConflictByte)
|
||||
rid.conflict = subCollectionResource;
|
||||
else if ((intArray[15] >> 4) == this.PartitionKeyRangeByte)
|
||||
rid.partitionKeyRange = subCollectionResource;
|
||||
else
|
||||
return [false, rid];
|
||||
|
||||
} else if (buffer.length != 8) {
|
||||
return [false, rid];
|
||||
}
|
||||
} else {
|
||||
//5th - 8th bytes represent the user
|
||||
|
||||
rid.user = buffer.readIntBE(4, 4).toString();
|
||||
|
||||
//9th - 16th bytes represent the permission
|
||||
if (buffer.length == 16)
|
||||
rid.permission = this.bigNumberReadIntBE(buffer, 8, 8).toString();
|
||||
else if (buffer.length != 8)
|
||||
return [false, rid];
|
||||
}
|
||||
}
|
||||
|
||||
return [true, rid];
|
||||
},
|
||||
|
||||
verify: function (id) {
|
||||
if (!id) {
|
||||
throw (new Error("invalid resource id " + id));
|
||||
}
|
||||
|
||||
var buffer = this.fromBase64String(id);
|
||||
if (!buffer || buffer.length > this.length) {
|
||||
buffer = undefined;
|
||||
return [false, buffer];
|
||||
}
|
||||
|
||||
return [true, buffer];
|
||||
},
|
||||
|
||||
verifyBool: function (id) {
|
||||
return this.verify(id)[0];
|
||||
},
|
||||
|
||||
fromBase64String: function (s) {
|
||||
return Buffer(s.replace('-', '/'), 'base64');
|
||||
},
|
||||
|
||||
toBase64String: function (buffer) {
|
||||
return buffer.toString('base64');
|
||||
},
|
||||
|
||||
isDatabaseId: function () {
|
||||
return this.database != 0 && (this.documentCollection == 0 && this.user == 0)
|
||||
},
|
||||
|
||||
getDatabaseId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getDocumentCollectionId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getUniqueDocumentCollectionId: function () {
|
||||
var db = new BigInt(this.database);
|
||||
var coll = new BigInt(this.documentCollection);
|
||||
return db.shiftLeft(32).or(coll).toString();
|
||||
},
|
||||
|
||||
getStoredProcedureId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.storedProcedure = this.storedProcedure;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getTriggerId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.trigger = this.trigger;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getUserDefinedFunctionId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.userDefinedFunction = this.userDefinedFunction;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getConflictId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.conflict = this.conflict;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getDocumentId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.document = this.document;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getPartitonKeyRangeId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.partitionKeyRange = this.partitionKeyRange;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getUserId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.user = this.user;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getPermissionId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.user = this.user;
|
||||
rid.permission = this.permission;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getAttachmentId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.database = this.database;
|
||||
rid.documentCollection = this.documentCollection;
|
||||
rid.document = this.document;
|
||||
rid.attachment = this.attachment;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getOfferId: function () {
|
||||
var rid = new ResourceId();
|
||||
rid.offer = this.offer;
|
||||
return rid;
|
||||
},
|
||||
|
||||
getValue: function () {
|
||||
var len = 0;
|
||||
if (this.offer != '0')
|
||||
len = len + this.offer_id_length;
|
||||
else if (this.database != '0')
|
||||
len = len + 4;
|
||||
if (this.documentCollection != '0' || this.user != '0')
|
||||
len = len + 4;
|
||||
if (this.document != '0' || this.permission != '0'
|
||||
|| this.storedProcedure != '0' || this.trigger != '0'
|
||||
|| this.userDefinedFunction != '0' || this.conflict != '0'
|
||||
|| this.partitionKeyRange != '0')
|
||||
len = len + 8;
|
||||
if (this.attachment != '0')
|
||||
len = len + 4;
|
||||
|
||||
var buffer = new Buffer(len);
|
||||
buffer.fill(0);
|
||||
|
||||
if (this.offer != '0')
|
||||
buffer.writeIntLE(Number(this.offer), 0, this.offer_id_length);
|
||||
else if (this.database != '0')
|
||||
buffer.writeIntBE(Number(this.database), 0, 4);
|
||||
|
||||
if (this.documentCollection != '0')
|
||||
buffer.writeIntBE(Number(this.documentCollection), 4, 4);
|
||||
else if (this.user != '0')
|
||||
buffer.writeIntBE(Number(this.user), 4, 4);
|
||||
|
||||
if (this.storedProcedure != '0') {
|
||||
var big = new Int64BE(this.storedProcedure);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.trigger != '0') {
|
||||
var big = new Int64BE(this.trigger);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.userDefinedFunction != '0') {
|
||||
var big = new Int64BE(this.userDefinedFunction);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.conflict != '0') {
|
||||
var big = new Int64BE(this.conflict);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.document != '0') {
|
||||
var big = new Int64BE(this.document);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.permission != '0') {
|
||||
var big = new Int64BE(this.permission);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
else if (this.partitionKeyRange != '0') {
|
||||
var big = new Int64BE(this.partitionKeyRange);
|
||||
big.toBuffer().copy(buffer, 8, 0, 8);
|
||||
}
|
||||
|
||||
if (this.attachment != '0')
|
||||
buffer.writeIntBE(Number(this.attachment), 16, 4);
|
||||
|
||||
return buffer;
|
||||
|
||||
},
|
||||
|
||||
toString: function () {
|
||||
return this.toBase64String(this.getValue());
|
||||
},
|
||||
|
||||
bigNumberReadIntBE: function (buffer, offset, byteLength) {
|
||||
offset = offset >>> 0
|
||||
byteLength = byteLength >>> 0
|
||||
|
||||
var i = byteLength
|
||||
var mul = new BigInt("1");
|
||||
var val = new BigInt(buffer[offset + --i]);
|
||||
while (i > 0 && (mul = mul.times(0x100))) {
|
||||
var temp = new BigInt(buffer[offset + --i]);
|
||||
val = val.plus(temp.times(mul));
|
||||
}
|
||||
mul = mul.times(0x80);
|
||||
|
||||
if (val.greater(mul)) {
|
||||
var subtrahend = new BigInt(2);
|
||||
val = val.minus(subtrahend.pow(8 * byteLength));
|
||||
}
|
||||
|
||||
return val
|
||||
}
|
||||
}, null
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = ResourceId;
|
||||
}
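
// Usage sketch (not part of the original file): the _rid value below is a placeholder.
// parse/tryParse decode the base64 _rid of a resource into its per-level ids, and
// toString re-encodes them.
//
// var parsed = new ResourceId().parse("<base64 _rid of a document>");
// // parsed.database, parsed.documentCollection and parsed.document now hold the
// // ids of each level as strings, and parsed.toString() round-trips the value.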
|
|
@ -1,86 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base");
|
||||
|
||||
//SCRIPT START
|
||||
/**
|
||||
* This class implements the resource throttle retry policy for requests.
|
||||
* @property {int} _maxRetryAttemptCount - Max number of retries to be performed for a request.
|
||||
* @property {int} _fixedRetryIntervalInMilliseconds - Fixed retry interval in milliseconds to wait between each retry ignoring the retryAfter returned as part of the response.
|
||||
* @property {int} _maxWaitTimeInMilliseconds - Max wait time in milliseconds to wait for a request while the retries are happening.
|
||||
* @property {int} currentRetryAttemptCount - Current retry attempt count.
|
||||
* @property {int} cummulativeWaitTimeinMilliseconds - Cumulative wait time in milliseconds for a request while the retries are happening.
|
||||
*/
|
||||
var ResourceThrottleRetryPolicy = Base.defineClass(
|
||||
/**
|
||||
* @constructor ResourceThrottleRetryPolicy
|
||||
* @param {int} maxRetryAttemptCount - Max number of retries to be performed for a request.
|
||||
* @param {int} fixedRetryIntervalInMilliseconds - Fixed retry interval in milliseconds to wait between each retry ignoring the retryAfter returned as part of the response.
|
||||
* @param {int} maxWaitTimeInSeconds - Max wait time in seconds to wait for a request while the retries are happening.
|
||||
*/
|
||||
function (maxRetryAttemptCount, fixedRetryIntervalInMilliseconds, maxWaitTimeInSeconds) {
|
||||
this._maxRetryAttemptCount = maxRetryAttemptCount;
|
||||
this._fixedRetryIntervalInMilliseconds = fixedRetryIntervalInMilliseconds;
|
||||
this._maxWaitTimeInMilliseconds = maxWaitTimeInSeconds * 1000;
|
||||
this.currentRetryAttemptCount = 0;
|
||||
this.cummulativeWaitTimeinMilliseconds = 0;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Determines whether the request should be retried or not.
|
||||
* @param {object} err - Error returned by the request.
|
||||
* @param {function} callback - The callback function, which takes a bool argument specifying whether the request will be retried or not.
|
||||
*/
|
||||
shouldRetry: function (err, callback) {
|
||||
if (err) {
|
||||
if (this.currentRetryAttemptCount < this._maxRetryAttemptCount) {
|
||||
this.currentRetryAttemptCount++;
|
||||
this.retryAfterInMilliseconds = 0;
|
||||
|
||||
if (this._fixedRetryIntervalInMilliseconds) {
|
||||
this.retryAfterInMilliseconds = this._fixedRetryIntervalInMilliseconds;
|
||||
} else if (err.retryAfterInMilliseconds) {
|
||||
this.retryAfterInMilliseconds = err.retryAfterInMilliseconds;
|
||||
}
|
||||
|
||||
if (this.cummulativeWaitTimeinMilliseconds < this._maxWaitTimeInMilliseconds) {
|
||||
this.cummulativeWaitTimeinMilliseconds += this.retryAfterInMilliseconds;
|
||||
return callback(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
return callback(false);
|
||||
}
|
||||
},
|
||||
{
|
||||
THROTTLE_STATUS_CODE: 429
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = ResourceThrottleRetryPolicy;
|
||||
}
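
// Usage sketch (not part of the original file): a policy allowing up to 3 retries with
// no fixed interval and a 60 second cumulative wait budget. The err object mirrors what
// getErrorBody in request.js produces for a 429 response.
//
// var policy = new ResourceThrottleRetryPolicy(3, 0, 60);
// policy.shouldRetry({ code: 429, retryAfterInMilliseconds: 100 }, function (shouldRetry) {
//     // shouldRetry === true and policy.retryAfterInMilliseconds === 100 here
// });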
|
|
@ -1,66 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base");
|
||||
|
||||
//SCRIPT START
|
||||
/**
|
||||
* Represents the Retry policy associated with throttled requests in the Azure Cosmos DB database service.
|
||||
* @property {int} [MaxRetryAttemptCount] - Max number of retries to be performed for a request. Default value 9.
|
||||
* @property {int} [FixedRetryIntervalInMilliseconds] - Fixed retry interval in milliseconds to wait between each retry ignoring the retryAfter returned as part of the response.
|
||||
* @property {int} [MaxWaitTimeInSeconds] - Max wait time in seconds to wait for a request while the retries are happening. Default value 30 seconds.
|
||||
*/
|
||||
var RetryOptions = Base.defineClass(
|
||||
function RetryOptions(maxRetryAttemptCount, fixedRetryIntervalInMilliseconds, maxWaitTimeInSeconds) {
|
||||
this._maxRetryAttemptCount = maxRetryAttemptCount || 9;
|
||||
this._fixedRetryIntervalInMilliseconds = fixedRetryIntervalInMilliseconds;
|
||||
this._maxWaitTimeInSeconds = maxWaitTimeInSeconds || 30;
|
||||
|
||||
Object.defineProperty(this, "MaxRetryAttemptCount", {
|
||||
get: function () {
|
||||
return this._maxRetryAttemptCount;
|
||||
},
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "FixedRetryIntervalInMilliseconds", {
|
||||
get: function () {
|
||||
return this._fixedRetryIntervalInMilliseconds;
|
||||
},
|
||||
enumerable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(this, "MaxWaitTimeInSeconds", {
|
||||
get: function () {
|
||||
return this._maxWaitTimeInSeconds;
|
||||
},
|
||||
enumerable: true
|
||||
});
|
||||
});
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = RetryOptions;
|
||||
}
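
// Usage sketch (not part of the original file): override the defaults of 9 retries and
// a 30 second wait budget, then hand the options to the connection policy
// (connectionPolicy below is assumed to exist).
//
// var retryOptions = new RetryOptions(5, 200, 10);
// retryOptions.MaxRetryAttemptCount;              //=> 5
// retryOptions.FixedRetryIntervalInMilliseconds;  //=> 200
// retryOptions.MaxWaitTimeInSeconds;              //=> 10
// connectionPolicy.RetryOptions = retryOptions;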
|
|
@ -1,126 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base"),
|
||||
Constants = require("./constants"),
|
||||
EndpointDiscoveryRetryPolicy = require("./endpointDiscoveryRetryPolicy"),
|
||||
ResourceThrottleRetryPolicy = require("./resourceThrottleRetryPolicy"),
|
||||
SessionReadRetryPolicy = require("./sessionReadRetryPolicy");
|
||||
|
||||
//SCRIPT START
|
||||
var RetryUtility = {
|
||||
/**
|
||||
* Executes the retry policy for the created request object.
|
||||
* @param {object} globalEndpointManager - an instance of GlobalEndpointManager class.
|
||||
* @param {object} body - a dictionary containing 'buffer' and 'stream' keys to hold corresponding buffer or stream body, null otherwise.
|
||||
* @param {function} createRequestObjectFunc - function that creates the request object.
|
||||
* @param {object} connectionPolicy - an instance of ConnectionPolicy that has the connection configs.
|
||||
* @param {RequestOptions} requestOptions - The request options.
|
||||
* @param {object|string} request - the request info object, or the resource path as a string.
|
||||
* @param {function} callback - the callback that will be called when the request is finished executing.
|
||||
*/
|
||||
execute: function (globalEndpointManager, body, createRequestObjectFunc, connectionPolicy, requestOptions, request, callback) {
|
||||
var request = typeof request !== 'string' ? request : { "path": "", "operationType": "nonReadOps", "client": null };
|
||||
|
||||
var endpointDiscoveryRetryPolicy = new EndpointDiscoveryRetryPolicy(globalEndpointManager);
|
||||
var resourceThrottleRetryPolicy = new ResourceThrottleRetryPolicy(connectionPolicy.RetryOptions.MaxRetryAttemptCount,
|
||||
connectionPolicy.RetryOptions.FixedRetryIntervalInMilliseconds,
|
||||
connectionPolicy.RetryOptions.MaxWaitTimeInSeconds);
|
||||
var sessionReadRetryPolicy = new SessionReadRetryPolicy(globalEndpointManager, request)
|
||||
|
||||
this.apply(body, createRequestObjectFunc, connectionPolicy, requestOptions, endpointDiscoveryRetryPolicy, resourceThrottleRetryPolicy, sessionReadRetryPolicy, callback);
|
||||
},
|
||||
|
||||
/**
|
||||
* Applies the retry policy for the created request object.
|
||||
* @param {object} body - a dictionary containing 'buffer' and 'stream' keys to hold corresponding buffer or stream body, null otherwise.
|
||||
* @param {function} createRequestObjectFunc - function that creates the request object.
|
||||
* @param {object} connectionPolicy - an instance of ConnectionPolicy that has the connection configs.
|
||||
* @param {RequestOptions} requestOptions - The request options.
|
||||
* @param {EndpointDiscoveryRetryPolicy} endpointDiscoveryRetryPolicy - The endpoint discovery retry policy instance.
|
||||
* @param {ResourceThrottleRetryPolicy} resourceThrottleRetryPolicy - The resource throttle retry policy instance.
|
||||
* @param {SessionReadRetryPolicy} sessionReadRetryPolicy - The session read retry policy instance.
|
||||
* @param {function} callback - the callback that will be called when the response is retrieved and processed.
|
||||
*/
|
||||
apply: function (body, createRequestObjectFunc, connectionPolicy, requestOptions, endpointDiscoveryRetryPolicy, resourceThrottleRetryPolicy, sessionReadRetryPolicy, callback) {
|
||||
var that = this;
|
||||
var httpsRequest = createRequestObjectFunc(connectionPolicy, requestOptions, function (err, response, headers) {
|
||||
if (err) {
|
||||
var retryPolicy = null;
|
||||
headers = headers || {};
|
||||
if (err.code === EndpointDiscoveryRetryPolicy.FORBIDDEN_STATUS_CODE && err.substatus === EndpointDiscoveryRetryPolicy.WRITE_FORBIDDEN_SUB_STATUS_CODE) {
|
||||
retryPolicy = endpointDiscoveryRetryPolicy;
|
||||
} else if (err.code === ResourceThrottleRetryPolicy.THROTTLE_STATUS_CODE) {
|
||||
retryPolicy = resourceThrottleRetryPolicy;
|
||||
} else if (err.code === SessionReadRetryPolicy.NOT_FOUND_STATUS_CODE && err.substatus === SessionReadRetryPolicy.READ_SESSION_NOT_AVAILABLE_SUB_STATUS_CODE) {
|
||||
retryPolicy = sessionReadRetryPolicy;
|
||||
}
|
||||
if (retryPolicy) {
|
||||
retryPolicy.shouldRetry(err, function (shouldRetry, newUrl) {
|
||||
if (!shouldRetry) {
|
||||
headers[Constants.ThrottleRetryCount] = resourceThrottleRetryPolicy.currentRetryAttemptCount;
|
||||
headers[Constants.ThrottleRetryWaitTimeInMs] = resourceThrottleRetryPolicy.cummulativeWaitTimeinMilliseconds;
|
||||
return callback(err, response, headers);
|
||||
} else {
|
||||
setTimeout(function () {
|
||||
if (typeof newUrl !== 'undefined')
|
||||
requestOptions = that.modifyRequestOptions(requestOptions, newUrl);
|
||||
that.apply(body, createRequestObjectFunc, connectionPolicy, requestOptions, endpointDiscoveryRetryPolicy, resourceThrottleRetryPolicy, sessionReadRetryPolicy, callback);
|
||||
}, retryPolicy.retryAfterInMilliseconds);
|
||||
return;
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
headers[Constants.ThrottleRetryCount] = resourceThrottleRetryPolicy.currentRetryAttemptCount;
|
||||
headers[Constants.ThrottleRetryWaitTimeInMs] = resourceThrottleRetryPolicy.cummulativeWaitTimeinMilliseconds;
|
||||
return callback(err, response, headers);
|
||||
});
|
||||
|
||||
if (httpsRequest) {
|
||||
if (body["stream"] !== null) {
|
||||
body["stream"].pipe(httpsRequest);
|
||||
} else if (body["buffer"] !== null) {
|
||||
httpsRequest.write(body["buffer"]);
|
||||
httpsRequest.end();
|
||||
} else {
|
||||
httpsRequest.end();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
modifyRequestOptions: function (oldRequestOptions, newUrl) {
|
||||
var properties = Object.keys(newUrl);
|
||||
for (var index in properties) {
|
||||
if (properties[index] !== "path")
|
||||
oldRequestOptions[properties[index]] = newUrl[properties[index]];
|
||||
}
|
||||
return oldRequestOptions;
|
||||
}
|
||||
}
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = RetryUtility;
|
||||
}
|
|
@ -1,277 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, _ = require("underscore")
|
||||
, bs = require("binary-search-bounds")
|
||||
, assert = require("assert");
|
||||
|
||||
//SCRIPT START
|
||||
var _PartitionKeyRange = {
|
||||
//Partition Key Range Constants
|
||||
MinInclusive : "minInclusive",
|
||||
MaxExclusive : "maxExclusive",
|
||||
Id : "id"
|
||||
};
|
||||
|
||||
var _QueryRangeConstants = {
|
||||
//Query Range Constants
|
||||
MinInclusive: "minInclusive",
|
||||
MaxExclusive: "maxExclusive",
|
||||
min: "min"
|
||||
};
|
||||
|
||||
var _Constants = {
|
||||
MinimumInclusiveEffectivePartitionKey: "",
|
||||
MaximumExclusiveEffectivePartitionKey: "FF",
|
||||
};
|
||||
|
||||
var QueryRange = Base.defineClass(
|
||||
/**
|
||||
* Represents a QueryRange.
|
||||
* @constructor QueryRange
|
||||
* @param {string} rangeMin - min
|
||||
* @param {string} rangeMax - max
|
||||
* @param {boolean} isMinInclusive - isMinInclusive
|
||||
* @param {boolean} isMaxInclusive - isMaxInclusive
|
||||
* @ignore
|
||||
*/
|
||||
function (rangeMin, rangeMax, isMinInclusive, isMaxInclusive) {
|
||||
this.min = rangeMin;
|
||||
this.max = rangeMax;
|
||||
this.isMinInclusive = isMinInclusive;
|
||||
this.isMaxInclusive = isMaxInclusive;
|
||||
},
|
||||
{
|
||||
overlaps: function (other) {
|
||||
var range1 = this;
|
||||
var range2 = other;
|
||||
if (range1 === undefined || range2 === undefined) return false;
|
||||
if (range1.isEmpty() || range2.isEmpty()) return false;
|
||||
|
||||
if (range1.min <= range2.max && range2.min <= range1.max) {
|
||||
if ((range1.min === range2.max && !(range1.isMinInclusive && range2.isMaxInclusive))
|
||||
|| (range2.min === range1.max && !(range2.isMinInclusive && range1.isMaxInclusive))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
isEmpty: function () {
|
||||
return (!(this.isMinInclusive && this.isMaxInclusive)) && this.min === this.max;
|
||||
}
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Parse a QueryRange from a partitionKeyRange
|
||||
* @returns QueryRange
|
||||
* @ignore
|
||||
*/
|
||||
parsePartitionKeyRange: function (partitionKeyRange) {
|
||||
return new QueryRange(partitionKeyRange[_PartitionKeyRange.MinInclusive], partitionKeyRange[_PartitionKeyRange.MaxExclusive],
|
||||
true, false);
|
||||
},
|
||||
/**
|
||||
* Parse a QueryRange from a dictionary
|
||||
* @returns QueryRange
|
||||
* @ignore
|
||||
*/
|
||||
parseFromDict: function (queryRangeDict) {
|
||||
return new QueryRange(queryRangeDict.min, queryRangeDict.max, queryRangeDict.isMinInclusive, queryRangeDict.isMaxInclusive);
|
||||
}
|
||||
}
|
||||
);
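
// Worked example (not part of the original file) of the overlap semantics above:
// partition key ranges are half-open, [minInclusive, maxExclusive).
//
// var a = QueryRange.parsePartitionKeyRange({ minInclusive: "", maxExclusive: "05" });
// var b = new QueryRange("05", "FF", true, false);
// a.overlaps(b);  //=> false, they only touch at "05" and a's upper bound is exclusive
// var c = new QueryRange("03", "07", true, false);
// a.overlaps(c);  //=> true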
|
||||
|
||||
var InMemoryCollectionRoutingMap = Base.defineClass(
|
||||
/**
|
||||
* Represents an InMemoryCollectionRoutingMap object. Stores partition key ranges efficiently, with some additional information, and provides
|
||||
* convenience methods for working with a set of ranges.
|
||||
*/
|
||||
function (rangeById, rangeByInfo, orderedPartitionKeyRanges, orderedPartitionInfo, collectionUniqueId) {
|
||||
this._rangeById = rangeById;
|
||||
this._rangeByInfo = rangeByInfo;
|
||||
this._orderedPartitionKeyRanges = orderedPartitionKeyRanges;
|
||||
this._orderedRanges = orderedPartitionKeyRanges.map(
|
||||
function (pkr) {
|
||||
return new QueryRange(
|
||||
pkr[_PartitionKeyRange.MinInclusive], pkr[_PartitionKeyRange.MaxExclusive], true, false);
|
||||
});
|
||||
this._orderedPartitionInfo = orderedPartitionInfo;
|
||||
this._collectionUniqueId = collectionUniqueId;
|
||||
},
|
||||
{
|
||||
|
||||
getOrderedParitionKeyRanges: function () {
|
||||
return this._orderedPartitionKeyRanges;
|
||||
},
|
||||
|
||||
getRangeByEffectivePartitionKey: function (effectivePartitionKeyValue) {
|
||||
|
||||
if (_Constants.MinimumInclusiveEffectivePartitionKey === effectivePartitionKeyValue) {
|
||||
return this._orderedPartitionKeyRanges[0];
|
||||
}
|
||||
|
||||
if (_Constants.MaximumExclusiveEffectivePartitionKey === effectivePartitionKeyValue) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
var sortedLow = this._orderedRanges.map(
|
||||
function (r) {
|
||||
return { v: r.min, b: !r.isMinInclusive };
|
||||
});
|
||||
|
||||
var index = bs.le(sortedLow, { v: effectivePartitionKeyValue, b: true }, this._vbCompareFunction);
|
||||
// that's an error
|
||||
assert.ok(index >= 0, "error in collection routing map, queried partition key is less than the start range.");
|
||||
|
||||
return this._orderedPartitionKeyRanges[index];
|
||||
},
|
||||
|
||||
_vbCompareFunction: function (x, y) {
|
||||
if (x.v > y.v) return 1;
|
||||
if (x.v < y.v) return -1;
|
||||
if (x.b > y.b) return 1;
|
||||
if (x.b < y.b) return -1;
|
||||
return 0;
|
||||
},
|
||||
|
||||
getRangeByPartitionKeyRangeId: function (partitionKeyRangeId) {
|
||||
|
||||
var t = this._rangeById[partitionKeyRangeId];
|
||||
|
||||
if (t === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return t[0];
|
||||
},
|
||||
|
||||
getOverlappingRanges: function (providedQueryRanges) {
|
||||
|
||||
if (!_.isArray(providedQueryRanges)) {
|
||||
return this.getOverlappingRanges([providedQueryRanges]);
|
||||
}
|
||||
|
||||
var minToPartitionRange = {};
|
||||
var sortedLow = this._orderedRanges.map(
|
||||
function (r) {
|
||||
return { v: r.min, b: !r.isMinInclusive };
|
||||
});
|
||||
var sortedHigh = this._orderedRanges.map(
|
||||
function (r) {
|
||||
return { v: r.max, b: r.isMaxInclusive };
|
||||
});
|
||||
|
||||
// this for loop doesn't invoke any async callback
|
||||
for (var i = 0; i < providedQueryRanges.length; i++) {
|
||||
var queryRange = providedQueryRanges[i];
|
||||
if (queryRange.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
var minIndex = bs.le(sortedLow, { v: queryRange.min, b: !queryRange.isMinInclusive }, this._vbCompareFunction);
|
||||
assert.ok(minIndex >= 0, "error in collection routing map, queried value is less than the start range.");
|
||||
|
||||
var maxIndex = bs.ge(sortedHigh, { v: queryRange.max, b: queryRange.isMaxInclusive }, this._vbCompareFunction);
|
||||
assert.ok(maxIndex < sortedHigh.length, "error in collection routing map, queried value is greater than the end range.");
|
||||
|
||||
// the for loop doesn't invoke any async callback
|
||||
for (var j = minIndex; j < maxIndex + 1; j++) {
|
||||
if (queryRange.overlaps(this._orderedRanges[j])) {
|
||||
minToPartitionRange[this._orderedPartitionKeyRanges[j][_PartitionKeyRange.MinInclusive]] = this._orderedPartitionKeyRanges[j];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var overlappingPartitionKeyRanges = _.values(minToPartitionRange);
|
||||
|
||||
var getKey = function (r) {
|
||||
return r[_PartitionKeyRange.MinInclusive];
|
||||
};
|
||||
return _.sortBy(overlappingPartitionKeyRanges, getKey);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
var CollectionRoutingMapFactory = Base.defineClass(undefined, undefined,
|
||||
{
|
||||
createCompleteRoutingMap: function (partitionKeyRangeInfoTupleList, collectionUniqueId) {
|
||||
var rangeById = {};
|
||||
var rangeByInfo = {};
|
||||
|
||||
var sortedRanges = [];
|
||||
|
||||
// the for loop doesn't invoke any async callback
|
||||
for (var index = 0; index < partitionKeyRangeInfoTupleList.length; index++) {
|
||||
var r = partitionKeyRangeInfoTupleList[index];
|
||||
rangeById[r[0][_PartitionKeyRange.Id]] = r;
|
||||
rangeByInfo[r[1]] = r[0];
|
||||
sortedRanges.push(r);
|
||||
}
|
||||
|
||||
sortedRanges = _.sortBy(sortedRanges,
|
||||
function (r) {
|
||||
return r[0][_PartitionKeyRange.MinInclusive];
|
||||
});
|
||||
var partitionKeyOrderedRange = sortedRanges.map(function (r) { return r[0]; });
|
||||
var orderedPartitionInfo = sortedRanges.map(function (r) { return r[1]; });
|
||||
|
||||
if (!this._isCompleteSetOfRange(partitionKeyOrderedRange)) return undefined;
|
||||
return new InMemoryCollectionRoutingMap(rangeById, rangeByInfo, partitionKeyOrderedRange, orderedPartitionInfo, collectionUniqueId);
|
||||
},
|
||||
|
||||
_isCompleteSetOfRange: function (partitionKeyOrderedRange) {
|
||||
var isComplete = false;
|
||||
if (partitionKeyOrderedRange.length > 0) {
|
||||
var firstRange = partitionKeyOrderedRange[0];
|
||||
var lastRange = partitionKeyOrderedRange[partitionKeyOrderedRange.length - 1];
|
||||
isComplete = (firstRange[_PartitionKeyRange.MinInclusive] === _Constants.MinimumInclusiveEffectivePartitionKey);
|
||||
isComplete &= (lastRange[_PartitionKeyRange.MaxExclusive] === _Constants.MaximumExclusiveEffectivePartitionKey);
|
||||
|
||||
for (var i = 1; i < partitionKeyOrderedRange.length; i++) {
|
||||
var previousRange = partitionKeyOrderedRange[i - 1];
|
||||
var currentRange = partitionKeyOrderedRange[i];
|
||||
isComplete &= (previousRange[_PartitionKeyRange.MaxExclusive] == currentRange[_PartitionKeyRange.MinInclusive]);
|
||||
|
||||
if (!isComplete) {
|
||||
if (previousRange[_PartitionKeyRange.MaxExclusive] > currentRange[_PartitionKeyRange.MinInclusive] ) {
|
||||
throw Error("Ranges overlap");
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return isComplete;
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.InMemoryCollectionRoutingMap = InMemoryCollectionRoutingMap;
|
||||
exports.CollectionRoutingMapFactory = CollectionRoutingMapFactory;
|
||||
exports.QueryRange = QueryRange;
|
||||
exports._PartitionKeyRange = _PartitionKeyRange;
|
||||
}
|
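For reference, a minimal sketch of how the routing-map module above was consumed. The `lib/routing/` require path, the `""`/`"FF"` effective-partition-key bounds, and the hex range boundaries are illustrative assumptions based on the constants referenced in the code, not values taken from this diff.

```js
// Sketch only: path and range boundaries are illustrative.
var routing = require("./lib/routing/inMemoryCollectionRoutingMap");
var PKR = routing._PartitionKeyRange;

// Build a partition key range resource using the exported property-name constants.
function pkRange(id, min, max) {
    var r = {};
    r[PKR.Id] = id;
    r[PKR.MinInclusive] = min;
    r[PKR.MaxExclusive] = max;
    return r;
}

// createCompleteRoutingMap expects [partitionKeyRange, info] tuples covering the full key space.
var routingMap = routing.CollectionRoutingMapFactory.createCompleteRoutingMap(
    [[pkRange("0", "", "05C1"), true], [pkRange("1", "05C1", "FF"), true]],
    "collection-rid");

// Resolve which physical ranges a query span touches.
var hits = routingMap.getOverlappingRanges(new routing.QueryRange("05C0", "05C2", true, false));
console.log(hits.map(function (r) { return r[PKR.Id]; })); // [ '0', '1' ]

// Direct lookup by partition key range id.
console.log(routingMap.getRangeByPartitionKeyRangeId("1")[PKR.MinInclusive]); // 05C1
```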
|
@ -1,116 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, InMemoryCollectionRoutingMap = require("./inMemoryCollectionRoutingMap")
|
||||
, semaphore = require("semaphore");
|
||||
|
||||
var CollectionRoutingMapFactory = InMemoryCollectionRoutingMap.CollectionRoutingMapFactory;
|
||||
|
||||
//SCRIPT START
|
||||
var PartitionKeyRangeCache = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents a PartitionKeyRangeCache. PartitionKeyRangeCache provides the list of effective partition key ranges for a collection.
|
||||
* This implementation loads and caches the collection routing map per collection on demand.
|
||||
* @constructor PartitionKeyRangeCache
|
||||
* @param {object} documentclient - The documentclient object.
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient) {
|
||||
this.documentclient = documentclient;
|
||||
this.collectionRoutingMapByCollectionId = {};
|
||||
this.sem = semaphore(1);
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Finds or Instantiates the requested Collection Routing Map and invokes callback
|
||||
* @param {callback} callback - Function to execute for the collection routing map. The function takes two parameters: error, collectionRoutingMap.
|
||||
* @param {string} collectionLink - Requested collectionLink
|
||||
* @ignore
|
||||
*/
|
||||
_onCollectionRoutingMap: function (callback, collectionLink) {
|
||||
var isNameBased = Base.isLinkNameBased(collectionLink);
|
||||
var collectionId = this.documentclient.getIdFromLink(collectionLink, isNameBased);
|
||||
|
||||
var collectionRoutingMap = this.collectionRoutingMapByCollectionId[collectionId];
|
||||
if (collectionRoutingMap === undefined) {
|
||||
// attempt to construct the collection routing map
|
||||
var that = this;
|
||||
var semaphorizedFuncCollectionMapInstantiator = function () {
|
||||
var collectionRoutingMap = that.collectionRoutingMapByCollectionId[collectionId];
|
||||
if (collectionRoutingMap === undefined) {
|
||||
var partitionKeyRangesIterator = that.documentclient.readPartitionKeyRanges(collectionLink);
|
||||
partitionKeyRangesIterator.toArray(function (err, resources) {
|
||||
if (err) {
|
||||
return callback(err, undefined);
|
||||
}
|
||||
|
||||
collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(
|
||||
resources.map(function (r) { return [r, true]; }),
|
||||
collectionId);
|
||||
|
||||
that.collectionRoutingMapByCollectionId[collectionId] = collectionRoutingMap;
|
||||
that.sem.leave();
|
||||
return callback(undefined, collectionRoutingMap);
|
||||
});
|
||||
|
||||
} else {
|
||||
// sanity guard: another caller constructed the routing map while we were waiting on the semaphore
|
||||
that.sem.leave();
|
||||
return callback(undefined, collectionRoutingMap);
|
||||
}
|
||||
};
|
||||
|
||||
// We want only one attempt to construct the collectionRoutingMap, so the construction runs inside the semaphore take
|
||||
this.sem.take(semaphorizedFuncCollectionMapInstantiator);
|
||||
|
||||
} else {
|
||||
callback(undefined, collectionRoutingMap);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Given the query ranges and a collection, invokes the callback on the list of overlapping partition key ranges
|
||||
* @param {callback} callback - Function executed on the overlapping partition key ranges result; takes two parameters: error, partition key ranges
|
||||
* @param collectionLink
|
||||
* @param queryRanges
|
||||
* @ignore
|
||||
*/
|
||||
getOverlappingRanges: function (callback, collectionLink, queryRanges) {
|
||||
this._onCollectionRoutingMap(function (err, collectionRoutingMap) {
|
||||
if (err) {
|
||||
return callback(err, undefined);
|
||||
}
|
||||
return callback(undefined, collectionRoutingMap.getOverlappingRanges(queryRanges));
|
||||
}, collectionLink);
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = PartitionKeyRangeCache;
|
||||
}
|
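A hedged sketch of how the cache above was driven. Everything is callback-first; the DocumentClient construction, account endpoint, key, collection link and the internal require paths are placeholders rather than values from this commit.

```js
// Sketch only: credentials, links and internal require paths are placeholders.
var DocumentClient = require("documentdb").DocumentClient;
var PartitionKeyRangeCache = require("./lib/routing/partitionKeyRangeCache");
var QueryRange = require("./lib/routing/inMemoryCollectionRoutingMap").QueryRange;

var client = new DocumentClient("https://<account>.documents.azure.com:443/", { masterKey: "<key>" });
var cache = new PartitionKeyRangeCache(client);

// Note the callback-first parameter order used by this module.
cache.getOverlappingRanges(function (err, ranges) {
    if (err) { return console.error(err); }
    // The routing map is now memoized per collection; later calls skip the readPartitionKeyRanges round trip.
    console.log("query touches " + ranges.length + " partition key range(s)");
}, "dbs/mydb/colls/mycoll", new QueryRange("", "FF", true, false));
```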
|
@ -1,175 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../base")
|
||||
, assert = require("assert")
|
||||
, InMemoryCollectionRoutingMap = require("./inMemoryCollectionRoutingMap")
|
||||
, PartitionKeyRangeCache = require("./partitionKeyRangeCache")
|
||||
, util = require("util");
|
||||
|
||||
var CollectionRoutingMapFactory = InMemoryCollectionRoutingMap.CollectionRoutingMapFactory;
|
||||
var QueryRange = InMemoryCollectionRoutingMap.QueryRange;
|
||||
var _PartitionKeyRange = InMemoryCollectionRoutingMap._PartitionKeyRange;
|
||||
|
||||
//SCRIPT START
|
||||
var SmartRoutingMapProvider = Base.defineClass(
|
||||
|
||||
/**
|
||||
* Represents a SmartRoutingMapProvider object. Efficiently uses the PartitionKeyRangeCache and minimizes the unnecessary
|
||||
* invocation of PartitionKeyRangeCache.getOverlappingRanges()
|
||||
* @constructor SmartRoutingMapProvider
|
||||
* @param {object} documentclient - The documentclient object.
|
||||
* @ignore
|
||||
*/
|
||||
function (documentclient) {
|
||||
this._partitionKeyRangeCache = new PartitionKeyRangeCache(documentclient);
|
||||
},
|
||||
{
|
||||
_secondRangeIsAfterFirstRange: function (range1, range2) {
|
||||
assert.notEqual(range1.max, undefined, "invalid arg");
|
||||
assert.notEqual(range2.min, undefined, "invalid arg");
|
||||
|
||||
if (range1.max > range2.min) {
|
||||
// r.min < previousR.max, so the two ranges overlap
|
||||
return false;
|
||||
} else {
|
||||
if (range1.max === range2.min && range1.isMaxInclusive && range2.isMinInclusive) {
|
||||
// the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r
|
||||
// they share a point
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
_isSortedAndNonOverlapping: function (ranges) {
|
||||
for (var idx = 1; idx < ranges.length; idx++) {
|
||||
var previousR = ranges[idx - 1];
|
||||
var r = ranges[idx];
|
||||
if (!this._secondRangeIsAfterFirstRange(previousR, r)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
},
|
||||
|
||||
_stringMax: function (a, b) {
|
||||
return (a >= b ? a : b);
|
||||
},
|
||||
|
||||
_stringCompare: function(a, b) {
|
||||
return (a == b ? 0 : (a > b ? 1 : -1));
|
||||
},
|
||||
|
||||
_subtractRange: function (r, partitionKeyRange) {
|
||||
var left = this._stringMax(partitionKeyRange[_PartitionKeyRange.MaxExclusive], r.min);
|
||||
var leftInclusive;
|
||||
if (this._stringCompare(left, r.min) === 0) {
|
||||
leftInclusive = r.isMinInclusive;
|
||||
} else {
|
||||
leftInclusive = false;
|
||||
}
|
||||
return new QueryRange(left, r.max, leftInclusive,
|
||||
r.isMaxInclusive);
|
||||
},
|
||||
|
||||
/**
|
||||
* Given the sorted ranges and a collection, invokes the callback on the list of overlapping partition key ranges
|
||||
* @param {callback} callback - Function executed on the overlapping partition key ranges result; takes two parameters: error, partition key ranges
|
||||
* @param collectionLink
|
||||
* @param sortedRanges
|
||||
* @ignore
|
||||
*/
|
||||
getOverlappingRanges: function (callback, collectionLink, sortedRanges) {
|
||||
// validate that the provided list is sorted and non-overlapping
|
||||
if (!this._isSortedAndNonOverlapping(sortedRanges)) {
|
||||
return callback(new Error("the list of ranges is not a non-overlapping sorted ranges"), undefined);
|
||||
}
|
||||
|
||||
var partitionKeyRanges = [];
|
||||
|
||||
if (sortedRanges.length === 0) {
|
||||
return callback(undefined, partitionKeyRanges);
|
||||
}
|
||||
|
||||
var that = this;
|
||||
this._partitionKeyRangeCache._onCollectionRoutingMap(function (err, collectionRoutingMap) {
|
||||
if (err) {
|
||||
return callback(err, undefined);
|
||||
}
|
||||
|
||||
var index = 0;
|
||||
var currentProvidedRange = sortedRanges[index];
|
||||
while (true) {
|
||||
if (currentProvidedRange.isEmpty()) {
|
||||
// skip and go to the next item
|
||||
if (++index >= sortedRanges.length) {
|
||||
return callback(undefined, partitionKeyRanges);
|
||||
}
|
||||
currentProvidedRange = sortedRanges[index];
|
||||
continue;
|
||||
}
|
||||
|
||||
var queryRange;
|
||||
if (partitionKeyRanges.length > 0) {
|
||||
queryRange = that._subtractRange(
|
||||
currentProvidedRange, partitionKeyRanges[partitionKeyRanges.length - 1]);
|
||||
} else {
|
||||
queryRange = currentProvidedRange;
|
||||
}
|
||||
|
||||
var overlappingRanges = collectionRoutingMap.getOverlappingRanges(queryRange);
|
||||
assert.ok(overlappingRanges.length > 0, util.format("error: returned overlapping ranges for queryRange %s is empty", queryRange));
|
||||
partitionKeyRanges = partitionKeyRanges.concat(overlappingRanges);
|
||||
|
||||
var lastKnownTargetRange = QueryRange.parsePartitionKeyRange(partitionKeyRanges[partitionKeyRanges.length - 1]);
|
||||
assert.notEqual(lastKnownTargetRange, undefined);
|
||||
// the overlapping ranges must contain the requested range
|
||||
assert.ok(that._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0,
|
||||
util.format("error: returned overlapping ranges %s does not contain the requested range %s", overlappingRanges, queryRange));
|
||||
|
||||
// the current range is contained in partitionKeyRanges, just move forward
|
||||
if (++index >= sortedRanges.length) {
|
||||
return callback(undefined, partitionKeyRanges);
|
||||
}
|
||||
currentProvidedRange = sortedRanges[index];
|
||||
|
||||
while (that._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0) {
|
||||
// the current range is covered too, just move forward
|
||||
if (++index >= sortedRanges.length) {
|
||||
return callback(undefined, partitionKeyRanges);
|
||||
}
|
||||
currentProvidedRange = sortedRanges[index];
|
||||
}
|
||||
}
|
||||
}, collectionLink);
|
||||
}
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = SmartRoutingMapProvider;
|
||||
}
|
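The helper methods above encode the provider's two invariants: the provided query ranges must be sorted and non-overlapping, and each already-resolved partition key range is subtracted from the next provided range so no physical range is fetched twice. A small sketch exercising just those helpers (the stub documentclient and the range boundaries are illustrative assumptions):

```js
// Sketch only: the stub documentclient is never touched by these helpers.
var SmartRoutingMapProvider = require("./lib/routing/smartRoutingMapProvider");
var routing = require("./lib/routing/inMemoryCollectionRoutingMap");
var QueryRange = routing.QueryRange;

var provider = new SmartRoutingMapProvider({ /* stub documentclient */ });

// Ranges that only touch at "05" (exclusive max meets inclusive min) are accepted.
var a = new QueryRange("", "05", true, false);
var b = new QueryRange("05", "FF", true, false);
console.log(provider._isSortedAndNonOverlapping([a, b])); // true

// Subtracting a partition key range whose max is "05" trims the front of a full-span
// query range, so the already-covered prefix is not resolved again.
var pkr = {};
pkr[routing._PartitionKeyRange.MaxExclusive] = "05";
var trimmed = provider._subtractRange(new QueryRange("", "FF", true, false), pkr);
console.log(trimmed.min, trimmed.isMinInclusive); // 05 false
```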
|
@ -1,196 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base")
|
||||
, ResourceId = require("./resourceId")
|
||||
, Constants = require("./constants")
|
||||
, BigInt = require("big-integer");
|
||||
|
||||
var SessionContainer = Base.defineClass(
|
||||
|
||||
function (hostname, collectionNameToCollectionResourceId, collectionResourceIdToSessionTokens) {
|
||||
this.hostname = hostname;
|
||||
|
||||
if (collectionNameToCollectionResourceId != undefined && collectionResourceIdToSessionTokens != undefined) {
|
||||
this.collectionNameToCollectionResourceId = collectionNameToCollectionResourceId;
|
||||
this.collectionResourceIdToSessionTokens = collectionResourceIdToSessionTokens;
|
||||
} else {
|
||||
this.collectionNameToCollectionResourceId = {};
|
||||
this.collectionResourceIdToSessionTokens = {};
|
||||
}
|
||||
},
|
||||
{
|
||||
getHostName: function () {
|
||||
return this.hostname;
|
||||
},
|
||||
|
||||
getPartitionKeyRangeIdToTokenMap: function (request) {
|
||||
return this.getPartitionKeyRangeIdToTokenMapPrivate(request['isNameBased'], request['resourceId'], request['resourceAddress']);
|
||||
},
|
||||
|
||||
getPartitionKeyRangeIdToTokenMapPrivate: function (isNameBased, rId, resourceAddress) {
|
||||
var rangeIdToTokenMap = null;
|
||||
if (!isNameBased) {
|
||||
if (rId) {
|
||||
var resourceIdObject = new ResourceId();
|
||||
var resourceId = resourceIdObject.parse(rId);
|
||||
if (resourceId.documentCollection != '0') {
|
||||
rangeIdToTokenMap = this.collectionResourceIdToSessionTokens[resourceId.getUniqueDocumentCollectionId()];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
resourceAddress = Base._trimSlashes(resourceAddress);
|
||||
var collectionName = Base.getCollectionLink(resourceAddress);
|
||||
if (collectionName && (collectionName in this.collectionNameToCollectionResourceId))
|
||||
rangeIdToTokenMap = this.collectionResourceIdToSessionTokens[this.collectionNameToCollectionResourceId[collectionName]];
|
||||
}
|
||||
|
||||
return rangeIdToTokenMap;
|
||||
},
|
||||
|
||||
resolveGlobalSessionToken: function (request) {
|
||||
if (!request)
|
||||
throw new Error("request cannot be null");
|
||||
|
||||
return this.resolveGlobalSessionTokenPrivate(request['isNameBased'], request['resourceId'], request['resourceAddress']);
|
||||
},
|
||||
|
||||
resolveGlobalSessionTokenPrivate: function (isNameBased, rId, resourceAddress) {
|
||||
var rangeIdToTokenMap = this.getPartitionKeyRangeIdToTokenMapPrivate(isNameBased, rId, resourceAddress);
|
||||
if (rangeIdToTokenMap != null)
|
||||
return this.getCombinedSessionToken(rangeIdToTokenMap);
|
||||
|
||||
return "";
|
||||
},
|
||||
|
||||
clearToken: function (request) {
|
||||
var collectionResourceId = undefined;
|
||||
if (!request['isNameBased']) {
|
||||
if (request['resourceId']) {
|
||||
var resourceIdObject = new ResourceId();
|
||||
var resourceId = resourceIdObject.parse(request['resourceId']);
|
||||
if (resourceId.documentCollection != 0) {
|
||||
collectionResourceId = resourceId.getUniqueDocumentCollectionId();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var resourceAddress = Base._trimSlashes(request['resourceAddress']);
|
||||
var collectionName = Base.getCollectionLink(resourceAddress);
|
||||
if (collectionName) {
|
||||
collectionResourceId = this.collectionNameToCollectionResourceId[collectionName];
|
||||
delete this.collectionNameToCollectionResourceId[collectionName];
|
||||
}
|
||||
}
|
||||
if (collectionResourceId != undefined)
|
||||
delete this.collectionResourceIdToSessionTokens[collectionResourceId];
|
||||
},
|
||||
|
||||
setSessionToken: function (request, reqHeaders, resHeaders) {
|
||||
if (resHeaders && !this.isReadingFromMaster(request['resourceType'], request['operationType'])) {
|
||||
var sessionToken = resHeaders[Constants.HttpHeaders.SessionToken];
|
||||
if (sessionToken) {
|
||||
var ownerFullName = resHeaders[Constants.HttpHeaders.OwnerFullName];
|
||||
if (!ownerFullName)
|
||||
ownerFullName = Base._trimSlashes(request['resourceAddress']);
|
||||
|
||||
var collectionName = Base.getCollectionLink(ownerFullName);
|
||||
|
||||
var ownerId = undefined;
|
||||
if (!request['isNameBased']) {
|
||||
ownerId = request['resourceId'];
|
||||
} else {
|
||||
ownerId = resHeaders[Constants.HttpHeaders.OwnerId];
|
||||
if (!ownerId)
|
||||
ownerId = request['resourceId'];
|
||||
}
|
||||
|
||||
if (ownerId) {
|
||||
var resourceIdObject = new ResourceId();
|
||||
var resourceId = resourceIdObject.parse(ownerId);
|
||||
|
||||
if (resourceId.documentCollection != 0 && collectionName) {
|
||||
var uniqueDocumentCollectionId = resourceId.getUniqueDocumentCollectionId();
|
||||
this.setSessionTokenPrivate(uniqueDocumentCollectionId, collectionName, sessionToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
setSessionTokenPrivate: function (collectionRid, collectionName, sessionToken) {
|
||||
if (!(collectionRid in this.collectionResourceIdToSessionTokens))
|
||||
this.collectionResourceIdToSessionTokens[collectionRid] = {};
|
||||
this.compareAndSetToken(sessionToken, this.collectionResourceIdToSessionTokens[collectionRid]);
|
||||
if (!(collectionName in this.collectionNameToCollectionResourceId))
|
||||
this.collectionNameToCollectionResourceId[collectionName] = collectionRid;
|
||||
},
|
||||
|
||||
getCombinedSessionToken: function (tokens) {
|
||||
var result = "";
|
||||
if (tokens) {
|
||||
for (var index in tokens) {
|
||||
result = result + index + ':' + tokens[index] + ",";
|
||||
}
|
||||
}
|
||||
return result.slice(0, -1);
|
||||
},
|
||||
|
||||
compareAndSetToken: function (newToken, oldTokens) {
|
||||
if (newToken) {
|
||||
var newTokenParts = newToken.split(":");
|
||||
if (newTokenParts.length == 2) {
|
||||
var range = newTokenParts[0];
|
||||
var newLSN = BigInt(newTokenParts[1]);
|
||||
var success = false;
|
||||
|
||||
var oldLSN = BigInt(oldTokens[range]);
|
||||
if (!oldLSN || oldLSN.lesser(newLSN))
|
||||
oldTokens[range] = newLSN.toString();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
isReadingFromMaster: function (resourceType, operationType) {
|
||||
if (resourceType == "offers" ||
|
||||
resourceType == "dbs" ||
|
||||
resourceType == "users" ||
|
||||
resourceType == "permissions" ||
|
||||
resourceType == "topology" ||
|
||||
resourceType == "databaseaccount" ||
|
||||
resourceType == "pkranges" ||
|
||||
(resourceType == "colls"
|
||||
&& (operationType == Constants.OperationTypes.Query))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = SessionContainer;
|
||||
}
|
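A brief sketch of the token bookkeeping implemented above: tokens are `<partitionKeyRangeId>:<LSN>` pairs, `compareAndSetToken` only ever moves an LSN forward, and `getCombinedSessionToken` re-serializes the per-range map. The require path and hostname below are placeholders.

```js
// Sketch only: path and hostname are placeholders.
var SessionContainer = require("./lib/sessionContainer");

var container = new SessionContainer("https://myaccount.documents.azure.com/");
var tokens = {};

container.compareAndSetToken("0:100", tokens); // stores LSN 100 for range "0"
container.compareAndSetToken("0:42", tokens);  // lower LSN, ignored
container.compareAndSetToken("1:7", tokens);   // new range, stored

console.log(container.getCombinedSessionToken(tokens)); // 0:100,1:7
```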
||||
|
|
@ -1,96 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base")
|
||||
, Constants = require("./constants")
|
||||
, url = require("url");
|
||||
|
||||
//SCRIPT START
|
||||
/**
|
||||
* This class implements the retry policy for session consistent reads.
|
||||
* @property {int} _maxRetryAttemptCount - Max number of retry attempts to perform.
|
||||
* @property {int} currentRetryAttemptCount - Current retry attempt count.
|
||||
* @property {object} globalEndpointManager - The GlobalEndpointManager instance.
|
||||
* @property {object} request - The Http request information
|
||||
* @property {int} retryAfterInMilliseconds - Retry interval in milliseconds.
|
||||
*/
|
||||
var SessionReadRetryPolicy = Base.defineClass(
|
||||
/**
|
||||
* @constructor SessionReadRetryPolicy
|
||||
* @param {object} globalEndpointManager - The GlobalEndpointManager instance.
|
||||
* @property {object} request - The Http request information
|
||||
*/
|
||||
function (globalEndpointManager, request) {
|
||||
this._maxRetryAttemptCount = SessionReadRetryPolicy.maxRetryAttemptCount;
|
||||
this.currentRetryAttemptCount = 0;
|
||||
this.globalEndpointManager = globalEndpointManager;
|
||||
this.request = request;
|
||||
this.retryAfterInMilliseconds = SessionReadRetryPolicy.retryAfterInMilliseconds;
|
||||
},
|
||||
{
|
||||
/**
|
||||
* Determines whether the request should be retried or not.
|
||||
* @param {object} err - Error returned by the request.
|
||||
* @param {function} callback - The callback function which takes bool argument which specifies whether the request will be retried or not.
|
||||
*/
|
||||
shouldRetry: function (err, callback) {
|
||||
if (err) {
|
||||
var that = this;
|
||||
if (this.currentRetryAttemptCount <= this._maxRetryAttemptCount
|
||||
&& (this.request.operationType == Constants.OperationTypes.Read ||
|
||||
this.request.operationType == Constants.OperationTypes.Query)) {
|
||||
that.globalEndpointManager.getReadEndpoint(function (readEndpoint) {
|
||||
that.globalEndpointManager.getWriteEndpoint(function (writeEndpoint) {
|
||||
if (readEndpoint !== writeEndpoint && that.request.endpointOverride == null) {
|
||||
that.currentRetryAttemptCount++;
|
||||
console.log("Read with session token not available in read region. Trying read from write region.");
|
||||
that.request.endpointOverride = writeEndpoint;
|
||||
var newUrl = url.parse(writeEndpoint);
|
||||
return callback(true, newUrl);
|
||||
} else {
|
||||
console.log("Clear the the token for named base request");
|
||||
that.request.client.clearSessionToken(that.request.path);
|
||||
return callback(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
return callback(false);
|
||||
}
|
||||
},
|
||||
{
|
||||
maxRetryAttemptCount: 1,
|
||||
retryAfterInMilliseconds: 0,
|
||||
NOT_FOUND_STATUS_CODE: 404,
|
||||
READ_SESSION_NOT_AVAILABLE_SUB_STATUS_CODE: 1002
|
||||
}
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = SessionReadRetryPolicy;
|
||||
}
|
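The retry policy above retries a session read at most once, redirecting it to the write region when the read region has not caught up, and otherwise clears the session token. A sketch with stubbed collaborators; the endpoints, path and stub shapes are assumptions for illustration only.

```js
// Sketch only: stubbed globalEndpointManager and request.
var SessionReadRetryPolicy = require("./lib/sessionReadRetryPolicy");
var Constants = require("./lib/constants");

var globalEndpointManager = {
    getReadEndpoint: function (cb) { cb("https://myaccount-eastus.documents.azure.com/"); },
    getWriteEndpoint: function (cb) { cb("https://myaccount-westus.documents.azure.com/"); }
};

var request = {
    operationType: Constants.OperationTypes.Read,
    endpointOverride: null,
    path: "dbs/mydb/colls/mycoll/docs/doc1",
    client: { clearSessionToken: function (path) { console.log("clearing token for " + path); } }
};

var policy = new SessionReadRetryPolicy(globalEndpointManager, request);
policy.shouldRetry(new Error("read session not available"), function (retry, newUrl) {
    // First attempt: the read is redirected to the write region.
    console.log(retry, newUrl.host); // true myaccount-westus.documents.azure.com
});
```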
|
@ -1,71 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var StatusCodes = {
|
||||
// Success
|
||||
"Ok": 200,
|
||||
"Created": 201,
|
||||
"Accepted": 202,
|
||||
"NoContent": 204,
|
||||
"NotModified": 304,
|
||||
|
||||
// Client error
|
||||
"BadRequest": 400,
|
||||
"Unauthorized": 401,
|
||||
"Forbidden": 403,
|
||||
"NotFound": 404,
|
||||
"MethodNotAllowed": 405,
|
||||
"RequestTimeout": 408,
|
||||
"Conflict": 409,
|
||||
"Gone": 410,
|
||||
"PreconditionFailed": 412,
|
||||
"RequestEntityTooLarge": 413,
|
||||
"TooManyRequests": 429,
|
||||
"RetryWith": 449,
|
||||
|
||||
"InternalServerError": 500,
|
||||
"ServiceUnavailable": 503,
|
||||
|
||||
//Operation pause and cancel. These are FAKE status codes for QOS logging purpose only.
|
||||
"OperationPaused": 1200,
|
||||
"OperationCancelled": 1201
|
||||
};
|
||||
|
||||
var SubStatusCodes = {
|
||||
"Unknown": 0,
|
||||
|
||||
// 400: Bad Request Substatus
|
||||
"CrossPartitionQueryNotServable": 1004,
|
||||
|
||||
// 410: StatusCodeType_Gone: substatus
|
||||
"PartitionKeyRangeGone": 1002,
|
||||
}
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports.StatusCodes = StatusCodes;
|
||||
module.exports.SubStatusCodes = SubStatusCodes;
|
||||
}
|
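These maps are typically consulted when deciding how to react to a failed request, e.g. a 410 with sub-status 1002 signals a partition split that invalidates cached routing information. A small sketch; the `code`/`substatus` error shape is an assumption for illustration, not defined in this file.

```js
// Sketch only: the error shape is assumed for illustration.
var codes = require("./lib/statusCodes");

function isPartitionSplit(err) {
    return err.code === codes.StatusCodes.Gone &&
        err.substatus === codes.SubStatusCodes.PartitionKeyRangeGone;
}

function needsCrossPartitionQuery(err) {
    return err.code === codes.StatusCodes.BadRequest &&
        err.substatus === codes.SubStatusCodes.CrossPartitionQueryNotServable;
}

console.log(isPartitionSplit({ code: 410, substatus: 1002 }));          // true
console.log(needsCrossPartitionQuery({ code: 400, substatus: 1004 }));  // true
```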
|
@ -1,225 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("./base")
|
||||
, Constants = require("./constants")
|
||||
, Helper = require("./helper").Helper;
|
||||
|
||||
|
||||
//SCRIPT START
|
||||
var UriFactory = Base.defineClass(
|
||||
|
||||
/**************************CONSTRUCTORS**************************/
|
||||
undefined,
|
||||
|
||||
/************************INSTANCE MEMBERS************************/
|
||||
undefined,
|
||||
|
||||
/*************************STATIC METHODS*************************/
|
||||
{
|
||||
/**
|
||||
* Given a database id, this creates a database link.
|
||||
* @param {string} databaseId -The database id
|
||||
* @returns {string} -A database link in the format of dbs/{0} with {0} being a Uri escaped version of the databaseId
|
||||
* @description Would be used when creating or deleting a DocumentCollection or a User in Azure Cosmos DB database service
|
||||
*/
|
||||
createDatabaseUri: function (databaseId) {
|
||||
databaseId = Helper.trimSlashFromLeftAndRight(databaseId);
|
||||
Helper.validateResourceId(databaseId);
|
||||
|
||||
return Constants.Path.DatabasesPathSegment + "/" +
|
||||
databaseId;
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a database and collection id, this creates a collection link.
|
||||
* @param {string} databaseId -The database id
|
||||
* @param {string} collectionId -The collection id
|
||||
* @returns {string} A collection link in the format of dbs/{0}/colls/{1} with {0} being a Uri escaped version of the databaseId and {1} being collectionId
|
||||
* @description Would be used when updating or deleting a DocumentCollection, creating a Document, a StoredProcedure, a Trigger, a UserDefinedFunction, or when executing a query with CreateDocumentQuery in Azure Cosmos DB database service.
|
||||
*/
|
||||
createDocumentCollectionUri: function (databaseId, collectionId) {
|
||||
collectionId = Helper.trimSlashFromLeftAndRight(collectionId);
|
||||
Helper.validateResourceId(collectionId);
|
||||
|
||||
return this.createDatabaseUri(databaseId) + "/" +
|
||||
Constants.Path.CollectionsPathSegment + "/" +
|
||||
collectionId;
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a database and user id, this creates a user link.
|
||||
* @param {string} databaseId -The database id
|
||||
* @param {string} userId -The user id
|
||||
* @returns {string} A user link in the format of dbs/{0}/users/{1} with {0} being a Uri escaped version of the databaseId and {1} being userId
|
||||
* @description Would be used when creating a Permission, or when replacing or deleting a User in Azure Cosmos DB database service
|
||||
*/
|
||||
createUserUri: function (databaseId, userId) {
|
||||
userId = Helper.trimSlashFromLeftAndRight(userId);
|
||||
Helper.validateResourceId(userId);
|
||||
|
||||
return this.createDatabaseUri(databaseId) + "/" +
|
||||
Constants.Path.UsersPathSegment + "/" +
|
||||
userId;
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a database, collection and document id, this creates a document link.
|
||||
* @param {string} databaseId -The database id
|
||||
* @param {string} collectionId -The collection id
|
||||
* @param {string} documentId -The document id
|
||||
* @returns {string} -A document link in the format of dbs/{0}/colls/{1}/docs/{2} with {0} being a Uri escaped version of the databaseId, {1} being collectionId and {2} being the documentId
|
||||
* @description Would be used when creating an Attachment, or when replacing or deleting a Document in Azure Cosmos DB database service
|
||||
*/
|
||||
createDocumentUri: function (databaseId, collectionId, documentId) {
|
||||
documentId = Helper.trimSlashFromLeftAndRight(documentId);
|
||||
Helper.validateResourceId(documentId);
|
||||
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.DocumentsPathSegment + "/" +
|
||||
documentId;
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a database, user and permission id, this creates a permission link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} userId -The user Id
|
||||
* @param {string} permissionId - The permissionId
|
||||
* @returns {string} A permission link in the format of dbs/{0}/users/{1}/permissions/{2} with {0} being a Uri escaped version of the databaseId, {1} being userId and {2} being permissionId
|
||||
* @description Would be used when replacing or deleting a Permission in Azure Cosmos DB database service.
|
||||
*/
|
||||
createPermissionUri: function (databaseId, userId, permissionId) {
|
||||
permissionId = Helper.trimSlashFromLeftAndRight(permissionId);
|
||||
Helper.validateResourceId(permissionId);
|
||||
|
||||
return this.createUserUri(databaseId, userId) + "/" +
|
||||
Constants.Path.PermissionsPathSegment + "/" +
|
||||
permissionId;
|
||||
},
|
||||
|
||||
/**
|
||||
* Given a database, collection and stored proc id, this creates a stored proc link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @param {string} storedProcedureId -The stored procedure Id
|
||||
* @returns {string} -A stored procedure link in the format of dbs/{0}/colls/{1}/sprocs/{2} with {0} being a Uri escaped version of the databaseId, {1} being collectionId and {2} being the storedProcedureId
|
||||
* @description Would be used when replacing, executing, or deleting a StoredProcedure in Azure Cosmos DB database service.
|
||||
*/
|
||||
createStoredProcedureUri: function (databaseId, collectionId, storedProcedureId) {
|
||||
storedProcedureId = Helper.trimSlashFromLeftAndRight(storedProcedureId);
|
||||
Helper.validateResourceId(storedProcedureId);
|
||||
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.StoredProceduresPathSegment + "/" +
|
||||
storedProcedureId;
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Given a database, collection and trigger id, this creates a trigger link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @param {string} triggerId -The trigger Id
|
||||
* @returns {string} -A trigger link in the format of dbs/{0}/colls/{1}/triggers/{2} with {0} being a Uri escaped version of the databaseId, {1} being collectionId and {2} being the triggerId
|
||||
* @description Would be used when replacing, executing, or deleting a Trigger in Azure Cosmos DB database service
|
||||
*/
|
||||
createTriggerUri: function (databaseId, collectionId, triggerId) {
|
||||
triggerId = Helper.trimSlashFromLeftAndRight(triggerId);
|
||||
Helper.validateResourceId(triggerId);
|
||||
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.TriggersPathSegment + "/" +
|
||||
triggerId;
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Given a database, collection and udf id, this creates a udf link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @param {string} udfId -The User Defined Function Id
|
||||
* @returns {string} -A udf link in the format of dbs/{0}/colls/{1}/udfs/{2} with {0} being a Uri escaped version of the databaseId, {1} being collectionId and {2} being the udfId
|
||||
* @description Would be used when replacing, executing, or deleting a UserDefinedFunction in Azure Cosmos DB database service
|
||||
*/
|
||||
createUserDefinedFunctionUri: function (databaseId, collectionId, udfId) {
|
||||
udfId = Helper.trimSlashFromLeftAndRight(udfId);
|
||||
Helper.validateResourceId(udfId);
|
||||
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.UserDefinedFunctionsPathSegment + "/" +
|
||||
udfId;
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Given a database, collection and conflict id, this creates a conflict link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @param {string} conflictId -The conflict Id
|
||||
* @returns {string} -A conflict link in the format of dbs/{0}/colls/{1}/conflicts/{2} with {0} being a Uri escaped version of the databaseId, {1} being collectionId and {2} being the conflictId
|
||||
* @description Would be used when creating a Conflict in Azure Cosmos DB database service.
|
||||
*/
|
||||
createConflictUri: function (databaseId, collectionId, conflictId) {
|
||||
conflictId = Helper.trimSlashFromLeftAndRight(conflictId);
|
||||
Helper.validateResourceId(conflictId);
|
||||
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.ConflictsPathSegment + "/" +
|
||||
conflictId;
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Given a database, collection, document and attachment id, this creates an attachment link.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @param {string} documentId -The document Id
|
||||
* @param {string} attachmentId -The attachment Id
|
||||
* @returns {string} -An attachment link in the format of dbs/{0}/colls/{1}/docs/{2}/attachments/{3} with {0} being a Uri escaped version of the databaseId, {1} being collectionId, {2} being documentId and {3} being the attachmentId
|
||||
* @description Would be used when replacing or deleting an Attachment in Azure Cosmos DB database service.
|
||||
*/
|
||||
createAttachmentUri: function (databaseId, collectionId, documentId, attachmentId) {
|
||||
attachmentId = Helper.trimSlashFromLeftAndRight(attachmentId);
|
||||
Helper.validateResourceId(attachmentId);
|
||||
|
||||
return this.createDocumentUri(databaseId, collectionId, documentId) + "/" +
|
||||
Constants.Path.AttachmentsPathSegment + "/" +
|
||||
attachmentId;
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Given a database and collection, this creates a partition key ranges link in the Azure Cosmos DB database service.
|
||||
* @param {string} databaseId -The database Id
|
||||
* @param {string} collectionId -The collection Id
|
||||
* @returns {string} -A partition key ranges link in the format of dbs/{0}/colls/{1}/pkranges with {0} being a Uri escaped version of the databaseId and {1} being collectionId
|
||||
*/
|
||||
createPartitionKeyRangesUri: function (databaseId, collectionId) {
|
||||
return this.createDocumentCollectionUri(databaseId, collectionId) + "/" +
|
||||
Constants.Path.PartitionKeyRangesPathSegment;
|
||||
}
|
||||
}
|
||||
|
||||
);
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.UriFactory = UriFactory;
|
||||
}
|
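The factory only concatenates validated, slash-trimmed ids onto fixed path segments, so the resulting links can be read straight off the JSDoc above. A short illustration (the require path and ids are placeholders):

```js
// Sketch only: the require path and ids are illustrative.
var UriFactory = require("./lib/uriFactory").UriFactory;

console.log(UriFactory.createDatabaseUri("mydb"));
// dbs/mydb
console.log(UriFactory.createDocumentUri("mydb", "mycoll", "mydoc"));
// dbs/mydb/colls/mycoll/docs/mydoc
console.log(UriFactory.createStoredProcedureUri("mydb", "mycoll", "bulkImport"));
// dbs/mydb/colls/mycoll/sprocs/bulkImport
```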
|
@ -1,49 +0,0 @@
|
|||
{
|
||||
"name": "documentdb",
|
||||
"description": "Azure Cosmos DB Service Node.js SDK for SQL API",
|
||||
"keywords": [
|
||||
"cosmosdb",
|
||||
"cosmos db",
|
||||
"documentdb",
|
||||
"document database",
|
||||
"azure",
|
||||
"nosql",
|
||||
"database",
|
||||
"cloud"
|
||||
],
|
||||
"version": "1.14.2",
|
||||
"author": "Microsoft Corporation",
|
||||
"main": "./index.js",
|
||||
"engine": {
|
||||
"node": ">=0.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^8.0.7",
|
||||
"eslint": "*",
|
||||
"grunt": "^0.4.5",
|
||||
"grunt-eslint": "^13.0.0",
|
||||
"jsdoc": "*",
|
||||
"load-grunt-tasks": "^3.1.0",
|
||||
"mocha": "*",
|
||||
"sinon": "^3.2.1",
|
||||
"time-grunt": "^1.2.0",
|
||||
"grunt-mocha-test": "^0.13.3",
|
||||
"mocha-multi-reporters": "^1.1.6",
|
||||
"mocha-junit-reporter": "^1.15.0",
|
||||
"child_process": "*"
|
||||
},
|
||||
"dependencies": {
|
||||
"big-integer": "^1.6.25",
|
||||
"int64-buffer": "^0.1.9",
|
||||
"binary-search-bounds": "2.0.3",
|
||||
"priorityqueuejs": "1.0.0",
|
||||
"semaphore": "1.0.5",
|
||||
"underscore": "1.8.3",
|
||||
"tunnel": "0.0.5"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Azure/azure-documentdb-node"
|
||||
},
|
||||
"license": "MIT"
|
||||
}
|
|
@ -1,187 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../lib/base");
|
||||
var assert = require("assert")
|
||||
|
||||
describe("Base._trimSlashes", function () {
|
||||
var test = function (input, expected) {
|
||||
assert.strictEqual(Base._trimSlashes(input), expected);
|
||||
};
|
||||
|
||||
it("/a/ => a", function () {
|
||||
test("/a/", "a");
|
||||
});
|
||||
|
||||
it("/a/b => a/b", function () {
|
||||
test("/a/b", "a/b");
|
||||
});
|
||||
|
||||
it("/a/b/ => a/b", function () {
|
||||
test("/a/b/", "a/b");
|
||||
});
|
||||
|
||||
it("a/b/ => a/b", function () {
|
||||
test("a/b", "a/b");
|
||||
});
|
||||
|
||||
it("/a => a", function () {
|
||||
test("/a", "a");
|
||||
});
|
||||
|
||||
it("a/ => a", function () {
|
||||
test("a/", "a");
|
||||
});
|
||||
|
||||
it("//a// => a", function () {
|
||||
test("//a//", "a");
|
||||
});
|
||||
|
||||
it("/ => ", function () {
|
||||
test("/", "");
|
||||
});
|
||||
|
||||
it("// => ", function () {
|
||||
test("//", "");
|
||||
});
|
||||
|
||||
it("/// => ", function () {
|
||||
test("///", "");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Base._isValidCollectionLink", function () {
|
||||
var test = function (input, expected) {
|
||||
assert.strictEqual(Base._isValidCollectionLink(input), expected);
|
||||
};
|
||||
|
||||
it("not string => false", function () {
|
||||
var testValues = [
|
||||
null,
|
||||
undefined,
|
||||
0,
|
||||
function () { },
|
||||
[],
|
||||
{}
|
||||
];
|
||||
|
||||
testValues.forEach(function (value) {
|
||||
test(value, false);
|
||||
});
|
||||
});
|
||||
|
||||
it("not four parts => false", function () {
|
||||
var testValues = [
|
||||
"",
|
||||
"a",
|
||||
"a/b",
|
||||
"a/b/c",
|
||||
"a/b/c/d/e",
|
||||
"a/b/c/d/e/f"
|
||||
];
|
||||
|
||||
testValues.forEach(function (value) {
|
||||
test(value, false);
|
||||
});
|
||||
});
|
||||
|
||||
it("not dbs/x/colls/y => false", function () {
|
||||
var testValues = [
|
||||
"a/b/c/d",
|
||||
"dbs/b/c/d",
|
||||
"a/b/colls/d"
|
||||
];
|
||||
|
||||
testValues.forEach(function (value) {
|
||||
test(value, false);
|
||||
});
|
||||
});
|
||||
|
||||
it("dbs/x/colls/y => true", function () {
|
||||
var testValues = [
|
||||
"dbs/b/colls/d",
|
||||
];
|
||||
|
||||
testValues.forEach(function (value) {
|
||||
test(value, true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Base.getAttachmentIdFromMediaId", function () {
|
||||
var test = function (input, expected) {
|
||||
assert.strictEqual(Base.getAttachmentIdFromMediaId(input), expected);
|
||||
};
|
||||
|
||||
it("> 20 characters, Alpha-numeric only: 6hl2ALdWbQCxAgAAAAAAAC4b1VoB => 6hl2ALdWbQCxAgAAAAAAAC4b1Vo=", function () {
|
||||
test("6hl2ALdWbQCxAgAAAAAAAC4-1VoB", "6hl2ALdWbQCxAgAAAAAAAC4-1Vo=");
|
||||
});
|
||||
|
||||
it("> 20 characters, Single hyphen '-': 6hl2ALdWbQCxAgAAAAAAAC4-1VoB => 6hl2ALdWbQCxAgAAAAAAAC4-1Vo=", function () {
|
||||
test("6hl2ALdWbQCxAgAAAAAAAC4-1VoB", "6hl2ALdWbQCxAgAAAAAAAC4-1Vo=");
|
||||
});
|
||||
|
||||
it("> 20 characters, Multiple hyphens '-': 6hl2ALdWb-CxAgAAAAAAAC4-1VoB => 6hl2ALdWb-CxAgAAAAAAAC4-1Vo=", function () {
|
||||
test("6hl2ALdWb-CxAgAAAAAAAC4-1VoB", "6hl2ALdWb-CxAgAAAAAAAC4-1Vo=");
|
||||
});
|
||||
|
||||
it("> 20 characters, Plus sign '+': 6hl2ALdWb+CxAgAAAAAAAC4Q1VoB => 6hl2ALdWb+CxAgAAAAAAAC4Q1Vo=", function () {
|
||||
test("6hl2ALdWb-CxAgAAAAAAAC4-1VoB", "6hl2ALdWb-CxAgAAAAAAAC4-1Vo=");
|
||||
});
|
||||
|
||||
it("> 20 characters, Plus sign '+', Hyphen '-': 6hl2ALdWb+CxAgAAAAAAAC4-1VoB => 6hl2ALdWb+CxAgAAAAAAAC4-1Vo=", function () {
|
||||
test("6hl2ALdWb-CxAgAAAAAAAC4-1VoB", "6hl2ALdWb-CxAgAAAAAAAC4-1Vo=");
|
||||
});
|
||||
|
||||
it("< 20 characters, Plus sign '+', Hyphen '-': 6hl2A-dWb+CxAgAAAA => 6hl2A-dWb+CxAgAAAA", function () {
|
||||
test("6hl2A-dWb+CxAgAAAA", "6hl2A-dWb+CxAgAAAA");
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("Base.parsePath", function () {
|
||||
var test = function (input, expected) {
|
||||
assert.strictEqual(JSON.stringify(Base.parsePath(input)), JSON.stringify(expected));
|
||||
};
|
||||
|
||||
it("escape control characters 1", function () {
|
||||
test("/\"Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1\"/*", [ "Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1", "*" ]);
|
||||
});
|
||||
|
||||
it("escape control characters 2", function () {
|
||||
test("/'Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1'/*", [ "Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1", "*" ]);
|
||||
});
|
||||
|
||||
it("test paths", function () {
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
|
||||
var content = fs.readFileSync(path.resolve(__dirname, 'BaselineTest.PathParser.json'));
|
||||
var obj = JSON.parse(content);
|
||||
obj.forEach(function (entry) {
|
||||
test(entry.path, entry.parts);
|
||||
});
|
||||
});
|
||||
});
|
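For quick reference, the helpers exercised by the specs above behave as follows; the paths are placeholders and the expected outputs are inferred from the cases in this file.

```js
// Sketch only: outputs follow from the specs above.
var Base = require("../lib/base");

console.log(Base._trimSlashes("//dbs/mydb/"));                      // dbs/mydb
console.log(Base._isValidCollectionLink("dbs/mydb/colls/mycoll"));  // true
console.log(Base.parsePath("/dbs/mydb/colls/mycoll"));              // [ 'dbs', 'mydb', 'colls', 'mycoll' ]
```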
|
@ -1,231 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var assert = require("assert");
|
||||
var ConsistentHashRing = require("../lib/hash/consistentHashRing").ConsistentHashRing;
|
||||
|
||||
describe("ConsistentHashRing new()", function () {
|
||||
it("valid arguments does not throw", function () {
|
||||
var ring = new ConsistentHashRing(["bar"]);
|
||||
assert(ring);
|
||||
assert.strictEqual(ring._partitions.length, 128);
|
||||
});
|
||||
|
||||
it("invalid nodes throws", function () {
|
||||
assert.throws(
|
||||
function () {
|
||||
var ring = new ConsistentHashRing();
|
||||
},
|
||||
/Invalid argument: 'nodes' has to be an array./
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ConsistentHashRing._constructPartitions", function () {
|
||||
it("construct ring", function () {
|
||||
var fixedHashValue = 123;
|
||||
var partitionsPerNode = 2;
|
||||
var nodes = ["A", "B", "C"];
|
||||
var timesComputeHashCalled = 0;
|
||||
var computeHash = function () {
|
||||
timesComputeHashCalled++;
|
||||
return fixedHashValue;
|
||||
};
|
||||
var totalPartitions = partitionsPerNode * nodes.length;
|
||||
var totalCalls = (partitionsPerNode + 1) * nodes.length;
|
||||
|
||||
var partitions = ConsistentHashRing._constructPartitions(nodes, partitionsPerNode, computeHash);
|
||||
|
||||
assert.strictEqual(totalPartitions, partitions.length);
|
||||
assert.strictEqual(totalCalls, timesComputeHashCalled);
|
||||
|
||||
partitions.forEach(function (partition) {
|
||||
assert(partition.node);
|
||||
assert.strictEqual(fixedHashValue, partition.hashValue);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("ConsistentHashRing._compareHashes", function () {
|
||||
var test = function (a, b, result) {
|
||||
var actual = ConsistentHashRing._compareHashes(a, b);
|
||||
assert.strictEqual(result, actual);
|
||||
}
|
||||
|
||||
it("a=b", function () {
|
||||
test(0, 0, 0);
|
||||
});
|
||||
|
||||
it("a>b", function () {
|
||||
test(1, 0, 1);
|
||||
});
|
||||
|
||||
it("a<b", function () {
|
||||
test(0, 1, -1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ConsistentHashRing._search", function () {
|
||||
var test = function (nodes, key, expected) {
|
||||
var result = ConsistentHashRing._search(nodes, key);
|
||||
var actual = nodes[result].hashValue;
|
||||
|
||||
var message = {
|
||||
key: key,
|
||||
expected: expected,
|
||||
actual: actual
|
||||
};
|
||||
|
||||
assert.strictEqual(expected, actual, JSON.stringify(message));
|
||||
}
|
||||
|
||||
it("10", function () {
|
||||
var test1 = function (key, expected) {
|
||||
var nodes = [
|
||||
{ hashValue: 10 }
|
||||
];
|
||||
|
||||
test(nodes, key, expected);
|
||||
}
|
||||
|
||||
test1(Number.NEGATIVE_INFINITY, 10);
|
||||
test1(9, 10);
|
||||
test1(10, 10);
|
||||
test1(11, 10);
|
||||
test1(Number.POSITIVE_INFINITY, 10);
|
||||
});
|
||||
|
||||
it("10, 20", function () {
|
||||
var test2 = function (key, expected) {
|
||||
var nodes = [
|
||||
{ hashValue: 10 },
|
||||
{ hashValue: 20 }
|
||||
];
|
||||
|
||||
test(nodes, key, expected);
|
||||
}
|
||||
|
||||
test2(Number.NEGATIVE_INFINITY, 20);
|
||||
test2(10, 10);
|
||||
test2(11, 10);
|
||||
test2(19, 10);
|
||||
test2(20, 20);
|
||||
test2(Number.POSITIVE_INFINITY, 20);
|
||||
});
|
||||
|
||||
it("10, 20, 30", function () {
|
||||
var test3 = function (key, expected) {
|
||||
var nodes = [
|
||||
{ hashValue: 10 },
|
||||
{ hashValue: 20 },
|
||||
{ hashValue: 30 }
|
||||
];
|
||||
|
||||
test(nodes, key, expected);
|
||||
}
|
||||
|
||||
test3(Number.NEGATIVE_INFINITY, 30);
|
||||
test3(10, 10);
|
||||
test3(11, 10);
|
||||
test3(19, 10);
|
||||
test3(20, 20);
|
||||
test3(21, 20);
|
||||
test3(29, 20);
|
||||
test3(30, 30);
|
||||
test3(31, 30);
|
||||
test3(Number.POSITIVE_INFINITY, 30);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ConsistentHashRing.getNode", function () {
|
||||
it("A(10), B(20), C(30)", function () {
|
||||
var test = function (key, expected) {
|
||||
var nodes = ["A", "B", "C"];
|
||||
var options = {
|
||||
partitionsPerNode: 1,
|
||||
computeHash: function (key) {
|
||||
if (key === "A") return 10;
|
||||
if (key === "B") return 20;
|
||||
if (key === "C") return 30;
|
||||
|
||||
if (key === "a") return 15;
|
||||
if (key === "b") return 25;
|
||||
if (key === "c") return 35;
|
||||
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
var ring = new ConsistentHashRing(nodes, options);
|
||||
var actual = ring.getNode(key);
|
||||
|
||||
var message = {
|
||||
key: key,
|
||||
expected: expected,
|
||||
actual: actual
|
||||
};
|
||||
assert.strictEqual(expected, actual, JSON.stringify(message));
|
||||
}
|
||||
|
||||
test("a", "A");
|
||||
test("b", "B");
|
||||
test("c", "C");
|
||||
test("d", "C");
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("ConsistentHashRing._throwIfInvalidNodes", function () {
|
||||
it("does not throw", function () {
|
||||
assert.doesNotThrow(function () {
|
||||
ConsistentHashRing._throwIfInvalidNodes([]);
|
||||
});
|
||||
});
|
||||
|
||||
it("throws", function () {
|
||||
var test = function (nodes) {
|
||||
assert.throws(
|
||||
function () {
|
||||
ConsistentHashRing._throwIfInvalidNodes(nodes);
|
||||
},
|
||||
/Invalid argument: 'nodes' has to be an array./
|
||||
);
|
||||
};
|
||||
|
||||
var values = [
|
||||
undefined,
|
||||
null,
|
||||
"string",
|
||||
0,
|
||||
true,
|
||||
{},
|
||||
function () { }
|
||||
];
|
||||
|
||||
values.forEach(function (nodes) {
|
||||
test(nodes);
|
||||
});
|
||||
});
|
||||
});
|
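The specs above pin down the lookup rule: a key maps to the partition with the largest hash value not exceeding the key's hash, wrapping to the last partition when none qualifies. A condensed sketch reusing the same fixed hash values:

```js
// Sketch only: mirrors the fixed hash values used in the specs above.
var ConsistentHashRing = require("../lib/hash/consistentHashRing").ConsistentHashRing;

var ring = new ConsistentHashRing(["A", "B", "C"], {
    partitionsPerNode: 1,
    computeHash: function (key) {
        var fixed = { A: 10, B: 20, C: 30, a: 15, b: 25, c: 35 };
        return fixed[key] || 0;
    }
});

console.log(ring.getNode("a")); // A  (hash 15: largest partition hash <= 15 is 10)
console.log(ring.getNode("d")); // C  (hash 0: nothing <= 0, wraps to the last partition)
```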
The diff for this file is not shown because it is too large.
|
@ -1 +0,0 @@
|
|||
{ "unicode content" : "Line Separator (
) & Paragraph Separator (
) & Next Line (
) & نیمفاصله" }
|
|
@ -1,250 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var assert = require("assert");
|
||||
var HashPartitionResolver = require("../lib/hash/hashPartitionResolver").HashPartitionResolver;
|
||||
|
||||
describe("HashPartitionResolver new()", function () {
|
||||
it(" does not throw", function () {
|
||||
assert.doesNotThrow(
|
||||
function () {
|
||||
var resolver = new HashPartitionResolver("foo", ["dbs/foo/colls/A"]);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it("invalid partitionKeyResolver", function () {
|
||||
assert.throws(
|
||||
function () {
|
||||
var resolver = new HashPartitionResolver()
|
||||
},
|
||||
/partitionKeyExtractor cannot be null or undefined/
|
||||
);
|
||||
});
|
||||
|
||||
it("invalid collectionLinks", function () {
|
||||
assert.throws(
|
||||
function () {
|
||||
var resolver = new HashPartitionResolver("foo")
|
||||
},
|
||||
/collectionLinks must be an array./
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver.getPartitionKey", function () {
|
||||
it("string", function () {
|
||||
var resolver = new HashPartitionResolver("foo", ["dbs/foo/colls/A"]);
|
||||
var partitionKey = resolver.getPartitionKey({ foo: "bar" });
|
||||
assert.strictEqual("bar", partitionKey);
|
||||
});
|
||||
|
||||
it("function", function () {
|
||||
var resolver = new HashPartitionResolver(function (document) { return document.foo; }, ["dbs/foo/colls/A"]);
|
||||
var partitionKey = resolver.getPartitionKey({ foo: "bar" });
|
||||
assert.strictEqual("bar", partitionKey);
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver.resolveForRead", function () {
|
||||
it("valid key", function () {
|
||||
var resolver = new HashPartitionResolver("ignored", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
var links = resolver.resolveForRead("a");
|
||||
assert.deepEqual(["dbs/foo/colls/A"], links);
|
||||
});
|
||||
|
||||
it("null key", function () {
|
||||
var resolver = new HashPartitionResolver("ignored", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
var links = resolver.resolveForRead(null);
|
||||
assert.deepEqual(links, ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
});
|
||||
|
||||
it("undefined key", function () {
|
||||
var resolver = new HashPartitionResolver("ignored", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
var links = resolver.resolveForRead();
|
||||
assert.deepEqual(links, ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver.resolveForCreate", function () {
|
||||
it("valid key", function () {
|
||||
var resolver = new HashPartitionResolver("ignored", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
var links = resolver.resolveForCreate("a");
|
||||
assert.deepEqual("dbs/foo/colls/A", links);
|
||||
});
|
||||
|
||||
it("invalid key", function () {
|
||||
var resolver = new HashPartitionResolver("ignored", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
assert.throws(
|
||||
function () {
|
||||
resolver.resolveForCreate(0);
|
||||
},
|
||||
/partitionKey must be a 'string'/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver._resolve", function () {
|
||||
it("throws", function () {
|
||||
var resolver = new HashPartitionResolver(function (document) { return document.foo; }, ["dbs/foo/colls/A"]);
|
||||
assert.throws(
|
||||
function () { resolver._resolve(1); },
|
||||
/partitionKey must be a 'string'/
|
||||
);
|
||||
});
|
||||
|
||||
it("resolves to non-null", function () {
|
||||
var resolver = new HashPartitionResolver("ignoredPartitionKeyExtractor", ["dbs/foo/colls/A", "dbs/foo/colls/B", "dbs/foo/colls/C"]);
|
||||
var link = resolver._resolve("x");
|
||||
assert.notStrictEqual(null, link);
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver._throwIfInvalidCollectionLinks", function () {
|
||||
it("non-array throws", function () {
|
||||
var links = [
|
||||
undefined,
|
||||
null,
|
||||
1,
|
||||
"foo",
|
||||
{},
|
||||
NaN,
|
||||
function () { }
|
||||
];
|
||||
|
||||
links.forEach(
|
||||
function (link) {
|
||||
assert.throws(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidCollectionLinks(link);
|
||||
},
|
||||
/collectionLinks must be an array./
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it("invalid links throws", function () {
|
||||
assert.throws(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidCollectionLinks(["foo"]);
|
||||
},
|
||||
/All elements of collectionLinks must be collection links./
|
||||
);
|
||||
});
|
||||
|
||||
it("does not throw", function () {
|
||||
assert.doesNotThrow(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidCollectionLinks(["dbs/a/colls/b"]);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver._throwIfInvalidPartitionKeyExtractor", function () {
|
||||
it(" does not throw", function () {
|
||||
var partitionKeyExtractors = [
|
||||
"foo",
|
||||
function () { }
|
||||
];
|
||||
|
||||
partitionKeyExtractors.forEach(
|
||||
function (partitionKeyExtractor) {
|
||||
assert.doesNotThrow(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKeyExtractor(partitionKeyExtractor);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it("null or undefined throws", function () {
|
||||
var partitionKeyExtractors = [
|
||||
undefined,
|
||||
null
|
||||
];
|
||||
|
||||
partitionKeyExtractors.forEach(
|
||||
function (partitionKeyExtractor) {
|
||||
assert.throws(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKeyExtractor(partitionKeyExtractor);
|
||||
},
|
||||
/partitionKeyExtractor cannot be null or undefined/
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it("throws", function () {
|
||||
var partitionKeyExtractors = [
|
||||
1,
|
||||
{},
|
||||
[],
|
||||
NaN
|
||||
];
|
||||
|
||||
partitionKeyExtractors.forEach(
|
||||
function (partitionKeyExtractor) {
|
||||
assert.throws(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKeyExtractor(partitionKeyExtractor);
|
||||
},
|
||||
/partitionKeyExtractor must be either a 'string' or a 'function/
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("HashPartitionResolver._throwIfInvalidPartitionKey", function () {
|
||||
it(" does not throw", function () {
|
||||
assert.doesNotThrow(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKey("foo");
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it("throws", function () {
|
||||
var keys = [
|
||||
undefined,
|
||||
null,
|
||||
1,
|
||||
{},
|
||||
[],
|
||||
NaN,
|
||||
function () { }
|
||||
];
|
||||
|
||||
keys.forEach(
|
||||
function (key) {
|
||||
assert.throws(
|
||||
function () {
|
||||
HashPartitionResolver._throwIfInvalidPartitionKey(key);
|
||||
},
|
||||
/partitionKey must be a 'string'/
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,202 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var assert = require("assert"),
|
||||
CollectionRoutingMap = require("../lib/routing/inMemoryCollectionRoutingMap"),
|
||||
_ = require("underscore");
|
||||
|
||||
var QueryRange = CollectionRoutingMap.QueryRange;
|
||||
var CollectionRoutingMapFactory = CollectionRoutingMap.CollectionRoutingMapFactory;
|
||||
|
||||
describe("InMemoryCollectionRoutingMap Tests", function () {
|
||||
|
||||
describe("getOverlappingRanges", function () {
|
||||
|
||||
var partitionKeyRanges = [{ 'id': '0', 'minInclusive': '', 'maxExclusive': '05C1C9CD673398' }, { 'id': '1', 'minInclusive': '05C1C9CD673398', 'maxExclusive': '05C1D9CD673398' }, { 'id': '2', 'minInclusive': '05C1D9CD673398', 'maxExclusive': '05C1E399CD6732' }, { 'id': '3', 'minInclusive': '05C1E399CD6732', 'maxExclusive': '05C1E9CD673398' }, { 'id': '4', 'minInclusive': '05C1E9CD673398', 'maxExclusive': 'FF' }]
|
||||
var partitionRangeWithInfo = partitionKeyRanges.map(function (r) { return [r, true]; });
|
||||
var collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(partitionRangeWithInfo, 'sample collection id');
|
||||
|
||||
it("queryCompleteRange", function () {
|
||||
var completeRange = new QueryRange("", "FF", true, false);
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges(completeRange);
|
||||
|
||||
assert.equal(overlappingPartitionKeyRanges.length, partitionKeyRanges.length)
|
||||
assert.deepEqual(overlappingPartitionKeyRanges, partitionKeyRanges)
|
||||
});
|
||||
|
||||
it("queryEmptyRange", function () {
|
||||
var emtpyRange = new QueryRange("05C1C9CD673396", "05C1C9CD673396", true, false);
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges(emtpyRange);
|
||||
|
||||
assert.equal(overlappingPartitionKeyRanges.length, 0)
|
||||
});
|
||||
|
||||
it("queryPoint", function () {
|
||||
var pointRange = new QueryRange("05C1D9CD673397", "05C1D9CD673397", true, true);
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges(pointRange);
|
||||
|
||||
assert.equal(overlappingPartitionKeyRanges.length, 1);
|
||||
assert(overlappingPartitionKeyRanges[0].minInclusive <= pointRange.min);
|
||||
assert(overlappingPartitionKeyRanges[0].maxExclusive > pointRange.max);
|
||||
});
|
||||
|
||||
it("boundaryPointQuery", function () {
|
||||
var pointRange = new QueryRange("05C1C9CD673398", "05C1C9CD673398", true, true);
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges(pointRange);
|
||||
|
||||
assert.equal(overlappingPartitionKeyRanges.length, 1);
|
||||
assert(overlappingPartitionKeyRanges[0].minInclusive <= pointRange.min);
|
||||
assert(overlappingPartitionKeyRanges[0].maxExclusive > pointRange.max);
|
||||
assert(overlappingPartitionKeyRanges[0].minInclusive === pointRange.min);
|
||||
});
|
||||
});
|
||||
|
||||
describe("All methods", function () {
|
||||
var partitionRangeWithInfo =
|
||||
[
|
||||
[{
|
||||
id: "2",
|
||||
minInclusive: "0000000050",
|
||||
maxExclusive: "0000000070"
|
||||
}, 2],
|
||||
[{
|
||||
id: "0",
|
||||
minInclusive: "",
|
||||
maxExclusive: "0000000030"
|
||||
}, 0],
|
||||
[{
|
||||
id: "1",
|
||||
minInclusive: "0000000030",
|
||||
maxExclusive: "0000000050"
|
||||
}, 1],
|
||||
[{
|
||||
id: "3",
|
||||
minInclusive: "0000000070",
|
||||
maxExclusive: "FF"
|
||||
}, 3]
|
||||
];
|
||||
|
||||
var collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(partitionRangeWithInfo, 'sample collection id');
|
||||
|
||||
it("validate _orderedPartitionKeyRanges", function () {
|
||||
assert.equal("0", collectionRoutingMap.getOrderedParitionKeyRanges()[0].id)
|
||||
assert.equal("1", collectionRoutingMap.getOrderedParitionKeyRanges()[1].id)
|
||||
assert.equal("2", collectionRoutingMap.getOrderedParitionKeyRanges()[2].id)
|
||||
assert.equal("3", collectionRoutingMap.getOrderedParitionKeyRanges()[3].id)
|
||||
});
|
||||
|
||||
it("validate _orderedPartitionInfo", function () {
|
||||
assert.equal(0, collectionRoutingMap._orderedPartitionInfo[0])
|
||||
assert.equal(1, collectionRoutingMap._orderedPartitionInfo[1])
|
||||
assert.equal(2, collectionRoutingMap._orderedPartitionInfo[2])
|
||||
assert.equal(3, collectionRoutingMap._orderedPartitionInfo[3])
|
||||
});
|
||||
|
||||
it("validate getRangeByEffectivePartitionKey", function () {
|
||||
assert.equal("0", collectionRoutingMap.getRangeByEffectivePartitionKey("").id)
|
||||
assert.equal("0", collectionRoutingMap.getRangeByEffectivePartitionKey("0000000000").id)
|
||||
assert.equal("1", collectionRoutingMap.getRangeByEffectivePartitionKey("0000000030").id)
|
||||
assert.equal("1", collectionRoutingMap.getRangeByEffectivePartitionKey("0000000031").id)
|
||||
assert.equal("3", collectionRoutingMap.getRangeByEffectivePartitionKey("0000000071").id)
|
||||
});
|
||||
|
||||
it("validate getRangeByPartitionKeyRangeId", function () {
|
||||
assert.equal("0", collectionRoutingMap.getRangeByPartitionKeyRangeId("0").id)
|
||||
assert.equal("1", collectionRoutingMap.getRangeByPartitionKeyRangeId("1").id)
|
||||
});
|
||||
|
||||
it("validate getOverlappingRanges", function () {
|
||||
var completeRange = new QueryRange("", "FF", true, false);
|
||||
|
||||
var overlappingRanges = collectionRoutingMap.getOverlappingRanges([completeRange])
|
||||
assert.equal(4, overlappingRanges.length)
|
||||
|
||||
var onlyParitionRanges = partitionRangeWithInfo.map(function (item) { return item[0]; });
|
||||
var getKey = function (r) {
|
||||
return r['id'];
|
||||
};
|
||||
onlyParitionRanges = _.sortBy(onlyParitionRanges, getKey);
|
||||
assert.deepEqual(overlappingRanges, onlyParitionRanges)
|
||||
|
||||
var noPoint = new QueryRange("", "", false, false)
|
||||
assert.equal(0, collectionRoutingMap.getOverlappingRanges([noPoint]).length);
|
||||
|
||||
var onePoint = new QueryRange("0000000040", "0000000040", true, true)
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges([onePoint]);
|
||||
assert.equal(1, overlappingPartitionKeyRanges.length)
|
||||
assert.equal("1", overlappingPartitionKeyRanges[0].id)
|
||||
|
||||
var ranges = [
|
||||
new QueryRange("0000000040", "0000000045", true, true),
|
||||
new QueryRange("0000000045", "0000000046", true, true),
|
||||
new QueryRange("0000000046", "0000000050", true, true)
|
||||
]
|
||||
var overlappingPartitionKeyRanges = collectionRoutingMap.getOverlappingRanges(ranges);
|
||||
|
||||
assert.equal(2, overlappingPartitionKeyRanges.length)
|
||||
assert.equal("1", overlappingPartitionKeyRanges[0].id)
|
||||
assert.equal("2", overlappingPartitionKeyRanges[1].id)
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("Error Handling", function () {
|
||||
|
||||
describe("Incorrect instantiation", function () {
|
||||
|
||||
it("Invalid Routing Map", function () {
|
||||
var partitionRangeWithInfo =
|
||||
[
|
||||
[{ 'id': "1", 'minInclusive': "0000000020", 'maxExclusive': "0000000030" }, 2],
|
||||
[{ 'id': "2", 'minInclusive': "0000000025", 'maxExclusive': "0000000035" }, 2],
|
||||
];
|
||||
var collectionUniqueId = ""
|
||||
try {
|
||||
var collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(partitionRangeWithInfo, 'sample collection id');
|
||||
assert.fail("must throw exception");
|
||||
} catch (e) {
|
||||
assert.equal(e.message, "Ranges overlap");
|
||||
}
|
||||
});
|
||||
|
||||
it("Incomplete Routing Map", function () {
|
||||
var partitionRangeWithInfo =
|
||||
[
|
||||
[{ 'id': "2", 'minInclusive': "", 'maxExclusive': "0000000030" }, 2],
|
||||
[{ 'id': "3", 'minInclusive': "0000000031", 'maxExclusive': "FF" }, 2],
|
||||
];
|
||||
var collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(partitionRangeWithInfo, 'sample collection id');
|
||||
assert.equal(collectionRoutingMap, null);
|
||||
|
||||
partitionRangeWithInfo = [
|
||||
[{ 'id': "2", 'minInclusive': "", 'maxExclusive': "0000000030" }, 2],
|
||||
[{ 'id': "2", 'minInclusive': "0000000030", 'maxExclusive': "FF" }, 2],
|
||||
]
|
||||
var collectionRoutingMap = CollectionRoutingMapFactory.createCompleteRoutingMap(partitionRangeWithInfo, 'sample collection id');
|
||||
assert.notEqual(collectionRoutingMap, null);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,3 +0,0 @@
|
|||
{
|
||||
"timeout": 300000
|
||||
}
|
|
@@ -1,123 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var MurmurHash = require("../lib/hash/murmurHash").MurmurHash;
|
||||
var assert = require("assert")
|
||||
|
||||
describe("MurmurHash.hash", function () {
|
||||
var test = function (input, seed, expected) {
|
||||
assert.equal(MurmurHash.hash(input, seed), expected);
|
||||
};
|
||||
|
||||
it("374", function () {
|
||||
test(374, 0, 2455513042);
|
||||
});
|
||||
|
||||
it("Buffer(374.0)", function () {
|
||||
test(new Buffer([0, 0, 0, 0, 0, 96, 119, 64]), 0, 3717946798);
|
||||
});
|
||||
|
||||
it("Buffer(\"afdgdd\")", function () {
|
||||
test(new Buffer("afdgdd"), 0, 1099701186);
|
||||
});
|
||||
|
||||
it("afdgdd", function () {
|
||||
test("afdgdd", 0, 1099701186);
|
||||
});
|
||||
|
||||
it("\"\"", function () {
|
||||
test("", 0x1B873593, 1738713326);
|
||||
});
|
||||
|
||||
it("\"1\"", function () {
|
||||
test("1", 0xE82562E4, 3978597072);
|
||||
});
|
||||
|
||||
it("\"00\"", function () {
|
||||
test("00", 0xB4C39035, 459540986);
|
||||
});
|
||||
|
||||
it("eyetooth", function () {
|
||||
test("eyetooth", 0x8161BD86, 1864131224);
|
||||
});
|
||||
|
||||
it("acid", function () {
|
||||
test("acid", 0x4DFFEAD7, 3116405302);
|
||||
});
|
||||
|
||||
it("elevation", function () {
|
||||
test("elevation", 0x1A9E1828, 3745560233);
|
||||
});
|
||||
|
||||
it("dent", function () {
|
||||
test("dent", 0xE73C4579, 3554761172);
|
||||
});
|
||||
|
||||
it("homeland", function () {
|
||||
test("homeland", 0xB3DA72CA, 3144830214);
|
||||
});
|
||||
|
||||
it("glamor", function () {
|
||||
test("glamor", 0x8078A01B, 2812447113);
|
||||
});
|
||||
|
||||
it("flags", function () {
|
||||
test("flags", 0x4D16CD6C, 40273746);
|
||||
});
|
||||
|
||||
it("democracy", function () {
|
||||
test("democracy", 0x19B4FABD, 2966836708);
|
||||
});
|
||||
|
||||
it("bumble", function () {
|
||||
test("bumble", 0xE653280E, 214161406);
|
||||
});
|
||||
|
||||
it("catch", function () {
|
||||
test("catch", 0xB2F1555F, 3451276184);
|
||||
});
|
||||
|
||||
it("omnomnomnivore", function () {
|
||||
test("omnomnomnivore", 0x7F8F82B0, 4291675192);
|
||||
});
|
||||
|
||||
it("The quick brown fox jumps over the lazy dog", function () {
|
||||
test("The quick brown fox jumps over the lazy dog", 0x4C2DB001, 3381504877);
|
||||
});
|
||||
});
|
||||
|
||||
describe("MurmurHash._getBufferFromNumber", function () {
|
||||
var test = function (input, expected) {
|
||||
var actual = MurmurHash._getBufferFromNumber(input);
|
||||
assert.equal(actual.length, expected.length);
|
||||
for (var i = 0; i < actual.length; i++) {
|
||||
assert.equal(actual.buffer[i], expected[i]);
|
||||
}
|
||||
};
|
||||
|
||||
it("374", function () {
|
||||
test(374, [118, 1, 0, 0]);
|
||||
});
|
||||
});
|
|
@@ -1,60 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2014 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var assert = require("assert")
|
||||
, Contants = require("../lib/constants")
|
||||
, os = require("os")
|
||||
, Platform = require("../lib/platform")
|
||||
, util = require("util");
|
||||
|
||||
describe("Platform.getUserAgent", function () {
|
||||
it("getUserAgent()", function () {
|
||||
var userAgent = Platform.getUserAgent();
|
||||
var expectedUserAgent = util.format("%s/%s Nodejs/%s documentdb-nodejs-sdk/%s",
|
||||
os.platform(), os.release(), process.version,
|
||||
Contants.SDKVersion
|
||||
);
|
||||
assert.strictEqual(userAgent, expectedUserAgent, "invalid UserAgent format");
|
||||
});
|
||||
|
||||
describe("Platform._getSafeUserAgentSegmentInfo()", function () {
|
||||
it("Removing spaces", function () {
|
||||
var safeString = Platform._getSafeUserAgentSegmentInfo('a b c');
|
||||
assert.strictEqual(safeString, 'abc');
|
||||
});
|
||||
it("empty string handling", function () {
|
||||
var safeString = Platform._getSafeUserAgentSegmentInfo('');
|
||||
assert.strictEqual(safeString, 'unknown');
|
||||
});
|
||||
it("undefined", function () {
|
||||
var safeString = Platform._getSafeUserAgentSegmentInfo(undefined);
|
||||
assert.strictEqual(safeString, 'unknown');
|
||||
});
|
||||
it("null", function () {
|
||||
var safeString = Platform._getSafeUserAgentSegmentInfo(null);
|
||||
assert.strictEqual(safeString, 'unknown');
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,196 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Constants = require("../../lib/constants");
|
||||
var DefaultQueryExecutionContext = require("../../lib/queryExecutionContext/defaultQueryExecutionContext");
|
||||
var assert = require("assert");
|
||||
|
||||
describe("defaultQueryExecutionContext Tests", function () {
|
||||
var continuation = "next please";
|
||||
|
||||
describe("When passing no continuation to the constructor", function () {
|
||||
var documentClient = null;
|
||||
var query = null;
|
||||
var options = null;
|
||||
var expectedPassedOptions = { continuation: undefined };
|
||||
|
||||
describe("and no continuation is returned", function () {
|
||||
var fetchFunction = function (opts, cb) {
|
||||
assert.ok(cb, "callback is null or undefined");
|
||||
assert.deepEqual(opts, expectedPassedOptions, "options object does not match");
|
||||
|
||||
cb(null, null, {});
|
||||
};
|
||||
|
||||
var sut = new DefaultQueryExecutionContext(documentClient, query, options, fetchFunction);
|
||||
var currentPartitionIndex = sut.currentPartitionIndex;
|
||||
|
||||
assert.equal(sut.continuation, null, "passed continuation does not match");
|
||||
|
||||
sut.fetchMore(function (err, res, responseHeaders) {
|
||||
it("should pass no error to the callback", function () {
|
||||
assert.equal(err, null, "Unexpected error received");
|
||||
});
|
||||
|
||||
it("should pass no result to the callback", function () {
|
||||
assert.equal(res, null, "Unexpected result received");
|
||||
});
|
||||
|
||||
it("should pass an empty responseHeaders object to the callback", function () {
|
||||
assert.deepEqual(responseHeaders, {}, "responseHeaders object is not empty");
|
||||
});
|
||||
|
||||
it("should increment the currentPartitionIndex", function () {
|
||||
assert.equal(sut.currentPartitionIndex, currentPartitionIndex + 1, "currentPartitionIndex does not match");
|
||||
});
|
||||
|
||||
it("should leave the continuation at undefined", function () {
|
||||
assert.equal(sut.continuation, undefined, "continuation is not undefined");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("and a continuation is returned", function () {
|
||||
var expectedResponseHeaders = {};
|
||||
expectedResponseHeaders[Constants.HttpHeaders.Continuation] = continuation;
|
||||
|
||||
var fetchFunction = function (opts, cb) {
|
||||
assert.ok(cb, "callback is null or undefined");
|
||||
assert.deepEqual(opts, expectedPassedOptions, "options object does not match");
|
||||
|
||||
cb(null, null, expectedResponseHeaders);
|
||||
};
|
||||
|
||||
var sut = new DefaultQueryExecutionContext(documentClient, query, options, fetchFunction);
|
||||
var currentPartitionIndex = sut.currentPartitionIndex;
|
||||
|
||||
assert.equal(sut.continuation, null, "passed continuation does not match");
|
||||
|
||||
sut.fetchMore(function (err, res, responseHeaders) {
|
||||
it("should pass no error to the callback", function () {
|
||||
assert.equal(err, null, "Unexpected error received");
|
||||
});
|
||||
|
||||
it("should pass no result to the callback", function () {
|
||||
assert.equal(res, null, "Unexpected result received");
|
||||
});
|
||||
|
||||
it("should pass an empty responseHeaders object to the callback", function () {
|
||||
assert.deepEqual(responseHeaders, expectedResponseHeaders, "responseHeaders do not match");
|
||||
});
|
||||
|
||||
it("should not increment the currentPartitionIndex", function () {
|
||||
assert.equal(sut.currentPartitionIndex, currentPartitionIndex, "currentPartitionIndex does not match");
|
||||
});
|
||||
|
||||
it("should set the continuation to the response header value", function () {
|
||||
assert.equal(sut.continuation, continuation, "continuation does not match");
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("When passing a continuation to the constructor", function () {
|
||||
var documentClient = null;
|
||||
var query = null;
|
||||
var options = { continuation: continuation };
|
||||
|
||||
describe("and no continuation is returned", function () {
|
||||
var fetchFunction = function (opts, cb) {
|
||||
assert.ok(cb, "callback is null or undefined");
|
||||
assert.deepEqual(opts, options, "options object does not match");
|
||||
|
||||
cb(null, null, {});
|
||||
};
|
||||
|
||||
var sut = new DefaultQueryExecutionContext(documentClient, query, options, fetchFunction);
|
||||
var currentPartitionIndex = sut.currentPartitionIndex;
|
||||
|
||||
assert.equal(sut.continuation, continuation, "passed continuation does not match");
|
||||
|
||||
sut.fetchMore(function (err, res, responseHeaders) {
|
||||
it("should pass no error to the callback", function () {
|
||||
assert.equal(err, null, "Unexpected error received");
|
||||
});
|
||||
|
||||
it("should pass no result to the callback", function () {
|
||||
assert.equal(res, null, "Unexpected result received");
|
||||
});
|
||||
|
||||
it("should pass an empty responseHeaders object to the callback", function () {
|
||||
assert.deepEqual(responseHeaders, {}, "responseHeaders object is not empty");
|
||||
});
|
||||
|
||||
it("should increment the currentPartitionIndex", function () {
|
||||
assert.equal(sut.currentPartitionIndex, currentPartitionIndex + 1, "currentPartitionIndex does not match");
|
||||
});
|
||||
|
||||
it("should set the continuation to undefined", function () {
|
||||
assert.equal(sut.continuation, undefined, "continuation is not undefined");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("and a continuation is returned", function () {
|
||||
var nextContinuation = "last one";
|
||||
var expectedResponseHeaders = {};
|
||||
expectedResponseHeaders[Constants.HttpHeaders.Continuation] = nextContinuation;
|
||||
|
||||
var fetchFunction = function (opts, cb) {
|
||||
assert.ok(cb, "callback is null or undefined");
|
||||
assert.deepEqual(opts, options, "options object does not match");
|
||||
|
||||
cb(null, null, expectedResponseHeaders);
|
||||
};
|
||||
|
||||
var sut = new DefaultQueryExecutionContext(documentClient, query, options, fetchFunction);
|
||||
var currentPartitionIndex = sut.currentPartitionIndex;
|
||||
|
||||
assert.equal(sut.continuation, continuation, "passed continuation does not match");
|
||||
|
||||
sut.fetchMore(function (err, res, responseHeaders) {
|
||||
it("should pass no error to the callback", function () {
|
||||
assert.equal(err, null, "Unexpected error received");
|
||||
});
|
||||
|
||||
it("should pass no result to the callback", function () {
|
||||
assert.equal(res, null, "Unexpected result received");
|
||||
});
|
||||
|
||||
it("should pass the expected responseHeaders object to the callback", function () {
|
||||
assert.deepEqual(responseHeaders, expectedResponseHeaders, "responseHeaders do not match");
|
||||
});
|
||||
|
||||
it("should not increment the currentPartitionIndex", function () {
|
||||
assert.equal(sut.currentPartitionIndex, currentPartitionIndex, "currentPartitionIndex does not match");
|
||||
});
|
||||
|
||||
it("should set the continuation to the response header value", function () {
|
||||
assert.equal(sut.continuation, nextContinuation, "continuation does not match");
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,176 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
var lib = require("../lib/"),
|
||||
sinon = require("sinon"),
|
||||
Base = require("../lib/base"),
|
||||
assert = require("assert"),
|
||||
testConfig = require("./_testConfig"),
|
||||
ResourceId = require("../lib/resourceId"),
|
||||
SessionContainer = require("../lib/sessionContainer");
|
||||
|
||||
var host = testConfig.host;
|
||||
var masterKey = testConfig.masterKey;
|
||||
|
||||
describe("Session Container unit tests", function () {
|
||||
|
||||
var collectionLink = 'dbs/testDatabase/colls/testCollection';
|
||||
var collectionId = 'oWxIAN48yN0=';
|
||||
|
||||
var verify = function (resource, resourceType) {
|
||||
if (resourceType == 'offer' && resource.offer != 0)
|
||||
return true;
|
||||
else if (resourceType == 'db' && resource.database != 0)
|
||||
return true;
|
||||
else if (resourceType == 'coll' && resource.database != 0 && resource.collection != 0)
|
||||
return true;
|
||||
else if (resourceType == 'document' && resource.database != 0 && resource.documentCollection != 0 && resource.document != 0)
|
||||
return true;
|
||||
else if (resourceType == 'attachment' && resource.database != 0 && resource.documentCollection != 0 && resource.document != 0 && resource.attachment != 0)
|
||||
return true;
|
||||
else if (resourceType == 'sproc' && resource.database != 0 && resource.documentCollection != 0 && resource.storedProcedure != 0)
|
||||
return true;
|
||||
else if (resourceType == 'trigger' && resource.database != 0 && resource.documentCollection != 0 && resource.trigger != 0)
|
||||
return true;
|
||||
else if (resourceType == 'udf' && resource.database != 0 && resource.documentCollection != 0 && resource.userDefinedFunction != 0)
|
||||
return true;
|
||||
else if (resourceType == 'pkr' && resource.database != 0 && resource.documentCollection != 0 && resource.partitionKeyRange != 0)
|
||||
return true;
|
||||
else if (resourceType == 'user' && resource.database != 0 && resource.user != 0)
|
||||
return true;
|
||||
else if (resourceType == 'permission' && resource.database != 0 && resource.user != 0 && resource.permission != 0)
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
};
|
||||
|
||||
it("validate ResourceId's parse and toString functions", function (done) {
|
||||
var rid = new ResourceId();
|
||||
var offerId = "HW1D"
|
||||
var databaseId = "5bdcAA==";
|
||||
var collectionId = "5bdcAKxMGks=";
|
||||
var docId = "n7JnAIx+kn8CAAAAAAAACA==";
|
||||
var attachmentId = "n7JnAIx+kn8BAAAAAAAACFZwB8E=";
|
||||
var sprocId = "n7JnAIx+kn8BAAAAAAAAgA==";
|
||||
var triggerId = "n7JnAIx+kn8BAAAAAAAAcA==";
|
||||
var udfId = "n7JnAIx+kn8BAAAAAAAAYA==";
|
||||
var pkrId = "oqoqAItdJQ0CAAAAAAAAUA==";
|
||||
var userId = "n7JnADdIXgA=";
|
||||
var permissionId = "n7JnADdIXgDnT1HTUDIbAA==";
|
||||
|
||||
var offer = rid.parse(offerId);
|
||||
assert.equal(verify(offer, 'offer'), true);
|
||||
assert.equal(offer.toString(), offerId);
|
||||
|
||||
var database = rid.parse(databaseId);
|
||||
assert.equal(verify(database, 'db'), true);
|
||||
assert.equal(database.toString(), databaseId);
|
||||
|
||||
var collection = rid.parse(collectionId);
|
||||
assert.equal(verify(collection, 'coll'), true);
|
||||
assert.equal(collection.toString(), collectionId);
|
||||
|
||||
var document = rid.parse(docId);
|
||||
assert.equal(verify(document, 'document'), true);
|
||||
assert.equal(document.toString(), docId);
|
||||
|
||||
var attachment = rid.parse(attachmentId);
|
||||
assert.equal(verify(attachment, 'attachment'), true);
|
||||
assert.equal(attachment.toString(), attachmentId);
|
||||
|
||||
var sproc = rid.parse(sprocId);
|
||||
assert.equal(verify(sproc, 'sproc'), true);
|
||||
assert.equal(sproc.toString(), sprocId);
|
||||
|
||||
var trigger = rid.parse(triggerId);
|
||||
assert.equal(verify(trigger, 'trigger'), true);
|
||||
assert.equal(trigger.toString(), triggerId);
|
||||
|
||||
var udf = rid.parse(udfId);
|
||||
assert.equal(verify(udf, 'udf'), true);
|
||||
assert.equal(udf.toString(), udfId);
|
||||
|
||||
var pkr = rid.parse(pkrId);
|
||||
assert.equal(verify(pkr, 'pkr'), true);
|
||||
assert.equal(pkr.toString(), pkrId);
|
||||
|
||||
var user = rid.parse(userId);
|
||||
assert.equal(verify(user, 'user'), true);
|
||||
assert.equal(user.toString(), userId);
|
||||
|
||||
var permission = rid.parse(permissionId);
|
||||
assert.equal(verify(permission, 'permission'), true);
|
||||
assert.equal(permission.toString(), permissionId);
|
||||
done();
|
||||
});
|
||||
|
||||
it("validate internal functions of Session Container", function (done) {
|
||||
var sc = new SessionContainer();
|
||||
|
||||
//test compareAndSetToken()
|
||||
var oldTokens = {}
|
||||
sc.compareAndSetToken('0:200', oldTokens);
|
||||
assert.deepEqual(oldTokens, { '0': '200' })
|
||||
sc.compareAndSetToken('0:201', oldTokens);
|
||||
assert.deepEqual(oldTokens, { '0': '201' })
|
||||
sc.compareAndSetToken('0:199', oldTokens);
|
||||
assert.deepEqual(oldTokens, { '0': '201' })
|
||||
|
||||
//test getCombinedSessiontoken()
|
||||
assert.equal(sc.getCombinedSessionToken({ '0': '100', '1': '200' }), '0:100,1:200');
|
||||
|
||||
var ridRequest = {
|
||||
isNameBased: false,
|
||||
resourceId: collectionId,
|
||||
resourceAddress: collectionId,
|
||||
resourceType: 'docs',
|
||||
operationType: 'create'
|
||||
};
|
||||
|
||||
var resHeadersRid = {
|
||||
'x-ms-alt-content-path': collectionLink,
|
||||
'x-ms-session-token': '1:1290'
|
||||
};
|
||||
|
||||
//test setSessionToken() for rid based request
|
||||
sc.setSessionToken(ridRequest, {}, resHeadersRid);
|
||||
assert.deepEqual(sc.collectionNameToCollectionResourceId, { 'dbs/testDatabase/colls/testCollection': '-566441763' });
|
||||
assert.deepEqual(sc.collectionResourceIdToSessionTokens, { '-566441763': { '1': '1290' } });
|
||||
|
||||
//test getPartitionKeyRangeIdToMapPrivate() for rid based request
|
||||
assert.deepEqual(sc.getPartitionKeyRangeIdToTokenMapPrivate(true, null, '/' + collectionLink + '/'), { '1': '1290' });
|
||||
|
||||
//test clearToken for rid based request
|
||||
sc.clearToken(ridRequest);
|
||||
assert.deepEqual(sc.collectionNameToCollectionResourceId, { 'dbs/testDatabase/colls/testCollection': '-566441763' });
|
||||
assert.deepEqual(sc.collectionResourceIdToSessionTokens, {});
|
||||
|
||||
var nameBasedRequest = {
|
||||
isNameBased: true,
|
||||
resourceId: null,
|
||||
resourceAddress: '/' + collectionLink + '/',
|
||||
resourceType: 'docs',
|
||||
operationType: 'create'
|
||||
};
|
||||
var resHeadersNameBased = {
|
||||
'x-ms-alt-content-path': collectionLink,
|
||||
'x-ms-content-path': collectionId,
|
||||
'x-ms-session-token': '1:1126'
|
||||
};
|
||||
|
||||
//test setSessionToken() for name based request
|
||||
sc.setSessionToken(nameBasedRequest, {}, resHeadersNameBased);
|
||||
assert.deepEqual(sc.collectionNameToCollectionResourceId, { 'dbs/testDatabase/colls/testCollection': '-566441763' });
|
||||
assert.deepEqual(sc.collectionResourceIdToSessionTokens, { '-566441763': { '1': '1126' } });
|
||||
|
||||
//test getPartitionKeyRangeIdToMapPrivate() for name based request
|
||||
assert.deepEqual(sc.getPartitionKeyRangeIdToTokenMapPrivate(false, collectionId, collectionId), { '1': '1126' });
|
||||
|
||||
//test clearToken for name based request
|
||||
sc.clearToken(nameBasedRequest);
|
||||
assert.deepEqual(sc.collectionNameToCollectionResourceId, {});
|
||||
assert.deepEqual(sc.collectionResourceIdToSessionTokens, {});
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
|
@@ -1,267 +0,0 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var Base = require("../lib/base"),
|
||||
assert = require("assert"),
|
||||
CollectionRoutingMap = require("../lib/routing/inMemoryCollectionRoutingMap"),
|
||||
SmartRoutingMapProvider = require("../lib/routing/smartRoutingMapProvider"),
|
||||
PartitionKeyRangeCache = require("../lib/routing/partitionKeyRangeCache");
|
||||
|
||||
var QueryRange = CollectionRoutingMap.QueryRange;
|
||||
var CollectionRoutingMapFactory = CollectionRoutingMap.CollectionRoutingMapFactory;
|
||||
|
||||
describe("Smart Routing Map Provider OverlappingRanges", function () {
|
||||
|
||||
var collectionLink = 'dbs/7JZZAA==/colls/7JZZAOS-JQA=/';
|
||||
var collectionId = 'my collection';
|
||||
var MockedQueryIterator = Base.defineClass(function (results) { this._results = results; },
|
||||
{ toArray: function (callback) { callback(undefined, this._results); } });
|
||||
|
||||
var MockedDocumentClient = Base.defineClass(function (partitionKeyRanges) { this._partitionKeyRanges = partitionKeyRanges; }, {
|
||||
readPartitionKeyRanges: function (collectionLink) {
|
||||
return new MockedQueryIterator(this._partitionKeyRanges);
|
||||
},
|
||||
getIdFromLink: function () {
|
||||
return collectionId;
|
||||
}
|
||||
});
|
||||
|
||||
var partitionKeyRanges = [
|
||||
{ 'id': '0', 'minInclusive': '', 'maxExclusive': '05C1C9CD673398' },
|
||||
{ 'id': '1', 'minInclusive': '05C1C9CD673398', 'maxExclusive': '05C1D9CD673398' },
|
||||
{ 'id': '2', 'minInclusive': '05C1D9CD673398', 'maxExclusive': '05C1E399CD6732' },
|
||||
{ 'id': '3', 'minInclusive': '05C1E399CD6732', 'maxExclusive': '05C1E9CD673398' },
|
||||
{ 'id': '4', 'minInclusive': '05C1E9CD673398', 'maxExclusive': 'FF' }];
|
||||
|
||||
var mockedDocumentClient = new MockedDocumentClient(partitionKeyRanges);
|
||||
var smartRoutingMapProvider = new SmartRoutingMapProvider(mockedDocumentClient);
|
||||
var partitionKeyRangeCache = new PartitionKeyRangeCache(mockedDocumentClient);
|
||||
|
||||
describe("Test Full Range", function () {
|
||||
|
||||
it('query ranges: ["", ""FF)', function (done) {
|
||||
// query range is the whole partition key range
|
||||
var pkRange = new QueryRange("", "FF", true, false);
|
||||
validateOverlappingRanges([pkRange], partitionKeyRanges, done);
|
||||
});
|
||||
|
||||
it('query ranges: ("", ""FF)', function (done) {
|
||||
// query range is the whole partition key range
|
||||
var pkRange = new QueryRange("", "FF", false, false);
|
||||
validateOverlappingRanges([pkRange], partitionKeyRanges, done);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test Empty Range", function () {
|
||||
|
||||
it('empty query range list', function (done) {
|
||||
// query range list is empty
|
||||
validateOverlappingRanges([], [], done);
|
||||
});
|
||||
|
||||
it('query ranges: ("", ""]', function (done) {
|
||||
// validate that the overlapping partition key ranges result for an empty range is empty
|
||||
validateOverlappingRanges([new QueryRange("", "", false, true)], [], done);
|
||||
});
|
||||
|
||||
it('query ranges: ("", "")', function (done) {
|
||||
// validate that the overlapping partition key ranges result for an empty range is empty
|
||||
validateOverlappingRanges([new QueryRange("", "", false, false)], [], done);
|
||||
});
|
||||
|
||||
it('query ranges: ["", "")', function (done) {
|
||||
// validate that the overlapping partition key ranges result for an empty range is empty
|
||||
validateOverlappingRanges([new QueryRange("", "", true, false)], [], done);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handling: Bad Overlapping Query Range", function () {
|
||||
|
||||
it('overlapping query ranges (in a point)', function (done) {
|
||||
var r1 = new QueryRange("", "AA", true, true);
|
||||
var r2 = new QueryRange("AA", "FF", true, false);
|
||||
validateSmartOverlappingRanges([r1, r2], undefined, done, true);
|
||||
});
|
||||
|
||||
it('overlapping query ranges (in a range)', function (done) {
|
||||
var r1 = new QueryRange("", "AB", true, false);
|
||||
var r2 = new QueryRange("AA", "FA", true, false);
|
||||
validateSmartOverlappingRanges([r1, r2], undefined, done, true);
|
||||
});
|
||||
|
||||
it('not sorted query ranges', function (done) {
|
||||
var r1 = new QueryRange("AB", "AC", true, false);
|
||||
var r2 = new QueryRange("AA", "AB", true, false);
|
||||
validateSmartOverlappingRanges([r1, r2], undefined, done, true);
|
||||
});
|
||||
});
|
||||
|
||||
it("Empty Ranges are thrown away", function (done) {
|
||||
var e1 = new QueryRange("", "", true, false)
|
||||
var r1 = new QueryRange("", "AB", true, false)
|
||||
var e2 = new QueryRange("AB", "AB", true, false)
|
||||
var r2 = new QueryRange("AB", "AC", true, false)
|
||||
var e3 = new QueryRange("AC", "AC", true, false)
|
||||
var e4 = new QueryRange("AD", "AD", true, false);
|
||||
assertOverlappingRangesAreEqual([e1, r1, e2, r2, e3, e4], [r1, r2], done);
|
||||
});
|
||||
|
||||
it("Single Query Range", function (done) {
|
||||
var r = new QueryRange("AB", "AC", true, false)
|
||||
assertBothProvidersResultsEqual([r], done);
|
||||
});
|
||||
|
||||
it("Multiple Query Ranges", function (done) {
|
||||
var ranges = [
|
||||
new QueryRange("0000000040", "0000000045", true, false),
|
||||
new QueryRange("0000000045", "0000000046", true, false),
|
||||
new QueryRange("0000000046", "0000000050", true, false)
|
||||
]
|
||||
assertBothProvidersResultsEqual(ranges, done);
|
||||
});
|
||||
|
||||
it("Single Boundary Case Query Range", function (done) {
|
||||
var ranges = [
|
||||
new QueryRange("05C1C9CD673398", "05C1D9CD673398", true, false)
|
||||
];
|
||||
validateOverlappingRanges(ranges, partitionKeyRanges.slice(1, 2), done);
|
||||
});
|
||||
|
||||
it("Two Adjacent Boundary Case Query Ranges", function (done) {
|
||||
var ranges = [
|
||||
// partitionKeyRanges[1]
|
||||
new QueryRange("05C1C9CD673398", "05C1D9CD673398", true, false),
|
||||
// partitionKeyRanges[2]
|
||||
new QueryRange("05C1D9CD673398", "05C1D9CD673399", true, false),
|
||||
]
|
||||
validateOverlappingRanges(ranges, partitionKeyRanges.slice(1, 3), done);
|
||||
});
|
||||
|
||||
it("Two Ranges in one partition key range", function (done) {
|
||||
var ranges = [
|
||||
// two ranges fall in the same partition key range
|
||||
new QueryRange("05C1C9CD673400", "05C1C9CD673401", true, false),
|
||||
new QueryRange("05C1C9CD673402", "05C1C9CD673403", true, false),
|
||||
]
|
||||
validateOverlappingRanges(ranges, partitionKeyRanges.slice(1, 2), done);
|
||||
});
|
||||
|
||||
it("Complex", function (done) {
|
||||
var ranges = [
|
||||
// all are covered by partitionKeyRanges[1]
|
||||
new QueryRange("05C1C9CD673398", "05C1D9CD673391", true, false),
|
||||
new QueryRange("05C1D9CD673391", "05C1D9CD673392", true, false),
|
||||
new QueryRange("05C1D9CD673393", "05C1D9CD673395", true, false),
|
||||
new QueryRange("05C1D9CD673395", "05C1D9CD673395", true, false),
|
||||
// all are covered by partitionKeyRanges[4]
|
||||
new QueryRange("05C1E9CD673398", "05C1E9CD673401", true, false),
|
||||
new QueryRange("05C1E9CD673402", "05C1E9CD673403", true, false),
|
||||
// empty range
|
||||
new QueryRange("FF", "FF", true, false),
|
||||
]
|
||||
validateOverlappingRanges(ranges, [partitionKeyRanges[1], partitionKeyRanges[4]], done);
|
||||
});
|
||||
|
||||
// Validates the results
|
||||
// smartRoutingMapProvider.getOverlappingRanges()
|
||||
// partitionKeyRangeCache.getOverlappingRanges() is equal
|
||||
var assertBothProvidersResultsEqual = function (queryRanges, done) {
|
||||
var errorExpected = errorExpected || false;
|
||||
smartRoutingMapProvider.getOverlappingRanges(function (err1, results1) {
|
||||
partitionKeyRangeCache.getOverlappingRanges(function (err2, results2) {
|
||||
assert.equal(err1, err2);
|
||||
assert.deepEqual(results1, results2);
|
||||
done();
|
||||
}, collectionLink, queryRanges);
|
||||
}, collectionLink, queryRanges);
|
||||
}
|
||||
|
||||
// Validates the results
|
||||
// smartRoutingMapProvider.getOverlappingRanges()
|
||||
// partitionKeyRangeCache.getOverlappingRanges() is as expected
|
||||
var validateOverlappingRanges = function (queryRanges, expectedResults, done, errorExpected) {
|
||||
var errorExpected = errorExpected || false;
|
||||
validateSmartOverlappingRanges(queryRanges, expectedResults,
|
||||
function () {
|
||||
validatePartitionKeyRangeCacheOverlappingRanges(queryRanges, expectedResults, done, errorExpected);
|
||||
}, errorExpected);
|
||||
}
|
||||
|
||||
// Validates the results of both
|
||||
// smartRoutingMapProvider.getOverlappingRanges()
|
||||
// partitionKeyRangeCache.getOverlappingRanges() is the same for both queryRanges1, queryRanges2
|
||||
var assertOverlappingRangesAreEqual = function (queryRanges1, queryRanges2, done) {
|
||||
assertProviderOverlappingRangesAreEqual(smartRoutingMapProvider, queryRanges1, queryRanges2,
|
||||
function () {
|
||||
assertProviderOverlappingRangesAreEqual(partitionKeyRangeCache, queryRanges1, queryRanges2,
|
||||
function () {
|
||||
assertBothProvidersResultsEqual(queryRanges1, done);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Validates the results
|
||||
// provider.getOverlappingRanges() is the same on both queryRanges1, queryRanges2
|
||||
var assertProviderOverlappingRangesAreEqual = function (provider, queryRanges1, queryRanges2, done) {
|
||||
var errorExpected = errorExpected || false;
|
||||
provider.getOverlappingRanges(function (err1, results1) {
|
||||
provider.getOverlappingRanges(function (err2, results2) {
|
||||
assert.equal(err1, err2);
|
||||
assert.deepEqual(results1, results2);
|
||||
done();
|
||||
}, collectionLink, queryRanges2);
|
||||
}, collectionLink, queryRanges1);
|
||||
}
|
||||
|
||||
// Validates the results
|
||||
// provider.getOverlappingRanges() is as expected
|
||||
var validateProviderOverlappingRanges = function (provider, queryRanges, expectedResults, done, errorExpected) {
|
||||
var errorExpected = errorExpected || false;
|
||||
provider.getOverlappingRanges(function (err, results) {
|
||||
if (errorExpected) {
|
||||
assert.notEqual(err, undefined);
|
||||
assert.equal(results, undefined);
|
||||
done();
|
||||
} else {
|
||||
assert.equal(err, undefined, "unexpected error happened " + err);
|
||||
assert.deepEqual(results, expectedResults);
|
||||
done();
|
||||
}
|
||||
}, collectionLink, queryRanges);
|
||||
}
|
||||
|
||||
// validates that the results of
|
||||
// smartRoutingMapProvider.getOverlappingRanges() is as expected
|
||||
var validateSmartOverlappingRanges = function (queryRanges, expectedResults, done, errorExpected) {
|
||||
validateProviderOverlappingRanges(smartRoutingMapProvider, queryRanges, expectedResults, done, errorExpected);
|
||||
}
|
||||
|
||||
// validates that the results of
|
||||
// partitionKeyRangeCache.getOverlappingRanges() is as expected
|
||||
var validatePartitionKeyRangeCacheOverlappingRanges = function (queryRanges, expectedResults, done, errorExpected) {
|
||||
validateProviderOverlappingRanges(partitionKeyRangeCache, queryRanges, expectedResults, done, errorExpected);
|
||||
}
|
||||
});
|
File diff suppressed because it is too large
|
@@ -1,205 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/253e456e3c0bf4bd34afaceb7dcbae282da14066/mocha/index.d.ts
|
||||
interface MochaSetupOptions {
|
||||
//milliseconds to wait before considering a test slow
|
||||
slow?: number;
|
||||
|
||||
// timeout in milliseconds
|
||||
timeout?: number;
|
||||
|
||||
// ui name "bdd", "tdd", "exports" etc
|
||||
ui?: string;
|
||||
|
||||
//array of accepted globals
|
||||
globals?: any[];
|
||||
|
||||
// reporter instance (function or string), defaults to `mocha.reporters.Spec`
|
||||
reporter?: any;
|
||||
|
||||
// bail on the first test failure
|
||||
bail?: boolean;
|
||||
|
||||
// ignore global leaks
|
||||
ignoreLeaks?: boolean;
|
||||
|
||||
// grep string or regexp to filter tests with
|
||||
grep?: any;
|
||||
}
|
||||
|
||||
declare var mocha: Mocha;
|
||||
declare var describe: Mocha.IContextDefinition;
|
||||
declare var xdescribe: Mocha.IContextDefinition;
|
||||
// alias for `describe`
|
||||
declare var context: Mocha.IContextDefinition;
|
||||
// alias for `describe`
|
||||
declare var suite: Mocha.IContextDefinition;
|
||||
declare var it: Mocha.ITestDefinition;
|
||||
declare var xit: Mocha.ITestDefinition;
|
||||
// alias for `it`
|
||||
declare var test: Mocha.ITestDefinition;
|
||||
declare var specify: Mocha.ITestDefinition;
|
||||
|
||||
// Used with the --delay flag; see https://mochajs.org/#hooks
|
||||
declare function run(): void;
|
||||
|
||||
interface MochaDone {
|
||||
(error?: any): any;
|
||||
}
|
||||
|
||||
interface ActionFunction {
|
||||
(done: MochaDone): any | PromiseLike<any>
|
||||
}
|
||||
|
||||
declare function setup(action: ActionFunction): void;
|
||||
declare function teardown(action: ActionFunction): void;
|
||||
declare function suiteSetup(action: ActionFunction): void;
|
||||
declare function suiteTeardown(action: ActionFunction): void;
|
||||
declare function before(action: ActionFunction): void;
|
||||
declare function before(description: string, action: ActionFunction): void;
|
||||
declare function after(action: ActionFunction): void;
|
||||
declare function after(description: string, action: ActionFunction): void;
|
||||
declare function beforeEach(action: ActionFunction): void;
|
||||
declare function beforeEach(description: string, action: ActionFunction): void;
|
||||
declare function afterEach(action: ActionFunction): void;
|
||||
declare function afterEach(description: string, action: ActionFunction): void;
|
||||
|
||||
declare class Mocha {
|
||||
currentTest: Mocha.ITestDefinition;
|
||||
constructor(options?: {
|
||||
grep?: RegExp;
|
||||
ui?: string;
|
||||
reporter?: string;
|
||||
timeout?: number;
|
||||
bail?: boolean;
|
||||
});
|
||||
|
||||
/** Setup mocha with the given options. */
|
||||
setup(options: MochaSetupOptions): Mocha;
|
||||
bail(value?: boolean): Mocha;
|
||||
addFile(file: string): Mocha;
|
||||
/** Sets reporter by name, defaults to "spec". */
|
||||
reporter(name: string): Mocha;
|
||||
/** Sets reporter constructor, defaults to mocha.reporters.Spec. */
|
||||
reporter(reporter: (runner: Mocha.IRunner, options: any) => any): Mocha;
|
||||
ui(value: string): Mocha;
|
||||
grep(value: string): Mocha;
|
||||
grep(value: RegExp): Mocha;
|
||||
invert(): Mocha;
|
||||
ignoreLeaks(value: boolean): Mocha;
|
||||
checkLeaks(): Mocha;
|
||||
/**
|
||||
* Function to allow assertion libraries to throw errors directly into mocha.
|
||||
* This is useful when running tests in a browser because window.onerror will
|
||||
* only receive the 'message' attribute of the Error.
|
||||
*/
|
||||
throwError(error: Error): void;
|
||||
/** Enables growl support. */
|
||||
growl(): Mocha;
|
||||
globals(value: string): Mocha;
|
||||
globals(values: string[]): Mocha;
|
||||
useColors(value: boolean): Mocha;
|
||||
useInlineDiffs(value: boolean): Mocha;
|
||||
timeout(value: number): Mocha;
|
||||
slow(value: number): Mocha;
|
||||
enableTimeouts(value: boolean): Mocha;
|
||||
asyncOnly(value: boolean): Mocha;
|
||||
noHighlighting(value: boolean): Mocha;
|
||||
/** Runs tests and invokes `onComplete()` when finished. */
|
||||
run(onComplete?: (failures: number) => void): Mocha.IRunner;
|
||||
}
|
||||
|
||||
// merge the Mocha class declaration with a module
|
||||
declare namespace Mocha {
|
||||
/** Partial interface for Mocha's `Runnable` class. */
|
||||
interface IRunnable {
|
||||
title: string;
|
||||
fn: Function;
|
||||
async: boolean;
|
||||
sync: boolean;
|
||||
timedOut: boolean;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Suite` class. */
|
||||
interface ISuite {
|
||||
parent: ISuite;
|
||||
title: string;
|
||||
|
||||
fullTitle(): string;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Test` class. */
|
||||
interface ITest extends IRunnable {
|
||||
parent: ISuite;
|
||||
pending: boolean;
|
||||
|
||||
fullTitle(): string;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Runner` class. */
|
||||
interface IRunner {}
|
||||
|
||||
interface IContextDefinition {
|
||||
(description: string, spec: () => void): ISuite;
|
||||
only(description: string, spec: () => void): ISuite;
|
||||
skip(description: string, spec: () => void): void;
|
||||
timeout(ms: number): void;
|
||||
}
|
||||
|
||||
interface ITestDefinition {
|
||||
(expectation: string, assertion?: ActionFunction): ITest;
|
||||
only(expectation: string, assertion?: ActionFunction): ITest;
|
||||
skip(expectation: string, assertion?: ActionFunction): void;
|
||||
timeout(ms: number): void;
|
||||
state: "failed" | "passed";
|
||||
}
|
||||
|
||||
export module reporters {
|
||||
export class Base {
|
||||
stats: {
|
||||
suites: number;
|
||||
tests: number;
|
||||
passes: number;
|
||||
pending: number;
|
||||
failures: number;
|
||||
};
|
||||
|
||||
constructor(runner: IRunner);
|
||||
}
|
||||
|
||||
export class Doc extends Base {}
|
||||
export class Dot extends Base {}
|
||||
export class HTML extends Base {}
|
||||
export class HTMLCov extends Base {}
|
||||
export class JSON extends Base {}
|
||||
export class JSONCov extends Base {}
|
||||
export class JSONStream extends Base {}
|
||||
export class Landing extends Base {}
|
||||
export class List extends Base {}
|
||||
export class Markdown extends Base {}
|
||||
export class Min extends Base {}
|
||||
export class Nyan extends Base {}
|
||||
export class Progress extends Base {
|
||||
/**
|
||||
* @param options.open String used to indicate the start of the progress bar.
|
||||
* @param options.complete String used to indicate a complete test on the progress bar.
|
||||
* @param options.incomplete String used to indicate an incomplete test on the progress bar.
|
||||
* @param options.close String used to indicate the end of the progress bar.
|
||||
*/
|
||||
constructor(runner: IRunner, options?: {
|
||||
open?: string;
|
||||
complete?: string;
|
||||
incomplete?: string;
|
||||
close?: string;
|
||||
});
|
||||
}
|
||||
export class Spec extends Base {}
|
||||
export class TAP extends Base {}
|
||||
export class XUnit extends Base {
|
||||
constructor(runner: IRunner, options?: any);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
declare module "mocha" {
|
||||
export = Mocha;
|
||||
}
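For orientation, a minimal sketch of a spec file written against these ambient declarations follows (tdd-style globals, showing both the `done` callback and the Promise form of ActionFunction); the assertion module and the test bodies are illustrative only.

```ts
// Sketch only: exercises the suite/test/setup globals declared above.
import * as assert from "assert";

suite("sample suite", () => {
    setup((done) => {
        // callback-style hook: signal completion through MochaDone
        done();
    });

    test("callback-style test", (done) => {
        assert.strictEqual(1 + 1, 2);
        done();
    });

    test("promise-style test", async () => {
        // an async function returns a PromiseLike, which ActionFunction accepts
        assert.ok(true);
    });
});
```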
|
The file diff is not shown because it is too large.
|
@ -1,8 +0,0 @@
|
|||
{
|
||||
"resolution": "main",
|
||||
"tree": {
|
||||
"src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a4a912a0cd1849fa7df0e5d909c8625fba04e49d/node/index.d.ts",
|
||||
"raw": "registry:dt/node#7.0.0+20170322231424",
|
||||
"typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a4a912a0cd1849fa7df0e5d909c8625fba04e49d/node/index.d.ts"
|
||||
}
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/7de6c3dd94feaeb21f20054b9f30d5dabc5efabd/semaphore/semaphore.d.ts
|
||||
declare module 'semaphore' {
|
||||
|
||||
function semaphore(capacity?: number): semaphore.Semaphore;
|
||||
|
||||
namespace semaphore {
|
||||
|
||||
interface Task {
|
||||
(): void;
|
||||
}
|
||||
|
||||
interface Semaphore {
|
||||
capacity: number;
|
||||
|
||||
take(task: Task): void;
|
||||
take(n: number, task: Task): void;
|
||||
|
||||
leave(n?: number): void;
|
||||
}
|
||||
}
|
||||
export = semaphore;
|
||||
}
|
The file diff is not shown because it is too large.
|
@ -1,4 +0,0 @@
|
|||
/// <reference path="globals/grunt/index.d.ts" />
|
||||
/// <reference path="globals/mocha/index.d.ts" />
|
||||
/// <reference path="globals/node/index.d.ts" />
|
||||
/// <reference path="globals/underscore/index.d.ts" />
|
|
@ -0,0 +1,258 @@
|
|||
import { Agent } from "https";
|
||||
import * as tunnel from "tunnel";
|
||||
import * as url from "url";
|
||||
import { Base, ResponseCallback } from "./base";
|
||||
import { Constants, Helper, Platform } from "./common";
|
||||
import { RequestOptions } from "./documentclient";
|
||||
import { ConnectionPolicy, ConsistencyLevel, DatabaseAccount, QueryCompatibilityMode } from "./documents";
|
||||
import { GlobalEndpointManager } from "./globalEndpointManager";
|
||||
import { IHeaders } from "./queryExecutionContext";
|
||||
import { RequestHandler, Response } from "./request";
|
||||
import { SessionContainer } from "./sessionContainer";
|
||||
|
||||
// Using this to organize public vs internal methods
|
||||
export abstract class DocumentClientBase {
|
||||
public masterKey: string;
|
||||
public resourceTokens: { [key: string]: string };
|
||||
public tokenProvider: any;
|
||||
public connectionPolicy: ConnectionPolicy;
|
||||
public consistencyLevel: ConsistencyLevel;
|
||||
public defaultHeaders: IHeaders;
|
||||
public defaultUrlParams: string;
|
||||
public queryCompatibilityMode: QueryCompatibilityMode;
|
||||
public partitionResolvers: any; // TODO: any partitionResolvers
public partitionKeyDefinitionCache: any; // TODO: PartitionKeyDefinitionCache
|
||||
// tslint:disable-next-line:variable-name
|
||||
protected _globalEndpointManager: GlobalEndpointManager; // TODO: code smell naming
|
||||
public sessionContainer: SessionContainer;
|
||||
public requestAgent: Agent;
|
||||
constructor(
|
||||
public urlConnection: string,
|
||||
auth: any, // TODO: any auth
|
||||
connectionPolicy: ConnectionPolicy,
|
||||
consistencyLevel: ConsistencyLevel) {
|
||||
if (auth) {
|
||||
this.masterKey = auth.masterKey;
|
||||
this.resourceTokens = auth.resourceTokens;
|
||||
if (auth.permissionFeed) {
|
||||
this.resourceTokens = {};
|
||||
for (const permission of auth.permissionFeed) {
|
||||
const resourceId = Helper.getResourceIdFromPath(permission.resource);
|
||||
if (!resourceId) {
|
||||
throw new Error(`authorization error: ${resourceId} \
|
||||
is an invalid resourceId in permissionFeed`);
|
||||
}
|
||||
|
||||
this.resourceTokens[resourceId] = permission._token;
|
||||
}
|
||||
}
|
||||
this.tokenProvider = auth.tokenProvider;
|
||||
}
|
||||
|
||||
this.connectionPolicy = connectionPolicy || new ConnectionPolicy();
|
||||
this.consistencyLevel = consistencyLevel;
|
||||
this.defaultHeaders = {};
|
||||
this.defaultHeaders[Constants.HttpHeaders.CacheControl] = "no-cache";
|
||||
this.defaultHeaders[Constants.HttpHeaders.Version] = Constants.CurrentVersion;
|
||||
if (consistencyLevel !== undefined) {
|
||||
this.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] = consistencyLevel;
|
||||
}
|
||||
|
||||
const platformDefaultHeaders = Platform.getPlatformDefaultHeaders() || {};
|
||||
for (const platformDefaultHeader of Object.keys(platformDefaultHeaders)) {
|
||||
this.defaultHeaders[platformDefaultHeader] = platformDefaultHeaders[platformDefaultHeader];
|
||||
}
|
||||
|
||||
this.defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
|
||||
|
||||
// override this for default query params to be added to the url.
|
||||
this.defaultUrlParams = "";
|
||||
|
||||
// Query compatibility mode.
|
||||
// Allows specifying the compatibility mode used by the client when making query requests. Should be removed when
|
||||
// application/sql is no longer supported.
|
||||
this.queryCompatibilityMode = QueryCompatibilityMode.Default;
|
||||
this.partitionResolvers = {};
|
||||
|
||||
this.partitionKeyDefinitionCache = {};
|
||||
|
||||
this._globalEndpointManager = new GlobalEndpointManager(this);
|
||||
|
||||
this.sessionContainer = new SessionContainer(this.urlConnection);
|
||||
|
||||
// Initialize request agent
|
||||
const requestAgentOptions: any = { keepAlive: true, maxSockets: Infinity }; // TODO: any
|
||||
if (!!this.connectionPolicy.ProxyUrl) {
|
||||
const proxyUrl = url.parse(this.connectionPolicy.ProxyUrl);
|
||||
requestAgentOptions.proxy = {
|
||||
host: proxyUrl.hostname,
|
||||
port: proxyUrl.port,
|
||||
};
|
||||
|
||||
if (!!proxyUrl.auth) {
|
||||
requestAgentOptions.proxy.proxyAuth = proxyUrl.auth;
|
||||
}
|
||||
|
||||
this.requestAgent = (proxyUrl.protocol.toLowerCase() === "https:" ?
|
||||
tunnel.httpsOverHttps(requestAgentOptions) :
|
||||
tunnel.httpsOverHttp(requestAgentOptions)) as any; // TODO: type coercion
|
||||
} else {
|
||||
this.requestAgent = new Agent(requestAgentOptions); // TODO: Move to request?
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the Database account information.
|
||||
* @memberof DocumentClient
|
||||
* @instance
|
||||
* @param {string} [options.urlConnection] - The endpoint url whose database account needs to be retrieved. \
|
||||
* If not present, current client's url will be used.
|
||||
* @param {RequestCallback} callback - The callback for the request. The second parameter of the \
|
||||
* callback will be of type {@link DatabaseAccount}.
|
||||
*/
|
||||
public async getDatabaseAccount(
|
||||
options?: RequestOptions,
|
||||
callback?: ResponseCallback<DatabaseAccount>): Promise<Response<DatabaseAccount>> {
|
||||
const optionsCallbackTuple = this.validateOptionsAndCallback(options, callback);
|
||||
options = optionsCallbackTuple.options;
|
||||
callback = optionsCallbackTuple.callback;
|
||||
|
||||
const urlConnection = options.urlConnection || this.urlConnection;
|
||||
|
||||
const requestHeaders = await Base.getHeaders(this, this.defaultHeaders, "get", "", "", "", {});
|
||||
|
||||
try {
|
||||
const { result, headers } = await this.get(urlConnection, "", requestHeaders);
|
||||
|
||||
const databaseAccount = new DatabaseAccount();
|
||||
databaseAccount.DatabasesLink = "/dbs/";
|
||||
databaseAccount.MediaLink = "/media/";
|
||||
databaseAccount.MaxMediaStorageUsageInMB =
|
||||
headers[Constants.HttpHeaders.MaxMediaStorageUsageInMB] as number;
|
||||
databaseAccount.CurrentMediaStorageUsageInMB =
|
||||
headers[Constants.HttpHeaders.CurrentMediaStorageUsageInMB] as number;
|
||||
databaseAccount.ConsistencyPolicy = result.userConsistencyPolicy;
|
||||
|
||||
// WritableLocations and ReadableLocations properties will be available
|
||||
// only for geo-replicated database accounts
|
||||
if (Constants.WritableLocations in result && result.id !== "localhost") {
|
||||
databaseAccount._writableLocations = result[Constants.WritableLocations];
|
||||
}
|
||||
if (Constants.ReadableLocations in result && result.id !== "localhost") {
|
||||
databaseAccount._readableLocations = result[Constants.ReadableLocations];
|
||||
}
|
||||
|
||||
if (callback) {
|
||||
callback(null, databaseAccount, headers);
|
||||
return;
|
||||
} else {
|
||||
return { result: databaseAccount, headers };
|
||||
}
|
||||
} catch (err) {
|
||||
if (callback) {
|
||||
callback(err);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public validateOptionsAndCallback(optionsIn: any, callbackIn: any) {
|
||||
let options;
|
||||
let callback;
|
||||
|
||||
// options
|
||||
if (optionsIn === undefined) {
|
||||
options = new Object();
|
||||
} else if (callbackIn === undefined && typeof optionsIn === "function") {
|
||||
callback = optionsIn;
|
||||
options = new Object();
|
||||
} else if (typeof optionsIn !== "object") {
|
||||
throw new Error(
|
||||
`The "options" parameter must be of type "object". Actual type is: "${typeof optionsIn}".`);
|
||||
} else {
|
||||
options = optionsIn;
|
||||
}
|
||||
|
||||
// callback
|
||||
if (callbackIn !== undefined && typeof callbackIn !== "function") {
|
||||
throw new Error(
|
||||
`The "callback" parameter must be of type "function". Actual type is: "${typeof callbackIn}".`);
|
||||
} else if (typeof callbackIn === "function") {
|
||||
callback = callbackIn;
|
||||
}
|
||||
|
||||
return { options, callback };
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public get(urlString: string, request: any, headers: IHeaders) { // TODO: any
|
||||
return RequestHandler.request(
|
||||
this._globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
this.requestAgent,
|
||||
"GET",
|
||||
urlString,
|
||||
request,
|
||||
undefined,
|
||||
this.defaultUrlParams,
|
||||
headers);
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public post(urlString: string, request: any, body: any, headers: IHeaders) { // TODO: any
|
||||
return RequestHandler.request(
|
||||
this._globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
this.requestAgent,
|
||||
"POST",
|
||||
urlString,
|
||||
request,
|
||||
body,
|
||||
this.defaultUrlParams,
|
||||
headers);
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public put(urlString: string, request: any, body: any, headers: IHeaders) { // TODO: any
|
||||
return RequestHandler.request(
|
||||
this._globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
this.requestAgent,
|
||||
"PUT",
|
||||
urlString,
|
||||
request,
|
||||
body,
|
||||
this.defaultUrlParams,
|
||||
headers);
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public head(urlString: string, request: any, headers: IHeaders) { // TODO: any
|
||||
return RequestHandler.request(
|
||||
this._globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
this.requestAgent,
|
||||
"HEAD",
|
||||
urlString,
|
||||
request,
|
||||
undefined,
|
||||
this.defaultUrlParams,
|
||||
headers);
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public delete(urlString: string, request: any, headers: IHeaders) {
|
||||
return RequestHandler.request(
|
||||
this._globalEndpointManager,
|
||||
this.connectionPolicy,
|
||||
this.requestAgent,
|
||||
"DELETE",
|
||||
urlString,
|
||||
request,
|
||||
undefined,
|
||||
this.defaultUrlParams,
|
||||
headers);
|
||||
}
|
||||
}
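As a usage sketch (not part of this file), the two calling conventions that validateOptionsAndCallback enables for getDatabaseAccount look roughly like this; the client instance is assumed to come from the concrete DocumentClient declared elsewhere in this change.

```ts
// Sketch only: assumes `client` is an instance of the concrete DocumentClient
// subclass; getDatabaseAccount supports both promise and callback styles.
import { DocumentClient } from "./documentclient";

async function readAccountWithPromise(client: DocumentClient) {
    // No callback: await the { result, headers } envelope instead.
    const { result: account, headers } = await client.getDatabaseAccount();
    console.log(account.DatabasesLink, headers);
}

function readAccountWithCallback(client: DocumentClient) {
    // Callback passed: validateOptionsAndCallback routes the result to it.
    client.getDatabaseAccount({}, (err, account) => {
        if (err) { throw err; }
        console.log(account.DatabasesLink); // "/dbs/"
    });
}
```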
|
|
@ -0,0 +1,114 @@
|
|||
import * as crypto from "crypto";
|
||||
import { DocumentClient } from "./documentclient";
|
||||
import { DocumentClientBase } from "./DocumentClientBase";
|
||||
import { IHeaders } from "./queryExecutionContext";
|
||||
|
||||
export interface IRequestInfo {
|
||||
[index: string]: any;
|
||||
verb: string;
|
||||
path: string;
|
||||
resourceId: string;
|
||||
resourceType: string;
|
||||
headers: IHeaders;
|
||||
}
|
||||
|
||||
export interface ITokenProvider {
|
||||
getToken: (requestInfo: IRequestInfo, callback?: (err: Error, token: string) => void) => Promise<string>;
|
||||
}
|
||||
|
||||
export class AuthHandler {
|
||||
// TODO: documentClient
|
||||
public static async getAuthorizationHeader(
|
||||
documentClient: DocumentClientBase,
|
||||
verb: string,
|
||||
path: string,
|
||||
resourceId: string,
|
||||
resourceType: string,
|
||||
headers: IHeaders): Promise<string> {
|
||||
if (documentClient.masterKey) {
|
||||
return encodeURIComponent(AuthHandler.getAuthorizationTokenUsingMasterKey(
|
||||
verb, resourceId, resourceType, headers, documentClient.masterKey));
|
||||
} else if (documentClient.resourceTokens) {
|
||||
return encodeURIComponent(AuthHandler.getAuthorizationTokenUsingResourceTokens(
|
||||
documentClient.resourceTokens, path, resourceId));
|
||||
} else if (documentClient.tokenProvider) {
|
||||
return encodeURIComponent(await AuthHandler.getAuthorizationTokenUsingTokenProvider(
|
||||
documentClient.tokenProvider, {
|
||||
verb, path, resourceId, resourceType, headers,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
private static getAuthorizationTokenUsingMasterKey(
|
||||
verb: string, resourceId: string, resourceType: string,
|
||||
headers: IHeaders, masterKey: string) {
|
||||
const key = new Buffer(masterKey, "base64");
|
||||
|
||||
const text = (verb || "").toLowerCase() + "\n" +
|
||||
(resourceType || "").toLowerCase() + "\n" +
|
||||
(resourceId || "") + "\n" +
|
||||
(headers["x-ms-date"] as string || "").toLowerCase() + "\n" +
|
||||
(headers["date"] as string || "").toLowerCase() + "\n";
|
||||
|
||||
const body = new Buffer(text, "utf8");
|
||||
const signature = crypto.createHmac("sha256", key).update(body).digest("base64");
|
||||
const MasterToken = "master";
|
||||
const TokenVersion = "1.0";
|
||||
|
||||
return `type=${MasterToken}&ver=${TokenVersion}&sig=${signature}`;
|
||||
}
|
||||
|
||||
// TODO: Resource tokens
|
||||
private static getAuthorizationTokenUsingResourceTokens(
|
||||
resourceTokens: { [resourceId: string]: string }, path: string, resourceId: string) {
|
||||
if (resourceTokens && Object.keys(resourceTokens).length > 0) {
|
||||
// For database account access (through getDatabaseAccount API), path and resourceId are "",
|
||||
// so in this case we return the first token to be used for creating the auth header as the
|
||||
// service will accept any token in this case
|
||||
if (!path && !resourceId) {
|
||||
return resourceTokens[Object.keys(resourceTokens)[0]];
|
||||
}
|
||||
|
||||
if (resourceId && resourceTokens[resourceId]) {
|
||||
return resourceTokens[resourceId];
|
||||
}
|
||||
|
||||
// minimum valid path /dbs
|
||||
if (!path || path.length < 4) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// remove '/' from left and right of path
|
||||
path = path[0] === "/" ? path.substring(1) : path;
|
||||
path = path[path.length - 1] === "/" ? path.substring(0, path.length - 1) : path;
|
||||
|
||||
const pathSegments = (path && path.split("/")) || [];
|
||||
|
||||
// if it's an incomplete path like /dbs/db1/colls/, start from the parent resource
|
||||
let index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
|
||||
for (; index > 0; index -= 2) {
|
||||
const id = decodeURI(pathSegments[index]);
|
||||
if (resourceTokens[id]) {
|
||||
return resourceTokens[id];
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static getAuthorizationTokenUsingTokenProvider(
|
||||
tokenProvider: ITokenProvider, requestInfo: IRequestInfo): Promise<string> {
|
||||
requestInfo.getAuthorizationTokenUsingMasterKey = AuthHandler.getAuthorizationTokenUsingMasterKey;
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const callback = (err: Error, token: string) => {
|
||||
if (err) { return reject(err); }
|
||||
resolve(token);
|
||||
};
|
||||
|
||||
const results = tokenProvider.getToken(requestInfo, callback);
|
||||
if (results.then && typeof results.then === "function") {
|
||||
resolve(await results);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
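Beyond master keys and resource tokens, the ITokenProvider contract above lets callers delegate signing. A minimal sketch of such a provider follows; the token service call is a placeholder, not part of this SDK.

```ts
// Sketch of an ITokenProvider: getToken may return a Promise<string> or use
// the optional callback; AuthHandler resolves whichever completes.
import { IRequestInfo, ITokenProvider } from "./auth";

// Hypothetical helper standing in for a real token service call.
async function fetchTokenFromYourService(info: IRequestInfo): Promise<string> {
    return `type=resource&ver=1.0&sig=<signature-for-${info.resourceId}>`;
}

const tokenProvider: ITokenProvider = {
    getToken: (requestInfo: IRequestInfo) => {
        // requestInfo carries verb, path, resourceId, resourceType and headers.
        return fetchTokenFromYourService(requestInfo);
    },
};
```

A client constructed with `{ tokenProvider }` as its auth argument then takes the tokenProvider branch of getAuthorizationHeader whenever no master key or resource tokens are configured.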
|
|
@ -0,0 +1,405 @@
|
|||
import { AuthHandler } from "./auth";
|
||||
import { Constants, Platform } from "./common";
|
||||
import { DocumentClient, FeedOptions, MediaOptions, Options, RequestOptions } from "./documentclient";
|
||||
import { DocumentClientBase } from "./DocumentClientBase";
|
||||
import { IHeaders } from "./queryExecutionContext";
|
||||
import { Response } from "./request";
|
||||
|
||||
export class Base {
|
||||
public static extend(arg0: any, arg1: any): any {
|
||||
// tslint:disable-next-line:prefer-object-spread
|
||||
return Object.assign(arg0, arg1);
|
||||
}
|
||||
public static map(arg0: any[], arg1: any): any[] {
|
||||
return arg0.map(arg1);
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
public static jsonStringifyAndEscapeNonASCII(arg: any) { // TODO: better way for this? Not sure.
|
||||
// escapes non-ASCII characters as \uXXXX
|
||||
return JSON.stringify(arg).replace(/[\u0080-\uFFFF]/g, (m) => {
|
||||
return "\\u" + ("0000" + m.charCodeAt(0).toString(16)).slice(-4);
|
||||
});
|
||||
}
|
||||
|
||||
public static async getHeaders(
|
||||
documentClient: DocumentClientBase,
|
||||
defaultHeaders: IHeaders,
|
||||
verb: string, path: string,
|
||||
resourceId: string,
|
||||
resourceType: string,
|
||||
options: RequestOptions | FeedOptions | MediaOptions,
|
||||
partitionKeyRangeId?: string): Promise<IHeaders> {
|
||||
|
||||
const headers: IHeaders = { ...defaultHeaders };
|
||||
const opts: RequestOptions & FeedOptions & MediaOptions = (options || {}) as any; // TODO: this is dirty
|
||||
|
||||
if (opts.continuation) {
|
||||
headers[Constants.HttpHeaders.Continuation] = opts.continuation;
|
||||
}
|
||||
|
||||
if (opts.preTriggerInclude) {
|
||||
headers[Constants.HttpHeaders.PreTriggerInclude] =
|
||||
opts.preTriggerInclude.constructor === Array
|
||||
? (opts.preTriggerInclude as string[]).join(",")
|
||||
: opts.preTriggerInclude as string;
|
||||
}
|
||||
|
||||
if (opts.postTriggerInclude) {
|
||||
headers[Constants.HttpHeaders.PostTriggerInclude] =
|
||||
opts.postTriggerInclude.constructor === Array
|
||||
? (opts.postTriggerInclude as string[]).join(",")
|
||||
: opts.postTriggerInclude as string;
|
||||
}
|
||||
|
||||
if (opts.offerType) {
|
||||
headers[Constants.HttpHeaders.OfferType] = opts.offerType;
|
||||
}
|
||||
|
||||
if (opts.offerThroughput) {
|
||||
headers[Constants.HttpHeaders.OfferThroughput] = opts.offerThroughput;
|
||||
}
|
||||
|
||||
if (opts.maxItemCount) {
|
||||
headers[Constants.HttpHeaders.PageSize] = opts.maxItemCount;
|
||||
}
|
||||
|
||||
if (opts.accessCondition) {
|
||||
if (opts.accessCondition.type === "IfMatch") {
|
||||
headers[Constants.HttpHeaders.IfMatch] = opts.accessCondition.condition;
|
||||
} else {
|
||||
headers[Constants.HttpHeaders.IfNoneMatch] = opts.accessCondition.condition;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.a_im) {
|
||||
headers[Constants.HttpHeaders.A_IM] = opts.a_im;
|
||||
}
|
||||
|
||||
if (opts.indexingDirective) {
|
||||
headers[Constants.HttpHeaders.IndexingDirective] = opts.indexingDirective;
|
||||
}
|
||||
|
||||
// TODO: add consistency level validation.
|
||||
if (opts.consistencyLevel) {
|
||||
headers[Constants.HttpHeaders.ConsistencyLevel] = opts.consistencyLevel;
|
||||
}
|
||||
|
||||
if (opts.resourceTokenExpirySeconds) {
|
||||
headers[Constants.HttpHeaders.ResourceTokenExpiry] = opts.resourceTokenExpirySeconds;
|
||||
}
|
||||
|
||||
// TODO: add session token automatic handling in case of session consistency.
|
||||
if (opts.sessionToken) {
|
||||
headers[Constants.HttpHeaders.SessionToken] = opts.sessionToken;
|
||||
}
|
||||
|
||||
if (opts.enableScanInQuery) {
|
||||
headers[Constants.HttpHeaders.EnableScanInQuery] = opts.enableScanInQuery;
|
||||
}
|
||||
|
||||
if (opts.enableCrossPartitionQuery) {
|
||||
headers[Constants.HttpHeaders.EnableCrossPartitionQuery] = opts.enableCrossPartitionQuery;
|
||||
}
|
||||
|
||||
if (opts.maxDegreeOfParallelism !== undefined) {
|
||||
headers[Constants.HttpHeaders.ParallelizeCrossPartitionQuery] = true;
|
||||
}
|
||||
|
||||
if (opts.populateQuotaInfo) {
|
||||
headers[Constants.HttpHeaders.PopulateQuotaInfo] = true;
|
||||
}
|
||||
|
||||
// If the user is not using partition resolver, we add options.partitionKey to the header for elastic collections
|
||||
if ((documentClient as any).partitionResolver === undefined // TODO: paritionResolver does not exist
|
||||
|| (documentClient as any).partitionResolver === null) {
|
||||
if (opts.partitionKey !== undefined) {
|
||||
let partitionKey: string[] | string = opts.partitionKey;
|
||||
if (partitionKey === null || !Array.isArray(partitionKey)) {
|
||||
partitionKey = [partitionKey as string];
|
||||
}
|
||||
headers[Constants.HttpHeaders.PartitionKey] = Base.jsonStringifyAndEscapeNonASCII(partitionKey);
|
||||
}
|
||||
}
|
||||
|
||||
if (documentClient.masterKey || documentClient.tokenProvider) {
|
||||
headers[Constants.HttpHeaders.XDate] = new Date().toUTCString();
|
||||
}
|
||||
|
||||
if (verb === "post" || verb === "put") {
|
||||
if (!headers[Constants.HttpHeaders.ContentType]) {
|
||||
headers[Constants.HttpHeaders.ContentType] = Constants.MediaTypes.Json;
|
||||
}
|
||||
}
|
||||
|
||||
if (!headers[Constants.HttpHeaders.Accept]) {
|
||||
headers[Constants.HttpHeaders.Accept] = Constants.MediaTypes.Json;
|
||||
}
|
||||
|
||||
if (partitionKeyRangeId !== undefined) {
|
||||
headers[Constants.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId;
|
||||
}
|
||||
|
||||
if (opts.enableScriptLogging) {
|
||||
headers[Constants.HttpHeaders.EnableScriptLogging] = opts.enableScriptLogging;
|
||||
}
|
||||
|
||||
if (opts.offerEnableRUPerMinuteThroughput) {
|
||||
headers[Constants.HttpHeaders.OfferIsRUPerMinuteThroughputEnabled] = true;
|
||||
}
|
||||
|
||||
if (opts.disableRUPerMinuteUsage) {
|
||||
headers[Constants.HttpHeaders.DisableRUPerMinuteUsage] = true;
|
||||
}
|
||||
if (documentClient.masterKey || documentClient.resourceTokens || documentClient.tokenProvider) {
|
||||
const token = await AuthHandler.getAuthorizationHeader(
|
||||
documentClient, verb, path, resourceId, resourceType, headers);
|
||||
headers[Constants.HttpHeaders.Authorization] = token;
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
public static parseLink(resourcePath: string) {
|
||||
if (resourcePath.length === 0) {
|
||||
/* for DatabaseAccount case, both type and objectBody will be undefined. */
|
||||
return {
|
||||
type: undefined,
|
||||
objectBody: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
if (resourcePath[resourcePath.length - 1] !== "/") {
|
||||
resourcePath = resourcePath + "/";
|
||||
}
|
||||
|
||||
if (resourcePath[0] !== "/") {
|
||||
resourcePath = "/" + resourcePath;
|
||||
}
|
||||
|
||||
/*
|
||||
The path will be in the form of /[resourceType]/[resourceId]/ ....
|
||||
/[resourceType]//[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/
|
||||
or /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/[resourceType]/[resourceId]/ ....
|
||||
/[resourceType]/[resourceId]/
|
||||
The result of split will be in the form of
|
||||
["", [resourceType], [resourceId], ... , [resourceType], [resourceId], ""]
In the first case, to extract the resourceId, it will be the element before last (at length - 2)
and the type will be before it (at length - 3).
In the second case, to extract the resource type, it will be the element before last (at length - 2).
|
||||
*/
|
||||
const pathParts = resourcePath.split("/");
|
||||
let id;
|
||||
let type;
|
||||
if (pathParts.length % 2 === 0) {
|
||||
// request in form /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId].
|
||||
id = pathParts[pathParts.length - 2];
|
||||
type = pathParts[pathParts.length - 3];
|
||||
} else {
|
||||
// request in form /[resourceType]/[resourceId]/ .... /[resourceType]/.
|
||||
id = pathParts[pathParts.length - 3];
|
||||
type = pathParts[pathParts.length - 2];
|
||||
}
|
||||
|
||||
const result = {
|
||||
type,
|
||||
objectBody: {
|
||||
id,
|
||||
self: resourcePath,
|
||||
},
|
||||
};
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static parsePath(path: string) {
|
||||
const pathParts = [];
|
||||
let currentIndex = 0;
|
||||
|
||||
const throwError = () => {
|
||||
throw new Error("Path " + path + " is invalid at index " + currentIndex);
|
||||
};
|
||||
|
||||
const getEscapedToken = () => {
|
||||
const quote = path[currentIndex];
|
||||
let newIndex = ++currentIndex;
|
||||
|
||||
while (true) {
|
||||
newIndex = path.indexOf(quote, newIndex);
|
||||
if (newIndex === -1) {
|
||||
throwError();
|
||||
}
|
||||
|
||||
if (path[newIndex - 1] !== "\\") { break; }
|
||||
|
||||
++newIndex;
|
||||
}
|
||||
|
||||
const token = path.substr(currentIndex, newIndex - currentIndex);
|
||||
currentIndex = newIndex + 1;
|
||||
return token;
|
||||
};
|
||||
|
||||
const getToken = () => {
|
||||
const newIndex = path.indexOf("/", currentIndex);
|
||||
let token = null;
|
||||
if (newIndex === -1) {
|
||||
token = path.substr(currentIndex);
|
||||
currentIndex = path.length;
|
||||
} else {
|
||||
token = path.substr(currentIndex, newIndex - currentIndex);
|
||||
currentIndex = newIndex;
|
||||
}
|
||||
|
||||
token = token.trim();
|
||||
return token;
|
||||
};
|
||||
|
||||
while (currentIndex < path.length) {
|
||||
if (path[currentIndex] !== "/") {
|
||||
throwError();
|
||||
}
|
||||
|
||||
if (++currentIndex === path.length) { break; }
|
||||
|
||||
if (path[currentIndex] === '\"' || path[currentIndex] === "'") {
|
||||
pathParts.push(getEscapedToken());
|
||||
} else {
|
||||
pathParts.push(getToken());
|
||||
}
|
||||
}
|
||||
|
||||
return pathParts;
|
||||
}
|
||||
|
||||
public static getDatabaseLink(link: string) {
|
||||
return link.split("/").slice(0, 2).join("/");
|
||||
}
|
||||
|
||||
public static getCollectionLink(link: string) {
|
||||
return link.split("/").slice(0, 4).join("/");
|
||||
}
|
||||
|
||||
public static getAttachmentIdFromMediaId(mediaId: string) {
|
||||
// Replace - with / on the incoming mediaId. This will preserve the / so that we can revert it later.
|
||||
const buffer = new Buffer(mediaId.replace(/-/g, "/"), "base64");
|
||||
const ResoureIdLength = 20;
|
||||
// After the base64 conversion, change the / back to a - to get the proper attachmentId
|
||||
return buffer.length > ResoureIdLength
|
||||
? buffer.toString("base64", 0, ResoureIdLength).replace(/\//g, "-")
|
||||
: mediaId;
|
||||
}
|
||||
|
||||
public static getHexaDigit() {
|
||||
return Math.floor(Math.random() * 16).toString(16);
|
||||
}
|
||||
|
||||
// TODO: replace with a well-known library?
|
||||
public static generateGuidId() {
|
||||
let id = "";
|
||||
|
||||
for (let i = 0; i < 8; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (let i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (let i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (let i = 0; i < 4; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
id += "-";
|
||||
|
||||
for (let i = 0; i < 12; i++) {
|
||||
id += Base.getHexaDigit();
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
public static isLinkNameBased(link: string) {
|
||||
const parts = link.split("/");
|
||||
let firstId = "";
|
||||
let count = 0;
|
||||
// Get the first id from path.
|
||||
for (const part of parts) {
|
||||
if (!part) {
|
||||
// Skip empty string.
|
||||
continue;
|
||||
}
|
||||
++count;
|
||||
if (count === 1 && part.toLowerCase() !== "dbs") {
|
||||
return false;
|
||||
}
|
||||
if (count === 2) {
|
||||
firstId = part;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!firstId) { return false; }
|
||||
if (firstId.length !== 8) { return true; }
|
||||
const decodedDataLength = Platform.getDecodedDataLength(firstId);
|
||||
if (decodedDataLength !== 4) { return true; }
|
||||
return false;
|
||||
}
|
||||
|
||||
public static _trimSlashes(source: string) {
|
||||
return source.replace(Constants.RegularExpressions.TrimLeftSlashes, "")
|
||||
.replace(Constants.RegularExpressions.TrimRightSlashes, "");
|
||||
}
|
||||
|
||||
public static _isValidCollectionLink(link: string) {
|
||||
if (typeof link !== "string") {
|
||||
return false;
|
||||
}
|
||||
|
||||
const parts = Base._trimSlashes(link).split("/");
|
||||
|
||||
if (parts && parts.length !== 4) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (parts[0] !== "dbs") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (parts[2] !== "colls") {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public static ThrowOrCallback(callback: ResponseCallback<any>, err: any) {
|
||||
if (callback) {
|
||||
process.nextTick(() => {
|
||||
callback(err);
|
||||
});
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
public static ResponseOrCallback(callback: ResponseCallback<any>, value: Response<any>) {
|
||||
if (callback) {
|
||||
process.nextTick(() => {
|
||||
callback(undefined, value.result, value.headers);
|
||||
});
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type ResponseCallback<T> = (err: any, result?: T, headers?: IHeaders) => void;
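To make the link parsing above concrete, here is a short sketch of the values parseLink and parsePath produce for typical inputs (traced from the logic above, not taken from a test suite):

```ts
import { Base } from "./base";

// Document self link: even segment count, so the last pair is id + type.
const doc = Base.parseLink("/dbs/db1/colls/coll1/docs/doc1");
// doc.type === "docs"
// doc.objectBody.id === "doc1"
// doc.objectBody.self === "/dbs/db1/colls/coll1/docs/doc1/"

// Feed link: odd segment count, so the trailing segment is the type.
const feed = Base.parseLink("/dbs/db1/colls/coll1/docs/");
// feed.type === "docs", feed.objectBody.id === "coll1"

// parsePath honours quoted segments, so ids containing "/" can be escaped.
const parts = Base.parsePath('/dbs/"my db"/colls/c1');
// parts deep-equals ["dbs", "my db", "colls", "c1"]
```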
|
|
@ -1,30 +1,4 @@
|
|||
/*
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
|
||||
//SCRIPT START
|
||||
|
||||
var Constants = {
|
||||
export const Constants = {
|
||||
MediaTypes: {
|
||||
Any: "*/*",
|
||||
ImageJpeg: "image/jpeg",
|
||||
|
@ -36,7 +10,7 @@ var Constants = {
|
|||
SQL: "application/sql",
|
||||
TextHtml: "text/html",
|
||||
TextPlain: "text/plain",
|
||||
Xml: "application/xml"
|
||||
Xml: "application/xml",
|
||||
},
|
||||
|
||||
HttpMethods: {
|
||||
|
@ -45,7 +19,7 @@ var Constants = {
|
|||
Put: "PUT",
|
||||
Delete: "DELETE",
|
||||
Head: "HEAD",
|
||||
Options: "OPTIONS"
|
||||
Options: "OPTIONS",
|
||||
},
|
||||
|
||||
HttpHeaders: {
|
||||
|
@ -96,7 +70,7 @@ var Constants = {
|
|||
Location: "Location",
|
||||
Referer: "referer",
|
||||
A_IM: "A-IM",
|
||||
|
||||
|
||||
// Query
|
||||
Query: "x-ms-documentdb-query",
|
||||
IsQuery: "x-ms-documentdb-isquery",
|
||||
|
@ -128,7 +102,7 @@ var Constants = {
|
|||
// Version headers and values
|
||||
Version: "x-ms-version",
|
||||
|
||||
//Owner name
|
||||
// Owner name
|
||||
OwnerFullName: "x-ms-alt-content-path",
|
||||
|
||||
// Owner ID used for name based request in session token.
|
||||
|
@ -136,9 +110,9 @@ var Constants = {
|
|||
|
||||
// Partition Key
|
||||
PartitionKey: "x-ms-documentdb-partitionkey",
|
||||
PartitionKeyRangeID: 'x-ms-documentdb-partitionkeyrangeid',
|
||||
PartitionKeyRangeID: "x-ms-documentdb-partitionkeyrangeid",
|
||||
|
||||
//Quota Info
|
||||
// Quota Info
|
||||
MaxEntityCount: "x-ms-root-entity-max-count",
|
||||
CurrentEntityCount: "x-ms-root-entity-current-count",
|
||||
CollectionQuotaInMb: "x-ms-collection-quota-mb",
|
||||
|
@ -170,14 +144,14 @@ var Constants = {
|
|||
|
||||
// StoredProcedure related headers
|
||||
EnableScriptLogging: "x-ms-documentdb-script-enable-logging",
|
||||
ScriptLogResults: "x-ms-documentdb-script-log-results"
|
||||
ScriptLogResults: "x-ms-documentdb-script-log-results",
|
||||
},
|
||||
|
||||
// GlobalDB related constants
|
||||
WritableLocations: 'writableLocations',
|
||||
ReadableLocations: 'readableLocations',
|
||||
Name: 'name',
|
||||
DatabaseAccountEndpoint: 'databaseAccountEndpoint',
|
||||
WritableLocations: "writableLocations",
|
||||
ReadableLocations: "readableLocations",
|
||||
Name: "name",
|
||||
DatabaseAccountEndpoint: "databaseAccountEndpoint",
|
||||
|
||||
// Client generated retry count response header
|
||||
ThrottleRetryCount: "x-ms-throttle-retry-count",
|
||||
|
@ -192,21 +166,21 @@ var Constants = {
|
|||
DefaultNumberHashPrecision: 3,
|
||||
DefaultNumberRangePrecision: -1,
|
||||
DefaultStringHashPrecision: 3,
|
||||
DefaultStringRangePrecision: -1
|
||||
DefaultStringRangePrecision: -1,
|
||||
},
|
||||
|
||||
ConsistentHashRing: {
|
||||
DefaultVirtualNodesPerCollection: 128
|
||||
DefaultVirtualNodesPerCollection: 128,
|
||||
},
|
||||
|
||||
RegularExpressions: {
|
||||
TrimLeftSlashes: new RegExp("^[/]+"),
|
||||
TrimRightSlashes: new RegExp("[/]+$"),
|
||||
IllegalResourceIdCharacters: new RegExp("[/\\\\?#]")
|
||||
IllegalResourceIdCharacters: new RegExp("[/\\\\?#]"),
|
||||
},
|
||||
|
||||
Quota: {
|
||||
CollectionSize: "collectionSize"
|
||||
CollectionSize: "collectionSize",
|
||||
},
|
||||
|
||||
Path: {
|
||||
|
@ -221,7 +195,7 @@ var Constants = {
|
|||
ConflictsPathSegment: "conflicts",
|
||||
AttachmentsPathSegment: "attachments",
|
||||
PartitionKeyRangesPathSegment: "pkranges",
|
||||
SchemasPathSegment: "schemas"
|
||||
SchemasPathSegment: "schemas",
|
||||
},
|
||||
|
||||
OperationTypes: {
|
||||
|
@ -233,10 +207,23 @@ var Constants = {
|
|||
Query: "query",
|
||||
},
|
||||
|
||||
PartitionKeyRange: {
|
||||
// Partition Key Range Constants
|
||||
MinInclusive: "minInclusive",
|
||||
MaxExclusive: "maxExclusive",
|
||||
Id: "id",
|
||||
},
|
||||
|
||||
QueryRangeConstants: {
|
||||
// Partition Key Range Constants
|
||||
MinInclusive: "minInclusive",
|
||||
MaxExclusive: "maxExclusive",
|
||||
min: "min",
|
||||
},
|
||||
|
||||
EffectiveParitionKeyConstants: {
|
||||
MinimumInclusiveEffectivePartitionKey: "",
|
||||
MaximumExclusiveEffectivePartitionKey: "FF",
|
||||
},
|
||||
|
||||
};
|
||||
|
||||
//SCRIPT END
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
module.exports = Constants;
|
||||
}
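With the constants now exposed via `export const Constants` instead of `module.exports`, consumers use an ES import; the lookup below mirrors how base.ts reads header names from it.

```ts
// The constant groups stay plain nested objects after the TypeScript conversion.
import { Constants } from "./common";

const partitionKeyHeader = Constants.HttpHeaders.PartitionKey;
// "x-ms-documentdb-partitionkey"
const writable = Constants.WritableLocations; // "writableLocations"
```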
|
|
@ -0,0 +1,54 @@
|
|||
import { Constants } from ".";
|
||||
|
||||
const Regexes = Constants.RegularExpressions;
|
||||
|
||||
export class Helper {
|
||||
public static isStringNullOrEmpty(inputString: string) {
|
||||
// checks whether string is null, undefined, empty or only contains space
|
||||
return !inputString || /^\s*$/.test(inputString);
|
||||
}
|
||||
|
||||
public static trimSlashFromLeftAndRight(inputString: string) {
|
||||
if (typeof inputString !== "string") {
|
||||
throw new Error("invalid input: input is not string");
|
||||
}
|
||||
|
||||
return inputString.replace(Regexes.TrimLeftSlashes, "").replace(Regexes.TrimRightSlashes, "");
|
||||
}
|
||||
|
||||
public static validateResourceId(resourceId: string) {
|
||||
// if resourceId is not a string or is empty throw an error
|
||||
if (typeof resourceId !== "string" || this.isStringNullOrEmpty(resourceId)) {
|
||||
throw new Error("Resource Id must be a string and cannot be undefined, null or empty");
|
||||
}
|
||||
|
||||
// if resourceId ends with a space throw an error
|
||||
if (resourceId[resourceId.length - 1] === " ") {
|
||||
throw new Error("Resource Id cannot end with space");
|
||||
}
|
||||
|
||||
// if resource id contains illegal characters throw an error
|
||||
if (Regexes.IllegalResourceIdCharacters.test(resourceId)) {
|
||||
throw new Error("Illegal characters ['/', '\\', '?', '#'] cannot be used in resourceId");
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
}
|
||||
|
||||
public static getResourceIdFromPath(resourcePath: string) {
|
||||
if (!resourcePath || typeof resourcePath !== "string") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const trimmedPath = this.trimSlashFromLeftAndRight(resourcePath);
|
||||
const pathSegments = trimmedPath.split("/");
|
||||
|
||||
// number of segments of a path must always be even
|
||||
if (pathSegments.length % 2 !== 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return pathSegments[pathSegments.length - 1];
|
||||
}
|
||||
}
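A brief sketch of the helper behaviour, with results traced from the checks above:

```ts
import { Helper } from "./common";

// Resource paths have an even number of segments; the last one is the rid.
Helper.getResourceIdFromPath("/dbs/db1/colls/coll1"); // "coll1"
Helper.getResourceIdFromPath("/dbs/db1/colls/");      // null (odd segment count)

// validateResourceId throws on empty ids, trailing spaces and /, \, ?, #.
Helper.validateResourceId("orders");                  // true
// Helper.validateResourceId("bad/id");               // throws: illegal characters
```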
|
|
@ -0,0 +1,6 @@
|
|||
export * from "./constants";
|
||||
export * from "./helper";
|
||||
export * from "./statusCodes";
|
||||
export * from "./uriFactory";
|
||||
export * from "./resourceId";
|
||||
export * from "./platform";
|
|
@ -0,0 +1,45 @@
|
|||
import * as os from "os";
|
||||
import * as semaphore from "semaphore";
|
||||
import * as util from "util";
|
||||
import { Constants } from ".";
|
||||
|
||||
export class Platform {
|
||||
public static getPlatformDefaultHeaders(): { [key: string]: string } {
|
||||
const defaultHeaders: { [key: string]: string } = {};
|
||||
defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
|
||||
return defaultHeaders;
|
||||
}
|
||||
|
||||
public static getDecodedDataLength(encodedData: string): number {
|
||||
const buffer = new Buffer(encodedData, "base64");
|
||||
return buffer.length;
|
||||
}
|
||||
|
||||
public static getUserAgent() {
|
||||
// gets the user agent in the following format
|
||||
// "{OSName}/{OSVersion} Nodejs/{NodejsVersion} documentdb-nodejs-sdk/{SDKVersion}"
|
||||
// for example:
|
||||
// "linux/3.4.0+ Nodejs/v0.10.25 documentdb-nodejs-sdk/1.10.0"
|
||||
// "win32/10.0.14393 Nodejs/v4.4.7 documentdb-nodejs-sdk/1.10.0"
|
||||
const osName = Platform._getSafeUserAgentSegmentInfo(os.platform());
|
||||
const osVersion = Platform._getSafeUserAgentSegmentInfo(os.release());
|
||||
const nodejsVersion = Platform._getSafeUserAgentSegmentInfo(process.version);
|
||||
|
||||
const userAgent =
|
||||
`${osName}/${osVersion} Nodejs/${nodejsVersion} ${Constants.SDKName}/${Constants.SDKVersion}`;
|
||||
return userAgent;
|
||||
}
|
||||
|
||||
public static _getSafeUserAgentSegmentInfo(s: string) {
|
||||
// catch null, undefined, etc
|
||||
if (typeof (s) !== "string") {
|
||||
s = "unknown";
|
||||
}
|
||||
// remove all white spaces
|
||||
s = s.replace(/\s+/g, "");
|
||||
if (!s) {
|
||||
s = "unknown";
|
||||
}
|
||||
return s;
|
||||
}
|
||||
}
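Putting the platform helpers together — the values below are illustrative; the real ones depend on the host OS, the Node.js runtime and Constants.SDKName / Constants.SDKVersion:

```ts
import { Platform } from "./common";

// e.g. "linux/4.15.0 Nodejs/v8.11.3 <SDKName>/<SDKVersion>"
const userAgent = Platform.getUserAgent();

// Segments are sanitised before being embedded in the string:
Platform._getSafeUserAgentSegmentInfo("Windows NT 10.0"); // "WindowsNT10.0"
Platform._getSafeUserAgentSegmentInfo(undefined as any);  // "unknown"

// Decoded base64 length, used by Base.isLinkNameBased to sniff resource ids:
Platform.getDecodedDataLength("AQAAAA=="); // 4
```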
|
Some files were not shown because too many files changed in this diff.