Node.js SDK 1.12.1 release
Srinath Narayanan 2017-08-09 14:19:14 -07:00
Parent 3aaf6f96c6
Commit 5e450ba55a
30 changed files with 747 additions and 263 deletions

View file

@ -1,3 +1,5 @@
## Changes in 1.12.1 : ##
## Changes in 1.12.0 : ##
- Added support for Request Unit per Minute (RU/m) feature.
- Added support for a new consistency level called ConsistentPrefix.
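For orientation, a minimal sketch of how the two new features surface in the client API, using the option names documented in the RequestOptions typedef later in this diff (host, masterKey and databaseLink are placeholder values):

var lib = require("documentdb");
// ConsistentPrefix is passed as the consistencyLevel constructor argument
var client = new lib.DocumentClient(host, { masterKey: masterKey }, undefined, "ConsistentPrefix");
// RU/m is opted into per collection at creation time
client.createCollection(databaseLink, { id: "sampleColl" },
    { offerThroughput: 400, offerEnableRUPerMinuteThroughput: true },
    function (err, coll) { /* collection created with RU/m enabled */ });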

View file

@ -28,9 +28,9 @@ var crypto = require("crypto");
var AuthHandler = {
getAuthorizationHeader: function (documentClient, verb, path, resourceId, resourceType, headers) {
if (documentClient.masterKey) {
return this.getAuthorizationTokenUsingMasterKey(verb, resourceId, resourceType, headers, documentClient.masterKey);
return encodeURIComponent(this.getAuthorizationTokenUsingMasterKey(verb, resourceId, resourceType, headers, documentClient.masterKey));
} else if (documentClient.resourceTokens) {
return this.getAuthorizationTokenUsingResourceTokens(documentClient.resourceTokens, path, resourceId);
return encodeURIComponent(this.getAuthorizationTokenUsingResourceTokens(documentClient.resourceTokens, path, resourceId));
}
},
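The url-encoding itself is not new; it moves here from base.js getHeaders (see below) so every caller of getAuthorizationHeader receives an already-encoded token. A quick illustration with an invented signature:

// Tokens have the form "type=master&ver=1.0&sig=<base64>"; the separators and
// the base64 payload both contain characters that are unsafe in a header value:
encodeURIComponent("type=master&ver=1.0&sig=5e4F+a/2=");
// -> "type%3Dmaster%26ver%3D1.0%26sig%3D5e4F%2Ba%2F2%3D"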
@ -38,10 +38,10 @@ var AuthHandler = {
var key = new Buffer(masterKey, "base64");
var text = (verb || "").toLowerCase() + "\n" +
(resourceType || "").toLowerCase() + "\n" +
(resourceId || "") + "\n" +
(headers["x-ms-date"] || "").toLowerCase() + "\n" +
(headers["date"] || "").toLowerCase() + "\n";
(resourceType || "").toLowerCase() + "\n" +
(resourceId || "") + "\n" +
(headers["x-ms-date"] || "").toLowerCase() + "\n" +
(headers["date"] || "").toLowerCase() + "\n";
var body = new Buffer(text, "utf8");
@ -61,23 +61,34 @@ var AuthHandler = {
if (!path && !resourceId) {
return resourceTokens[Object.keys(resourceTokens)[0]];
}
if (resourceTokens[resourceId]) {
if (resourceId && resourceTokens[resourceId]) {
return resourceTokens[resourceId];
} else {
var pathParts = path && path.split("/") || [];
var resourceTypes = ["dbs", "colls", "docs", "sprocs", "udfs", "triggers", "users", "permissions", "attachments", "media", "conflicts", "offers"];
// Get the last resource id from the path and get its token from resourceTokens
for (var i = pathParts.length - 1; i >= 0; i--) {
if (resourceTypes.indexOf(pathParts[i]) === -1) {
if (resourceTokens[pathParts[i]]) {
return resourceTokens[pathParts[i]];
}
}
}
//minimum valid path /dbs
if (!path || path.length < 4) {
return null;
}
//remove '/' from left and right of path
path = path[0] == '/' ? path.substring(1) : path;
path = path[path.length - 1] == '/' ? path.substring(0, path.length - 1) : path;
var pathSegments = (path && path.split("/")) || [];
//if it's an incomplete path like /dbs/db1/colls/, start from the parent resource
var index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
for (; index > 0; index -= 2) {
var id = decodeURI(pathSegments[index]);
if (resourceTokens[id]) {
return resourceTokens[id];
}
}
}
return null;
}
};
if (typeof exports !== "undefined") {

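A worked trace of the new resource-token lookup, under an invented token map: the walk starts at the last id segment and climbs two segments at a time toward the root, so a collection-level token also authorizes requests against its documents.

// Sketch of the same walk outside the SDK (rids and token are made up):
var tokens = { collRid: "sig-abc" };
var path = "/dbs/dbRid/colls/collRid/docs/docRid/";
path = path.replace(/^\/+|\/+$/g, "");   // "dbs/dbRid/colls/collRid/docs/docRid"
var segments = path.split("/");          // 6 segments, even
var i = segments.length % 2 === 0 ? segments.length - 1 : segments.length - 2;
for (; i > 0; i -= 2) {
    var id = decodeURI(segments[i]);     // "docRid" misses, then "collRid" hits
    if (tokens[id]) {
        console.log(id + " -> " + tokens[id]);   // collRid -> sig-abc
        break;
    }
}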
View file

@ -190,7 +190,15 @@ var Base = {
return result;
},
/** @ignore */
jsonStringifyAndEscapeNonASCII: function (arg) {
// escapes non-ASCII characters as \uXXXX
return JSON.stringify(arg).replace(/[\u0080-\uFFFF]/g, function(m) {
return "\\u" + ("0000" + m.charCodeAt(0).toString(16)).slice(-4);
});
},
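A quick before/after for the new escaping, assuming Base is in scope; only code points at U+0080 and above are rewritten, so plain ASCII payloads come out unchanged:

// JSON.stringify(["é"])                      -> '["é"]'       (raw non-ASCII, unsafe in a header)
// Base.jsonStringifyAndEscapeNonASCII(["é"]) -> '["\u00e9"]'  (pure-ASCII header value)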
getHeaders: function (documentClient, defaultHeaders, verb, path, resourceId, resourceType, options, partitionKeyRangeId) {
var headers = Base.extend({}, defaultHeaders);
@ -269,8 +277,7 @@ var Base = {
if (partitionKey === null || partitionKey.constructor !== Array) {
partitionKey = [partitionKey];
}
headers[Constants.HttpHeaders.PartitionKey] = JSON.stringify(partitionKey);
headers[Constants.HttpHeaders.PartitionKey] = this.jsonStringifyAndEscapeNonASCII(partitionKey);
}
}
@ -279,7 +286,7 @@ var Base = {
}
if (documentClient.masterKey || documentClient.resourceTokens) {
headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(AuthHandler.getAuthorizationHeader(documentClient, verb, path, resourceId, resourceType, headers));
headers[Constants.HttpHeaders.Authorization] = AuthHandler.getAuthorizationHeader(documentClient, verb, path, resourceId, resourceType, headers);
}
if (verb === "post" || verb === "put") {
@ -509,8 +516,8 @@ var Base = {
}
if (!firstId) return false;
if (firstId.length !== 8) return true;
var buffer = new Buffer(firstId, "base64");
if (buffer.length !== 4) return true;
var decodedDataLength = Platform.getDecodedDataLength(firstId);
if (decodedDataLength !== 4) return true;
return false;
},
/** @ignore */
@ -541,10 +548,6 @@ var Base = {
return true;
},
/** @ignore */
_getUserAgent: function () {
return Platform.getUserAgent();
}
};
//SCRIPT END

View file

@ -179,7 +179,7 @@ var Constants = {
CurrentVersion: "2017-01-19",
SDKName: "documentdb-nodejs-sdk",
SDKVersion: "1.12.0",
SDKVersion: "1.12.1",
DefaultPrecisions: {
DefaultNumberHashPrecision: 3,

View file

@ -24,12 +24,15 @@ SOFTWARE.
"use strict";
var Base = require("./base")
, AzureDocuments = require("./documents")
, QueryIterator = require("./queryIterator")
, RequestHandler = require("./request")
, RetryOptions = require("./retryOptions")
, GlobalEndpointManager = require("./globalEndpointManager")
, Constants = require("./constants");
, AzureDocuments = require("./documents")
, QueryIterator = require("./queryIterator")
, RequestHandler = require("./request")
, RetryOptions = require("./retryOptions")
, GlobalEndpointManager = require("./globalEndpointManager")
, Constants = require("./constants")
, Helper = require("./helper").Helper
, util = require("util")
, Platform = require("./platform");
//SCRIPT START
var DocumentClient = Base.defineClass(
@ -53,9 +56,12 @@ var DocumentClient = Base.defineClass(
if (auth.permissionFeed) {
this.resourceTokens = {};
for (var i = 0; i < auth.permissionFeed.length; i++) {
var resourceParts = auth.permissionFeed[i].resource.split("/");
var rid = resourceParts[resourceParts.length - 1];
this.resourceTokens[rid] = auth.permissionFeed[i]._token;
var resourceId = Helper.getResourceIdFromPath(auth.permissionFeed[i].resource);
if (!resourceId) {
throw new Error("authorization error: " + resourceId + " is an invalid resourceId in permissionFeed");
}
this.resourceTokens[resourceId] = auth.permissionFeed[i]._token;
}
}
}
@ -68,7 +74,12 @@ var DocumentClient = Base.defineClass(
this.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] = consistencyLevel;
}
this.defaultHeaders[Constants.HttpHeaders.UserAgent] = Base._getUserAgent();
var platformDefaultHeaders = Platform.getPlatformDefaultHeaders() || {};
for (var platformDefaultHeader in platformDefaultHeaders){
this.defaultHeaders[platformDefaultHeader] = platformDefaultHeaders[platformDefaultHeader];
}
this.defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
// override this for default query params to be added to the url.
this.defaultUrlParams = "";
@ -427,6 +438,7 @@ var DocumentClient = Base.defineClass(
callback = optionsCallbackTuple.callback;
var initialHeaders = Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
// Add required headers slug and content-type.
if (options.slug) {
@ -1356,7 +1368,7 @@ var DocumentClient = Base.defineClass(
that.replace(newDocument, path, "docs", id, undefined, options, callback);
};
if (options.partitionKey === undefined) {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
this.getPartitionKeyDefinition(Base.getCollectionLink(documentLink), function (err, partitionKeyDefinition, response, headers) {
if (err) return callback(err, response, headers);
options.partitionKey = that.extractPartitionKey(newDocument, partitionKeyDefinition);
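The effect of the new flag, sketched with a hypothetical call (the option reads like an internal opt-out for callers that have already resolved the key): when set, the implicit partition-key-definition read is skipped and the request is issued immediately.

// Without the flag, a missing options.partitionKey would first trigger a
// getPartitionKeyDefinition round trip against the parent collection:
client.replaceDocument(documentLink, newDocument,
    { skipGetPartitionKeyDefinition: true },
    function (err, replaced) { /* no definition lookup performed */ });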
@ -1806,6 +1818,7 @@ var DocumentClient = Base.defineClass(
callback = optionsCallbackTuple.callback;
var initialHeaders = Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
// Add required headers slug and content-type.
if (options.slug) {
@ -1865,6 +1878,7 @@ var DocumentClient = Base.defineClass(
var defaultHeaders = this.defaultHeaders;
var initialHeaders = Base.extend({}, defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
// Add required headers slug and content-type in case the body is a stream
if (options.slug) {
@ -1914,6 +1928,7 @@ var DocumentClient = Base.defineClass(
var defaultHeaders = this.defaultHeaders;
var initialHeaders = {};
initialHeaders = Base.extend(initialHeaders, defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
// Accept a single parameter or an array of parameters.
if (params !== null && params !== undefined && params.constructor !== Array) {
@ -2061,7 +2076,7 @@ var DocumentClient = Base.defineClass(
that.create(body, path, "docs", id, undefined, options, callback);
};
if (options.partitionKey === undefined) {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
this.getPartitionKeyDefinition(collectionLink, function (err, partitionKeyDefinition, response, headers) {
if (err) return callback(err, response, headers);
options.partitionKey = that.extractPartitionKey(body, partitionKeyDefinition);
@ -2101,7 +2116,7 @@ var DocumentClient = Base.defineClass(
that.upsert(body, path, "docs", id, undefined, options, callback);
};
if (options.partitionKey === undefined) {
if (options.partitionKey === undefined && options.skipGetPartitionKeyDefinition !== true) {
this.getPartitionKeyDefinition(collectionLink, function (err, partitionKeyDefinition, response, headers) {
if (err) return callback(err, response, headers);
options.partitionKey = that.extractPartitionKey(body, partitionKeyDefinition);
@ -2142,7 +2157,8 @@ var DocumentClient = Base.defineClass(
/** @ignore */
create: function (body, path, type, id, initialHeaders, options, callback) {
initialHeaders = initialHeaders || this.defaultHeaders;
initialHeaders = initialHeaders || Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
var headers = Base.getHeaders(this, initialHeaders, "post", path, id, type, options);
var that = this;
@ -2154,7 +2170,8 @@ var DocumentClient = Base.defineClass(
/** @ignore */
upsert: function (body, path, type, id, initialHeaders, options, callback) {
initialHeaders = initialHeaders || this.defaultHeaders;
initialHeaders = initialHeaders || Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
var headers = Base.getHeaders(this, initialHeaders, "post", path, id, type, options);
this.setIsUpsertHeader(headers);
@ -2167,7 +2184,8 @@ var DocumentClient = Base.defineClass(
/** @ignore */
replace: function (resource, path, type, id, initialHeaders, options, callback) {
initialHeaders = initialHeaders || this.defaultHeaders;
initialHeaders = initialHeaders || Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
var headers = Base.getHeaders(this, initialHeaders, "put", path, id, type, options);
var that = this;
@ -2179,7 +2197,8 @@ var DocumentClient = Base.defineClass(
/** @ignore */
read: function (path, type, id, initialHeaders, options, callback) {
initialHeaders = initialHeaders || this.defaultHeaders;
initialHeaders = initialHeaders || Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
var headers = Base.getHeaders(this, initialHeaders, "get", path, id, type, options);
var that = this;
@ -2191,7 +2210,8 @@ var DocumentClient = Base.defineClass(
/** @ignore */
deleteResource: function (path, type, id, initialHeaders, options, callback) {
initialHeaders = initialHeaders || this.defaultHeaders;
initialHeaders = initialHeaders || Base.extend({}, this.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
var headers = Base.getHeaders(this, initialHeaders, "delete", path, id, type, options);
var that = this;
@ -2297,6 +2317,7 @@ var DocumentClient = Base.defineClass(
// Query operations will use ReadEndpoint even though it uses GET(for queryFeed) and POST(for regular query operations)
this._globalEndpointManager.getReadEndpoint(function (readEndpoint) {
var initialHeaders = Base.extend({}, documentclient.defaultHeaders);
initialHeaders = Base.extend(initialHeaders, options && options.initialHeaders);
if (query === undefined) {
var headers = Base.getHeaders(documentclient, initialHeaders, "get", path, id, type, options, partitionKeyRangeId);
@ -2391,7 +2412,7 @@ var DocumentClient = Base.defineClass(
} else {
return {
valid: false,
error: new Error(this.sprintf("The partition resolver does not implement method %s. The type of %s is \"%s\"", functionName, functionName, typeof partionResolver[functionName]))
error: new Error(util.format("The partition resolver does not implement method %s. The type of %s is \"%s\"", functionName, functionName, typeof partionResolver[functionName]))
};
}
},
@ -2429,7 +2450,7 @@ var DocumentClient = Base.defineClass(
}
if (!(headers instanceof Object)) {
throw new Error(this.sprintf('The "headers" parameter must be an instance of "Object". Actual type is: "%s".', typeof headers));
throw new Error(util.format('The "headers" parameter must be an instance of "Object". Actual type is: "%s".', typeof headers));
}
headers[Constants.HttpHeaders.IsUpsert] = true;
@ -2446,30 +2467,19 @@ var DocumentClient = Base.defineClass(
callback = optionsIn;
options = new Object();
} else if (typeof optionsIn !== 'object') {
throw new Error(this.sprintf('The "options" parameter must be of type "object". Actual type is: "%s".', typeof optionsIn));
throw new Error(util.format('The "options" parameter must be of type "object". Actual type is: "%s".', typeof optionsIn));
} else {
options = optionsIn;
}
// callback
if (callbackIn !== undefined && typeof callbackIn !== 'function') {
throw new Error(this.sprintf('The "callback" parameter must be of type "function". Actual type is: "%s".', typeof callbackIn));
throw new Error(util.format('The "callback" parameter must be of type "function". Actual type is: "%s".', typeof callbackIn));
} else if (typeof callbackIn === 'function') {
callback = callbackIn
}
return { options: options, callback: callback };
},
/** @ignore */
// Like C sprintf, currently only works for %s and %%.
sprintf: function (format) {
var args = arguments;
var i = 1;
return format.replace(/%((%)|s)/g, function (matchStr, subMatch1, subMatch2) {
// In case of %% subMatch2 would be '%'.
return subMatch2 || args[i++];
});
}
}
);
@ -2478,29 +2488,37 @@ var DocumentClient = Base.defineClass(
/**
* The request options
* @typedef {Object} RequestOptions - Options that can be specified for a request issued to the DocumentDB servers.
* @property {string} [preTriggerInclude] - Indicates what is the pre trigger to be invoked before the operation.
* @property {string} [postTriggerInclude] - Indicates what is the post trigger to be invoked after the operation.
* @property {object} [accessCondition] - Conditions Associated with the request.
* @property {string} accessCondition.type - Conditional HTTP method header type (IfMatch or IfNoneMatch).
* @property {string} accessCondition.condition - Conditional HTTP method header value (the _etag field from the last version you read).
* @property {string} [indexingDirective] - Specifies indexing directives (index, do not index .. etc).
* @property {string} [consistencyLevel] - Consistency level required by the client.
* @property {string} [sessionToken] - Token for use with Session consistency.
* @property {number} [resourceTokenExpirySeconds] - Expiry time (in seconds) for resource token associated with permission (applicable only for requests on permissions).
* @property {string} [offerType] - Offer type when creating document collections.
* @property {boolean} [offerEnableRUPerMinuteThroughput] - Represents Request Units(RU)/Minute throughput is enabled/disabled for a collection in the Azure DocumentDB database service.
* <p>This option is only valid when creating a document collection.</p>
* @property {boolean} [disableRUPerMinuteUsage] - DisableRUPerMinuteUsage is used to enable/disable Request Units(RUs)/minute capacity to serve the request if regular provisioned RUs/second is exhausted.
* @property {boolean} [enableScriptLogging] - Enables or disables logging in JavaScript stored procedures.
* @property {string} [indexingDirective] - Specifies indexing directives (index, do not index .. etc).
* @property {boolean} [offerEnableRUPerMinuteThroughput] - Represents Request Units(RU)/Minute throughput is enabled/disabled for a collection in the Azure DocumentDB database service.
* @property {number} [offerThroughput] - The offer throughput provisioned for a collection in measurement of Requests-per-Unit in the Azure DocumentDB database service.
* @property {string} [offerType] - Offer type when creating document collections.
* <p>This option is only valid when creating a document collection.</p>
* @property {string} [partitionKey] - Specifies a partition key definition for a particular path in the Azure DocumentDB database service.
* @property {boolean} [populateQuotaInfo] - Enables/disables getting document collection quota related stats for document collection read requests.
* @property {string} [postTriggerInclude] - Indicates what is the post trigger to be invoked after the operation.
* @property {string} [preTriggerInclude] - Indicates what is the pre trigger to be invoked before the operation.
* @property {number} [resourceTokenExpirySeconds] - Expiry time (in seconds) for resource token associated with permission (applicable only for requests on permissions).
* @property {string} [sessionToken] - Token for use with Session consistency.
*/
/**
* The feed options
* @typedef {Object} FeedOptions - The feed options and query methods.
* @property {number} [maxItemCount] - Max number of items to be returned in the enumeration operation.
* @property {string} [continuation] - Opaque token for continuing the enumeration.
* @property {string} [sessionToken] - Token for use with Session consistency.
* @property {boolean} [EnableScanInQuery] - Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths.
* @property {boolean} [disableRUPerMinuteUsage] - DisableRUPerMinuteUsage is used to enable/disable Request Units(RUs)/minute capacity to serve the request if regular provisioned RUs/second is exhausted.
* @typedef {Object} FeedOptions - The feed options and query methods.
* @property {string} [continuation] - Opaque token for continuing the enumeration.
* @property {boolean} [disableRUPerMinuteUsage] - DisableRUPerMinuteUsage is used to enable/disable Request Units(RUs)/minute capacity to serve the request if regular provisioned RUs/second is exhausted.
* @property {boolean} [enableCrossPartitionQuery] - A value indicating whether users are enabled to send more than one request to execute the query in the Azure DocumentDB database service.
<p>More than one request is necessary if the query is not scoped to a single partition key value.</p>
* @property {boolean} [enableScanInQuery] - Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths.
* @property {number} [maxDegreeOfParallelism] - The maximum number of concurrent operations that run client side during parallel query execution in the Azure DocumentDB database service. Negative values make the system automatically decide the number of concurrent operations to run.
* @property {number} [maxItemCount] - Max number of items to be returned in the enumeration operation.
* @property {string} [partitionKey] - Specifies a partition key definition for a particular path in the Azure DocumentDB database service.
* @property {string} [sessionToken] - Token for use with Session consistency.
*/
/**

View file

@ -26,6 +26,7 @@ SOFTWARE.
var Base = require("../base");
var MurmurHash = require('./murmurHash.js').MurmurHash;
//SCRIPT START
var ConsistentHashRing = Base.defineClass(
/**
* Initializes a new instance of the ConsistentHashRing

View file

@ -26,6 +26,7 @@ SOFTWARE.
var Base = require('../base');
var ConsistentHashRing = require('./consistentHashRing.js').ConsistentHashRing;
//SCRIPT START
var HashPartitionResolver = Base.defineClass(
/**
* HashPartitionResolver implements partitioning based on the value of a hash function,

View file

@ -25,6 +25,7 @@ SOFTWARE.
var Base = require("../base");
//SCRIPT START
var MurmurHash = Base.defineClass(
undefined,
undefined,

View file

@ -24,7 +24,10 @@ SOFTWARE.
"use strict"
var Base = require("./base"),
Regexes = require("./constants").RegularExpressions;
Constants = require("./constants");
var Regexes = Constants.RegularExpressions,
ResourceTypes = Constants.ResourceTypes;
//SCRIPT START
@ -69,6 +72,22 @@ var Helper = Base.defineClass(
return true;
},
getResourceIdFromPath: function(resourcePath) {
if (!resourcePath || typeof resourcePath !== "string") {
return null;
}
var trimmedPath = this.trimSlashFromLeftAndRight(resourcePath);
var pathSegments = trimmedPath.split('/');
//number of segments of a path must always be even
if (pathSegments.length % 2 !== 0) {
return null;
}
return pathSegments[pathSegments.length - 1];
}
}
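Illustrative inputs and outputs for the new helper (paths invented); an odd segment count means the path ends on a resource-type segment, so there is no id to extract:

// Helper.getResourceIdFromPath("/dbs/db1/colls/coll1")  -> "coll1"
// Helper.getResourceIdFromPath("dbs/db1/")              -> "db1"
// Helper.getResourceIdFromPath("/dbs/db1/colls")        -> null  (3 segments, odd)
// Helper.getResourceIdFromPath(42)                      -> null  (not a string)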

View file

@ -26,9 +26,19 @@ SOFTWARE.
var Constants = require("./constants");
var os = require("os");
var util = require("util");
var semaphore = require("semaphore");
var Platform = {
/** @ignore */
getPlatformDefaultHeaders: function () {
var defaultHeaders = {};
defaultHeaders[Constants.HttpHeaders.UserAgent] = Platform.getUserAgent();
return defaultHeaders;
},
/** @ignore */
getDecodedDataLength: function (encodedData) {
var buffer = new Buffer(encodedData, "base64");
return buffer.length;
},
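For example (input invented): eight base64 characters ending in two padding characters decode to four bytes, which is exactly the length check the resource-id validation in base.js above now delegates here:

// "AQIDBA==" is the base64 encoding of the bytes [0x01, 0x02, 0x03, 0x04]
Platform.getDecodedDataLength("AQIDBA==");   // -> 4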
/** @ignore */
getUserAgent: function () {
// gets the user agent in the following format

View file

@ -25,8 +25,10 @@ SOFTWARE.
var Base = require("../base")
, DocumentProducer = require("./documentProducer")
, OrderByDocumentProducerComparator = DocumentProducer.OrderByDocumentProducerComparator;
//SCRIPT START
var AverageAggregator = Base.defineClass(
/**
@ -112,7 +114,7 @@ var MinAggregator = Base.defineClass(
*/
function () {
this.value = undefined;
this.comparer = new DocumentProducer.OrderByDocumentProducerComparator("Ascending");
this.comparer = new OrderByDocumentProducerComparator("Ascending");
},
{
/**
@ -154,7 +156,7 @@ var MaxAggregator = Base.defineClass(
*/
function () {
this.value = undefined;
this.comparer = new DocumentProducer.OrderByDocumentProducerComparator("Ascending");
this.comparer = new OrderByDocumentProducerComparator("Ascending");
},
{
/**
@ -223,7 +225,7 @@ var SumAggregator = Base.defineClass(
}
);
//SCRIPT END
//SCRIPT END
if (typeof exports !== "undefined") {
exports.AverageAggregator = AverageAggregator;
@ -231,4 +233,4 @@ if (typeof exports !== "undefined") {
exports.MinAggregator = MinAggregator;
exports.MaxAggregator = MaxAggregator;
exports.SumAggregator = SumAggregator;
}
}

View file

@ -241,7 +241,7 @@ var OrderByDocumentProducerComparator = Base.defineClass(
function (sortOrder) {
this.sortOrder = sortOrder;
this.targetPartitionKeyRangeDocProdComparator = new DocumentProducer.createTargetPartitionKeyRangeComparator();
this.targetPartitionKeyRangeDocProdComparator = DocumentProducer.createTargetPartitionKeyRangeComparator();
this._typeOrdComparator = Object.freeze({
NoValue: {

View file

@ -26,6 +26,12 @@ SOFTWARE.
var Base = require("../base")
, aggregators = require('./aggregators');
var AverageAggregator = aggregators.AverageAggregator
, CountAggregator = aggregators.CountAggregator
, MaxAggregator = aggregators.MaxAggregator
, MinAggregator = aggregators.MinAggregator
, SumAggregator = aggregators.SumAggregator;
//SCRIPT START
var OrderByEndpointComponent = Base.defineClass(

View file

@ -28,19 +28,19 @@ var Base = require("../base")
, DefaultQueryExecutionContext = require("./defaultQueryExecutionContext")
, PriorityQueue = require("priorityqueuejs")
, SmartRoutingMapProvider = require("../routing/smartRoutingMapProvider")
, CollectionRoutingMap = require("../routing/inMemoryCollectionRoutingMap")
, InMemoryCollectionRoutingMap = require("../routing/inMemoryCollectionRoutingMap")
, DocumentProducer = require("./documentProducer")
, QueryExecutionInfoParser = require("./partitionedQueryExecutionContextInfoParser")
, PartitionedQueryExecutionContextInfoParser = require("./partitionedQueryExecutionContextInfoParser")
, bs = require("binary-search-bounds")
, HeaderUtils = require("./headerUtils")
, semaphore = require("semaphore")
, assert = require('assert');
var QueryRange = CollectionRoutingMap.QueryRange;
var FormatPlaceHolder = "{documentdb-formattableorderbyquery-filter}";
var PartitionKeyRangeConstants = CollectionRoutingMap._PartitionKeyRange;
var QueryRange = InMemoryCollectionRoutingMap.QueryRange;
var _PartitionKeyRange = InMemoryCollectionRoutingMap._PartitionKeyRange;
//SCRIPT START
var ParallelQueryExecutionContext = Base.defineClass(
/**
* Provides the ParallelQueryExecutionContext.
@ -56,7 +56,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
* @param {object} partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo
* @ignore
*/
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
function (documentclient, collectionLink, query, options, partitionedQueryExecutionInfo) {
this.documentclient = documentclient;
this.collectionLink = collectionLink;
this.query = query;
@ -65,7 +65,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
this.err = undefined;
this.state = ParallelQueryExecutionContext.STATES.start;
this.routingProvider = new SmartRoutingMapProvider(this.documentclient);
this.sortOrders = QueryExecutionInfoParser.parseOrderBy(this.paritionedQueryExecutionInfo);
this.sortOrders = PartitionedQueryExecutionContextInfoParser.parseOrderBy(this.paritionedQueryExecutionInfo);
if (Array.isArray(this.sortOrders) && this.sortOrders.length > 0) {
this.documentProducerComparator = DocumentProducer.createOrderByComparator(this.sortOrders);
@ -83,7 +83,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
this.orderByPQ = new PriorityQueue(function (a, b) { return that.documentProducerComparator(b, a); });
this.state = ParallelQueryExecutionContext.STATES.started;
this.sem = require('semaphore')(1);
this.sem = new semaphore(1);
this.requestContinuation = options ? options.continuation : null;
@ -93,13 +93,14 @@ var ParallelQueryExecutionContext = Base.defineClass(
var createDocumentProducersAndFillUpPriorityQueueFunc = function () {
// ensure the lock is released after finishing up
that._onTargetPartitionRanges(function (err, targetPartitionRanges) {
that.waitingForInternalExcecutionContexts = targetPartitionRanges.length;
if (err) {
that.err = err;
// release the lock
that.sem.leave();
return;
}
that.waitingForInternalExcecutionContexts = targetPartitionRanges.length;
var maxDegreeOfParallelism = options.maxDegreeOfParallelism || 1;
if (maxDegreeOfParallelism > 0) {
@ -107,7 +108,8 @@ var ParallelQueryExecutionContext = Base.defineClass(
} else {
maxDegreeOfParallelism = targetPartitionRanges.length;
}
var parallelismSem = require('semaphore')(Math.max(maxDegreeOfParallelism, 1));
var parallelismSem = semaphore(Math.max(maxDegreeOfParallelism, 1));
var targetPartitionQueryExecutionContextList = [];
@ -186,12 +188,12 @@ var ParallelQueryExecutionContext = Base.defineClass(
partitionKeyRanges) {
var startRange = {};
startRange[PartitionKeyRangeConstants.MinInclusive] = suppliedCompositeContinuationToken.range.min;
startRange[PartitionKeyRangeConstants.MaxExclusive] = suppliedCompositeContinuationToken.range.max;
startRange[_PartitionKeyRange.MinInclusive] = suppliedCompositeContinuationToken.range.min;
startRange[_PartitionKeyRange.MaxExclusive] = suppliedCompositeContinuationToken.range.max;
var vbCompareFunction = function (x, y) {
if (x[PartitionKeyRangeConstants.MinInclusive] > y[PartitionKeyRangeConstants.MinInclusive]) return 1;
if (x[PartitionKeyRangeConstants.MinInclusive] < y[PartitionKeyRangeConstants.MinInclusive]) return -1;
if (x[_PartitionKeyRange.MinInclusive] > y[_PartitionKeyRange.MinInclusive]) return 1;
if (x[_PartitionKeyRange.MinInclusive] < y[_PartitionKeyRange.MinInclusive]) return -1;
return 0;
}
@ -338,7 +340,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
* @instance
* @param {callback} callback - Function to execute for the current element. the function takes two parameters error, element.
*/
current: function (callback) {
current: function (callback) {
if (this.err) {
return callback(this.err, undefined, that._getAndResetActiveResponseHeaders());
}
@ -366,7 +368,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
* @instance
* @returns {Boolean} true if there is other elements to process in the ParallelQueryExecutionContext.
*/
hasMoreResults: function () {
hasMoreResults: function () {
return !(this.state === ParallelQueryExecutionContext.STATES.ended || this.err !== undefined);
},
@ -451,8 +453,8 @@ var ParallelQueryExecutionContext = Base.defineClass(
}
var min = documentProducer.targetPartitionKeyRange[PartitionKeyRangeConstants.MinInclusive];
var max = documentProducer.targetPartitionKeyRange[PartitionKeyRangeConstants.MaxExclusive];
var min = documentProducer.targetPartitionKeyRange[_PartitionKeyRange.MinInclusive];
var max = documentProducer.targetPartitionKeyRange[_PartitionKeyRange.MaxExclusive];
var range = {
'min': min,
'max': max,
@ -555,11 +557,13 @@ var ParallelQueryExecutionContext = Base.defineClass(
_createTargetPartitionQueryExecutionContext: function (partitionKeyTargetRange, continuationToken) {
// creates target partition range Query Execution Context
var rewrittenQuery = QueryExecutionInfoParser.parseRewrittenQuery(this.paritionedQueryExecutionInfo);
var rewrittenQuery = PartitionedQueryExecutionContextInfoParser.parseRewrittenQuery(this.paritionedQueryExecutionInfo);
var query = this.query;
if (typeof (query) === 'string') {
query = { 'query': query };
}
var FormatPlaceHolder = "{documentdb-formattableorderbyquery-filter}";
if (rewrittenQuery) {
query = JSON.parse(JSON.stringify(query));
// We hardcode the formattable filter to true for now
@ -579,7 +583,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
_onTargetPartitionRanges: function (callback) {
// invokes the callback when the target partition ranges are ready
var parsedRanges = QueryExecutionInfoParser.parseQueryRanges(this.paritionedQueryExecutionInfo);
var parsedRanges = PartitionedQueryExecutionContextInfoParser.parseQueryRanges(this.paritionedQueryExecutionInfo);
var queryRanges = parsedRanges.map(function (item) { return QueryRange.parseFromDict(item); });
return this.routingProvider.getOverlappingRanges(callback, this.collectionLink, queryRanges);
},
@ -590,6 +594,7 @@ var ParallelQueryExecutionContext = Base.defineClass(
}
);
//SCRIPT END
if (typeof exports !== "undefined") {

View file

@ -60,7 +60,7 @@ var PartitionedQueryExecutionContextInfoParser = Base.defineClass(
if (typeof (path) === 'string') {
return item[path];
}
assert(Array.isArray(path),
assert.ok(Array.isArray(path),
util.format("%s is expected to be an array", JSON.stringify(path)));
for (var index = 0; index < path.length; index++) {
item = item[path[index]];

View file

@ -27,9 +27,14 @@ var Base = require("../base")
, DefaultQueryExecutionContext = require("./defaultQueryExecutionContext")
, endpointComponent = require('./endpointComponent')
, assert = require("assert")
, QueryExecutionInfoParser = require("./partitionedQueryExecutionContextInfoParser")
, PartitionedQueryExecutionContextInfoParser = require("./partitionedQueryExecutionContextInfoParser")
, HeaderUtils = require("./headerUtils");
var AggregateEndpointComponent = endpointComponent.AggregateEndpointComponent
, OrderByEndpointComponent = endpointComponent.OrderByEndpointComponent
, TopEndpointComponent = endpointComponent.TopEndpointComponent;
//SCRIPT START
var PipelinedQueryExecutionContext = Base.defineClass(
/**
@ -49,19 +54,19 @@ var PipelinedQueryExecutionContext = Base.defineClass(
if (this.pageSize === undefined) {
this.pageSize = PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE;
}
var orderBy = QueryExecutionInfoParser.parseOrderBy(partitionedQueryExecutionInfo);
var orderBy = PartitionedQueryExecutionContextInfoParser.parseOrderBy(partitionedQueryExecutionInfo);
if (Array.isArray(orderBy) && orderBy.length > 0) {
this.endpoint = new endpointComponent.OrderByEndpointComponent(this.endpoint);
this.endpoint = new OrderByEndpointComponent(this.endpoint);
}
var aggregates = QueryExecutionInfoParser.parseAggregates(partitionedQueryExecutionInfo);
var aggregates = PartitionedQueryExecutionContextInfoParser.parseAggregates(partitionedQueryExecutionInfo);
if (Array.isArray(aggregates) && aggregates.length > 0) {
this.endpoint = new endpointComponent.AggregateEndpointComponent(this.endpoint, aggregates);
this.endpoint = new AggregateEndpointComponent(this.endpoint, aggregates);
}
var top = QueryExecutionInfoParser.parseTop(partitionedQueryExecutionInfo);
var top = PartitionedQueryExecutionContextInfoParser.parseTop(partitionedQueryExecutionInfo);
if (typeof (top) === 'number') {
this.endpoint = new endpointComponent.TopEndpointComponent(this.endpoint, top);
this.endpoint = new TopEndpointComponent(this.endpoint, top);
}
},
{

View file

@ -83,7 +83,7 @@ var ProxyQueryExecutionContext = Base.defineClass(
assert.notStrictEqual(this.resourceLink, undefined, "for top/orderby resourceLink is required.");
assert(!Array.isArray(this.resourceLink) || this.resourceLink.length === 1,
assert.ok(!Array.isArray(this.resourceLink) || this.resourceLink.length === 1,
"for top/orderby exactly one collectionLink is required");
var collectionLink = undefined;

View file

@ -25,7 +25,7 @@ SOFTWARE.
var Base = require("./base"),
Constants = require("./constants"),
QueryExecutionContext = require("./queryExecutionContext/proxyQueryExecutionContext");
ProxyQueryExecutionContext = require("./queryExecutionContext/proxyQueryExecutionContext");
//SCRIPT START
var QueryIterator = Base.defineClass(
@ -47,7 +47,6 @@ var QueryIterator = Base.defineClass(
this.options = options;
this.resourceLink = resourceLink;
this.queryExecutionContext = this._createQueryExecutionContext();
},
{
/**
@ -63,12 +62,12 @@ var QueryIterator = Base.defineClass(
this._forEachImplementation(callback);
},
/**
* Execute a provided function on the next element in the QueryIterator.
* @memberof QueryIterator
* @instance
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
*/
/**
* Execute a provided function on the next element in the QueryIterator.
* @memberof QueryIterator
* @instance
* @param {callback} callback - Function to execute for each element. the function takes two parameters error, element.
*/
nextItem: function (callback) {
this.queryExecutionContext.nextItem(callback);
},
@ -115,7 +114,7 @@ var QueryIterator = Base.defineClass(
*/
executeNext: function(callback) {
this.queryExecutionContext.fetchMore(function(err, resources, responseHeaders) {
if(err) {
if (err) {
return callback(err, undefined, responseHeaders);
}
@ -133,7 +132,7 @@ var QueryIterator = Base.defineClass(
},
/** @ignore */
_toArrayImplementation: function(callback){
_toArrayImplementation: function(callback) {
var that = this;
this.queryExecutionContext.nextItem(function (err, resource, headers) {
@ -145,13 +144,16 @@ var QueryIterator = Base.defineClass(
that.toArrayLastResHeaders = headers;
if (resource === undefined) {
// no more results
return callback(undefined, that.toArrayTempResources, that.toArrayLastResHeaders);
}
}
that.toArrayTempResources = that.toArrayTempResources.concat(resource);
that._toArrayImplementation(callback);
that.toArrayTempResources.push(resource);
setImmediate(function () {
that._toArrayImplementation(callback);
});
});
},
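Two independent fixes land in _toArrayImplementation: push replaces concat (constant-time append instead of re-copying the accumulated array on every item), and the recursion is rescheduled through setImmediate so a large result set cannot overflow the call stack. A minimal sketch of the pattern outside the SDK:

// drain pulls items one at a time until nextItem yields undefined
function drain(nextItem, results, done) {
    nextItem(function (err, item) {
        if (err) return done(err);
        if (item === undefined) return done(undefined, results);  // exhausted
        results.push(item);                  // O(1) append, no concat copies
        setImmediate(function () {           // fresh stack frame each iteration
            drain(nextItem, results, done);
        });
    });
}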
@ -174,13 +176,15 @@ var QueryIterator = Base.defineClass(
}
// recursively call itself to iterate to the remaining elements
that._forEachImplementation(callback);
setImmediate(function () {
that._forEachImplementation(callback);
});
});
},
/** @ignore */
_createQueryExecutionContext: function () {
return new QueryExecutionContext(this.documentclient, this.query, this.options, this.fetchFunctions, this.resourceLink);
return new ProxyQueryExecutionContext(this.documentclient, this.query, this.options, this.fetchFunctions, this.resourceLink);
}
}
);

View file

@ -116,25 +116,6 @@ var Range = Base.defineClass(
return false;
},
/** @ignore */
_toArrayImplementation: function(callback){
var that = this;
if (this._canFetchMore()) {
this._fetchMore(function(err, resources, headers){
if(err) {
return callback(err, undefined, headers);
}
that.resHeaders = headers;
that.resources = that.resources.concat(resources);
that._toArrayImplementation(callback);
});
} else {
this._state = this._states.ended;
callback(undefined, this.resources, this.resHeaders);
}
},
/** @ignore */
_toString: function () {
return String(this.low) + "," + String(this.high);

View file

@ -24,23 +24,31 @@ SOFTWARE.
"use strict";
var Documents = require("./documents")
, Constants = require("./constants")
, https = require("https")
, url = require("url")
, querystring = require("querystring")
, RetryUtility = require("./retryUtility")
// Dedicated Agent for socket pooling
, keepAliveAgent = new https.Agent({ keepAlive: true, maxSockets: Infinity });
, Constants = require("./constants")
, https = require("https")
, url = require("url")
, querystring = require("querystring")
, RetryUtility = require("./retryUtility")
// Dedicated Agent for socket pooling
, keepAliveAgent = new https.Agent({ keepAlive: true, maxSockets: Infinity });
//----------------------------------------------------------------------------
// Utility methods
//
function javaScriptFriendlyJSONStringify(s) {
// two line terminators (Line Separator and Paragraph Separator) do not need to be escaped in JSON
// but must be escaped in JavaScript.
return JSON.stringify(s).
replace(/\u2028/g, '\\u2028').
replace(/\u2029/g, '\\u2029');
}
function bodyFromData(data) {
if (data.pipe) return data;
if (Buffer.isBuffer(data)) return data;
if (typeof data === "string") return data;
if (typeof data === "object") return JSON.stringify(data);
if (typeof data === "object") return javaScriptFriendlyJSONStringify(data);
return undefined;
}
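U+2028 and U+2029 are legal unescaped inside JSON strings but act as line terminators in JavaScript source, so any body that may be evaluated as script (for example stored-procedure payloads) needs them escaped; a quick check:

// JSON.stringify("a\u2028b") keeps the raw separator in the output string,
// while javaScriptFriendlyJSONStringify("a\u2028b") returns '"a\\u2028b"',
// which is safe to embed in JavaScript source.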
@ -51,7 +59,7 @@ function createRequestObject(connectionPolicy, requestOptions, callback){
httpsRequest.abort();
}
var isMedia = ( requestOptions.path.indexOf("media") > -1 );
var isMedia = (requestOptions.path.indexOf("//media") === 0);
var httpsRequest = https.request(requestOptions, function(response) {
// In case of media response, return the stream to the user and the user will need to handle reading the stream.

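The tightened media check matters because a collection may legitimately be named "media" (the regression the new collectionNamingTest below exercises); the old substring test flagged such requests as media downloads, while the new test only matches paths that actually begin with the media segment. Sample paths are illustrative:

// old: "media" anywhere in the path
"/dbs/db1/colls/media/docs/doc1".indexOf("media") > -1;    // true  (wrongly treated as media)
// new: the path must start with "//media"
"/dbs/db1/colls/media/docs/doc1".indexOf("//media") === 0; // false (normal resource request)
"//media/attachment1".indexOf("//media") === 0;            // true  (media request)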
View file

@ -146,7 +146,7 @@ var InMemoryCollectionRoutingMap = Base.defineClass(
var index = bs.le(sortedLow, { v: effectivePartitionKeyValue, b: true }, this._vbCompareFunction);
// that's an error
assert(index >=0, "error in collection routing map, queried partition key is less than the start range.");
assert.ok(index >= 0, "error in collection routing map, queried partition key is less than the start range.");
return this._orderedPartitionKeyRanges[index];
},
@ -192,10 +192,10 @@ var InMemoryCollectionRoutingMap = Base.defineClass(
continue;
}
var minIndex = bs.le(sortedLow, { v: queryRange.min, b: !queryRange.isMinInclusive }, this._vbCompareFunction);
assert(minIndex >= 0, "error in collection routing map, queried value is less than the start range.");
assert.ok(minIndex >= 0, "error in collection routing map, queried value is less than the start range.");
var maxIndex = bs.ge(sortedHigh, { v: queryRange.max, b: queryRange.isMaxInclusive }, this._vbCompareFunction);
assert(maxIndex < sortedHigh.length, "error in collection routing map, queried value is greater than the end range.");
assert.ok(maxIndex < sortedHigh.length, "error in collection routing map, queried value is greater than the end range.");
// the for loop doesn't invoke any async callback
for (var j = minIndex; j < maxIndex + 1; j++) {

View file

@ -24,9 +24,10 @@ SOFTWARE.
"use strict";
var Base = require("../base")
, CollectionRoutingMap = require("./inMemoryCollectionRoutingMap");
, InMemoryCollectionRoutingMap = require("./inMemoryCollectionRoutingMap")
, semaphore = require("semaphore");
var CollectionRoutingMapFactory = CollectionRoutingMap.CollectionRoutingMapFactory;
var CollectionRoutingMapFactory = InMemoryCollectionRoutingMap.CollectionRoutingMapFactory;
//SCRIPT START
var PartitionKeyRangeCache = Base.defineClass(
@ -41,7 +42,7 @@ var PartitionKeyRangeCache = Base.defineClass(
function (documentclient) {
this.documentclient = documentclient;
this.collectionRoutingMapByCollectionId = {};
this.sem = require("semaphore")(1);
this.sem = semaphore(1);
},
{
/**

View file

@ -25,13 +25,13 @@ SOFTWARE.
var Base = require("../base")
, assert = require("assert")
, CollectionRoutingMap = require("./inMemoryCollectionRoutingMap")
, InMemoryCollectionRoutingMap = require("./inMemoryCollectionRoutingMap")
, PartitionKeyRangeCache = require("./partitionKeyRangeCache")
, util = require("util");
var CollectionRoutingMapFactory = CollectionRoutingMap.CollectionRoutingMapFactory;
var QueryRange = CollectionRoutingMap.QueryRange;
var _PartitionKeyRange = CollectionRoutingMap._PartitionKeyRange;
var CollectionRoutingMapFactory = InMemoryCollectionRoutingMap.CollectionRoutingMapFactory;
var QueryRange = InMemoryCollectionRoutingMap.QueryRange;
var _PartitionKeyRange = InMemoryCollectionRoutingMap._PartitionKeyRange;
//SCRIPT START
var SmartRoutingMapProvider = Base.defineClass(
@ -141,13 +141,13 @@ var SmartRoutingMapProvider = Base.defineClass(
}
var overlappingRanges = collectionRoutingMap.getOverlappingRanges(queryRange);
assert(overlappingRanges.length > 0, util.format("error: returned overlapping ranges for queryRange %s is empty", queryRange));
assert.ok(overlappingRanges.length > 0, util.format("error: returned overlapping ranges for queryRange %s is empty", queryRange));
partitionKeyRanges = partitionKeyRanges.concat(overlappingRanges);
var lastKnownTargetRange = QueryRange.parsePartitionKeyRange(partitionKeyRanges[partitionKeyRanges.length - 1]);
assert.notEqual(lastKnownTargetRange, undefined);
// the overlapping ranges must contain the requested range
assert(that._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0,
assert.ok(that._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0,
util.format("error: returned overlapping ranges %s does not contain the requested range %s", overlappingRanges, queryRange));
// the current range is contained in partitionKeyRanges just move forward

View file

@ -9,13 +9,14 @@
"database",
"cloud"
],
"version": "1.12.0",
"version": "1.12.1",
"author": "Microsoft Corporation",
"main": "./index.js",
"engine": {
"node": ">=0.8"
},
"devDependencies": {
"@types/node": "^8.0.7",
"eslint": "*",
"grunt": "^0.4.5",
"grunt-eslint": "^13.0.0",

View file

@ -0,0 +1,232 @@
/*
The MIT License (MIT)
Copyright (c) 2017 Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
"use strict";
var assert = require("assert"),
lib = require("../lib/"),
testConfig = require("./_testConfig.js"),
DocumentBase = lib.DocumentBase,
UriFactory = lib.UriFactory;
var Base = lib.Base,
DocumentDBClient = lib.DocumentClient;
var host = testConfig.host;
var masterKey = testConfig.masterKey;
describe("Authorization bug fix Test", function () {
/************** VARIABLES **************/
this.timeout(2500);
var client = new DocumentDBClient(host, { masterKey: masterKey });
var database = { id: "dbs" };
var collection = { id: "colls" };
var userReadPermission = { id: "User With Read Permission" };
var userAllPermission = { id: "User With All Permission" };
var collReadPermission = {
id: "collection Read Permission",
permissionMode: DocumentBase.PermissionMode.Read
};
var collAllPermission = {
id: "collection All Permission",
permissionMode: DocumentBase.PermissionMode.All
};
/************** METHODS **************/
var assertError = function (message, error) {
if (error) {
assert.fail("code: " + error.code + " " + message + error.body);
}
}
var cleanup = function (dbId, done) {
client.deleteDatabase(UriFactory.createDatabaseUri(dbId), function (err, db) {
//resource not found error
if (err && err.code == 404) {
return done();
}
assertError("error deleting database:", err);
return done();
});
};
var createResources = function (callback) {
//create a database
client.createDatabase(database, function (err, db) {
assertError("error creating database: ", err);
assert.equal(db.id, database.id, "database is not created properly");
database = db;
//create userReadPermission
client.createUser(database._self, userReadPermission, function (err, user) {
assertError("error creating userReadPermission: ", err);
assert.equal(userReadPermission.id, user.id, "userReadPermission is not created properly");
userReadPermission = user;
//create collection
client.createCollection(database._self, collection, function (err, coll) {
assertError("error creating document: ", err);
assert.equal(collection.id, coll.id, "coll1 is not created properly");
collection = coll;
//give permission to read collection, to userReadPermission
collReadPermission.resource = collection._self;
client.createPermission(userReadPermission._self, collReadPermission, function (err, readPermission) {
assertError("error creating permission: ", err);
assert.equal(readPermission.id, collReadPermission.id, "permission to read coll1 is not created properly");
collReadPermission = readPermission;
//create userAllPermission
client.createUser(database._self, userAllPermission, function (err, userAllPerm) {
assertError("error creating userAllPermission: ", err);
assert.equal(userAllPermission.id, userAllPerm.id, "userAllPermission is not created properly");
userAllPermission = userAllPerm;
collAllPermission.resource = collection._self;
client.createPermission(userAllPermission._self, collAllPermission, function (err, allPermission) {
assertError("error creating permission: ", err);
assert.equal(collAllPermission.id, allPermission.id, "permission to read coll2 is not created properly");
//permissions.push(permission);
collAllPermission = allPermission;
callback();
});
});
//create all permission for coll
});
});
});
});
};
var accessCollectionByPermission = function (documentClient, link, callback) {
//read collection
documentClient.readCollection(link, function (err, readColl) {
assertError("error reading collection", err);
assert.equal(readColl.id, collection.id, "invalid collection");
if (callback) {
callback();
}
});
};
var modifyCollectionByPermission = function (documentClient, link, callback) {
//delete collection
documentClient.deleteCollection(link, function (err, collection) {
assertError("error deleting collection", err);
if (callback) {
callback();
}
});
};
/************** TEST **************/
beforeEach(function (done) {
cleanup(database.id, done);
});
afterEach(function (done) {
cleanup(database.id, done);
});
it("Accessing collection by resourceTokens", function (done) {
createResources(function () {
var rTokens = {};
rTokens[collection.id] = collReadPermission._token;
var collectionUri = UriFactory.createDocumentCollectionUri(database.id, collection.id);
var clientReadPermission = new DocumentDBClient(host, { resourceTokens: rTokens });
accessCollectionByPermission(clientReadPermission, collectionUri, done);
});
});
it("Accessing collection by permissionFeed", function (done) {
createResources(function () {
var clientReadPermission = new DocumentDBClient(host, { permissionFeed: [collReadPermission] });
//self link must be used to access a resource using permissionFeed
accessCollectionByPermission(clientReadPermission, collection._self, done);
});
});
it("Accessing collection without permission fails", function (done) {
createResources(function () {
var clientNoPermission = new DocumentDBClient(host);
var collectionUri = UriFactory.createDocumentCollectionUri(database.id, collection.id);
clientNoPermission.readCollection(collectionUri, function (err, coll) {
assert(err !== undefined, "unauthorized access to database did not fail");
done();
});
});
});
it("Accessing document by permissionFeed of parent collection", function (done) {
createResources(function () {
client.createDocument(collection._self, { id: "document1" }, function (err, createdDoc) {
var clientReadPermission = new DocumentDBClient(host, { permissionFeed: [collReadPermission] });
assertError("error creating document", err);
assert.equal("document1", createdDoc.id, "invalid document creation");
clientReadPermission.readDocument(createdDoc._self, function (err, readDoc) {
assertError("error reading document with parent permission", err);
assert.equal(readDoc.id, createdDoc.id, "invalid document read");
done();
});
});
});
});
it("Modifying collection by resourceTokens", function (done) {
createResources(function () {
var rTokens = {};
rTokens[collection.id] = collAllPermission._token;
var collectionUri = UriFactory.createDocumentCollectionUri(database.id, collection.id);
var clientAllPermission = new DocumentDBClient(host, { resourceTokens: rTokens });
modifyCollectionByPermission(clientAllPermission, collectionUri, done);
});
});
it("Modifying collection by permissionFeed", function (done) {
createResources(function () {
var clientAllPermission = new DocumentDBClient(host, { permissionFeed: [collAllPermission] });
//self link must be used to access a resource using permissionFeed
modifyCollectionByPermission(clientAllPermission, collection._self, done);
});
});
});

View file

@ -185,38 +185,3 @@ describe("Base.parsePath", function () {
});
});
});
describe("Base._getUserAgent", function () {
var Contants = require("../lib/constants");
var os = require("os");
var util = require("util");
var Platform = require("../lib/platform");
it("_getUserAgent()", function () {
var userAgent = Base._getUserAgent();
var expectedUserAgent = util.format("%s/%s Nodejs/%s documentdb-nodejs-sdk/%s",
os.platform(), os.release(), process.version,
Contants.SDKVersion
);
assert.strictEqual(userAgent, expectedUserAgent, "invalid UserAgent format");
});
describe("Platform._getSafeUserAgentSegmentInfo()", function () {
it("Removing spaces", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo('a b c');
assert.strictEqual(safeString, 'abc');
});
it("empty string handling", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo('');
assert.strictEqual(safeString, 'unknown');
});
it("undefined", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo(undefined);
assert.strictEqual(safeString, 'unknown');
});
it("null", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo(null);
assert.strictEqual(safeString, 'unknown');
});
});
});

View file

@ -0,0 +1,158 @@
/*
The MIT License (MIT)
Copyright (c) 2017 Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
"use strict";
var lib = require("../lib/"),
assert = require("assert"),
testConfig = require("./_testConfig.js"),
Stream = require("stream"),
DocumentDBClient = lib.DocumentClient,
UriFactory = lib.UriFactory;
var host = testConfig.host;
var masterKey = testConfig.masterKey;
describe("Collection Naming Test", function () {
/**************** VARIABLES ****************/
var client = new DocumentDBClient(host, { masterKey: masterKey });
var databaseId = "collNamingTestDB";
var collectionId = "media";
var documentId = "doc1";
var attachmentId = "atch1";
/************** METHODS **************/
var assertError = function (error, message) {
if (error) {
assert.fail("code: " + error.code + " " + message + error.body);
}
}
var cleanup = function (dbId, done) {
client.deleteDatabase(UriFactory.createDatabaseUri(dbId), function (err, db) {
if (err && err.code === 404) {
return done();
}
assertError(err, "error deleting database");
return done();
});
};
var createReadableStream = function (firstChunk, secondChunk) {
var readableStream = new Stream.Readable();
var chunkCount = 0;
readableStream._read = function (n) {
if (chunkCount === 0) {
this.push(firstChunk || "first chunk ");
} else if (chunkCount === 1) {
this.push(secondChunk || "second chunk");
} else {
this.push(null);
}
chunkCount++;
};
return readableStream;
};
var createResources = function (specialName, callback) {
//create database
client.createDatabase({ id: databaseId }, function (err, db) {
assertError(err, "error creating database");
assert.equal(db.id, databaseId, "database is not created properly");
//create collection
var dbUri = UriFactory.createDatabaseUri(databaseId);
client.createCollection(dbUri, { id: collectionId }, function (err, collection) {
assertError(err, "error creating collection");
assert.equal(collection.id, collectionId, "collection is not created properly");
//createDocument
var collectionUri = UriFactory.createDocumentCollectionUri(databaseId, collectionId);
client.createDocument(collectionUri, { id: documentId }, function (err, document) {
assertError(err, "error creating document");
assert.equal(document.id, documentId, "document is not created properly");
//create attachment and upload media
var mediaOption = { slug: attachmentId, contentType: "application/text" };
var readableStream = createReadableStream("UPLOADING ", "MEDIA");
var documentUri = UriFactory.createDocumentUri(databaseId, collectionId, documentId);
client.createAttachmentAndUploadMedia(documentUri, readableStream, mediaOption, function (err, attachment) {
assertError(err, "error creating attachment");
assert.equal(attachment.id, attachmentId, "attachment is not created properly");
callback();
});
});
});
});
};
var readCollectionWithSpecialName = function (specialName, done) {
var collectionUri = UriFactory.createDocumentCollectionUri(databaseId, collectionId);
client.readCollection(collectionUri, function (err, collection) {
assertError(err, "error reading collection [" + collectionId + "]");
assert.equal(collection.id, collectionId, "collectionIds do not match");
done();
});
};
var readMedia = function (done) {
//read attachment
var attachmentUri = UriFactory.createAttachmentUri(databaseId, collectionId, documentId, attachmentId);
client.readAttachment(attachmentUri, function (err, attachment) {
assertError(err, "error reading attachment");
assert.equal(attachment.id, attachmentId, "attachmentIds don't match");
//read media
client.readMedia(attachment.media, function (err, media) {
assertError(err, "error reading media");
assert.equal(media, "UPLOADING MEDIA");
done();
});
});
};
/************** TESTS **************/
beforeEach(function (done) {
cleanup(databaseId, done);
});
afterEach(function (done) {
cleanup(databaseId, done);
});
it("Accessing a collection with 'media' in its name", function (done) {
createResources("media", function () {
readCollectionWithSpecialName("media", done);
});
});
it("Accessing media in a collection", function (done) {
createResources("media", function () {
readCollectionWithSpecialName("media", done);
});
});
});
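The suite above guards against path-segment collisions: "media" is also a DocumentDB resource-type keyword, so a collection literally named "media" must still resolve correctly. The createReadableStream helper follows Node's stream.Readable contract, where each _read call pushes one chunk and pushing null ends the stream. A minimal standalone sketch of that pattern (the helper name and chunk values are illustrative, not from the SDK):

"use strict";
var Stream = require("stream");

// Pushes two fixed chunks, then signals end-of-stream with null.
function makeTwoChunkStream(first, second) {
    var readable = new Stream.Readable();
    var calls = 0;
    readable._read = function () {
        if (calls === 0) {
            this.push(first);
        } else if (calls === 1) {
            this.push(second);
        } else {
            this.push(null); // null terminates the stream
        }
        calls++;
    };
    return readable;
}

var chunks = [];
makeTwoChunkStream("UPLOADING ", "MEDIA")
    .on("data", function (chunk) { chunks.push(chunk.toString()); })
    .on("end", function () {
        console.log(chunks.join("")); // prints "UPLOADING MEDIA"
    });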

View file

@ -94,7 +94,8 @@ describe("DocumentClient Tests", function () {
});
});
describe("sprintf", function () {
// we are using the util.format function instead.
describe.skip("sprintf", function () {
it("0 strings", function (done) {
assert.equal("foo", client.sprintf("foo"));
done();
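For reference, the util.format call that replaces the skipped sprintf helper substitutes %s placeholders left to right, so existing call sites translate directly. A small sketch (values illustrative):

var util = require("util");

// util.format fills %s placeholders in order, like sprintf("%s", ...).
console.log(util.format("foo"));                 // "foo" -- no placeholders
console.log(util.format("%s/%s", "dbs", "db1")); // "dbs/db1"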

View file

@@ -0,0 +1,60 @@
/*
The MIT License (MIT)
Copyright (c) 2014 Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
"use strict";
var assert = require("assert")
, Contants = require("../lib/constants")
, os = require("os")
, Platform = require("../lib/platform")
, util = require("util");
describe("Platform.getUserAgent", function () {
it("getUserAgent()", function () {
var userAgent = Platform.getUserAgent();
var expectedUserAgent = util.format("%s/%s Nodejs/%s documentdb-nodejs-sdk/%s",
os.platform(), os.release(), process.version,
Constants.SDKVersion
);
assert.strictEqual(userAgent, expectedUserAgent, "invalid UserAgent format");
});
describe("Platform._getSafeUserAgentSegmentInfo()", function () {
it("Removing spaces", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo('a b c');
assert.strictEqual(safeString, 'abc');
});
it("empty string handling", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo('');
assert.strictEqual(safeString, 'unknown');
});
it("undefined", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo(undefined);
assert.strictEqual(safeString, 'unknown');
});
it("null", function () {
var safeString = Platform._getSafeUserAgentSegmentInfo(null);
assert.strictEqual(safeString, 'unknown');
});
});
});
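Putting the asserted format together, the user agent is four segments joined by spaces. A sketch of what the test expects on a typical machine (all concrete values are illustrative; the real SDK version comes from the constants module):

var os = require("os");
var util = require("util");

// Mirrors the expected format asserted in the test above.
var userAgent = util.format("%s/%s Nodejs/%s documentdb-nodejs-sdk/%s",
    os.platform(), os.release(), process.version, "1.12.1");
// e.g. "linux/4.4.0-93-generic Nodejs/v6.11.2 documentdb-nodejs-sdk/1.12.1"
console.log(userAgent);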

View file

@ -1521,30 +1521,39 @@ describe("NodeJS CRUD Tests", function () {
// setup entities
setupEntities(isNameBased, client, function (entities) {
var resourceTokens = {};
if (isNameBased) {
resourceTokens[entities.coll1.id] = entities.permissionOnColl1._token;
resourceTokens[entities.doc1.id] = entities.permissionOnColl1._token;
}
else {
resourceTokens[entities.coll1._rid] = entities.permissionOnColl1._token;
resourceTokens[entities.doc1._rid] = entities.permissionOnColl1._token;
}
var col1Client = new DocumentDBClient(host, { resourceTokens: resourceTokens });
var coll1Link = getCollectionLink(isNameBased, entities.db, entities.coll1);
// 1. Success-- Use Col1 Permission to Read
col1Client.readCollection(entities.coll1._self, function (err, successColl1) {
col1Client.readCollection(coll1Link, function (err, successColl1) {
assert.equal(err, undefined, "error reading collections");
assert(successColl1 !== undefined, "error reading collection");
// 2. Failure-- Use Col1 Permission to delete
col1Client.deleteCollection(getCollectionLink(isNameBased, entities.db, successColl1), function (err, result) {
col1Client.deleteCollection(coll1Link, function (err, result) {
assert(err !== undefined, "expected to fail, no permission to delete");
// 3. Success-- Use Col1 Permission to Read All Docs
col1Client.readDocuments(successColl1._self).toArray(function (err, successDocuments) {
col1Client.readDocuments(coll1Link).toArray(function (err, successDocuments) {
assert.equal(err, undefined, "error reading documents");
assert(successDocuments !== undefined, "error reading documents");
assert.equal(successDocuments.length, 2, "Expected 2 Documents to be succesfully read");
// 4. Success-- Use Col1 Permission to Read Col1Doc1
col1Client.readDocument(entities.doc1._self, function (err, successDoc) {
var doc1Link = getDocumentLink(isNameBased, entities.db, entities.coll1, entities.doc1);
col1Client.readDocument(doc1Link, function (err, successDoc) {
assert.equal(err, undefined, "error reading document");
assert(successDoc !== undefined, "error reading document");
assert.equal(successDoc.id, entities.doc1.id, "Expected to read children using parent permissions");
var col2Client = new DocumentDBClient(host, { permissionFeed: [entities.permissionOnColl2] });
addUpsertWrapperMethods(col2Client, isUpsertTest);
var doc = { id: "new doc", CustomProperty1: "BBBBBB", customProperty2: 1000 };
col2Client.createOrUpsertDocument(getCollectionLink(isNameBased, entities.db, entities.coll2), doc, function (err, successDoc) {
col2Client.createOrUpsertDocument(entities.coll2._self, doc, function (err, successDoc) {
assert.equal(err, undefined, "error creating document");
assert(successDoc !== undefined, "error creating document");
assert.equal(successDoc.CustomProperty1, doc.CustomProperty1, "document should have been created successfully");
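The hunk above moves the permission tests onto name-based links. A sketch of the underlying pattern, assuming a permission on coll1 whose _token was fetched earlier (the account endpoint, ids, and token are hypothetical):

var lib = require("../lib/");
var DocumentDBClient = lib.DocumentClient;

// Keys are resource ids (name-based) or _rids; values are permission tokens.
var resourceTokens = {};
resourceTokens["coll1"] = "<permissionOnColl1._token>";

var scopedClient = new DocumentDBClient("https://myaccount.documents.azure.com:443/",
    { resourceTokens: resourceTokens });

// Reads within the granted scope succeed ...
scopedClient.readCollection("dbs/db1/colls/coll1", function (err, coll) {
    if (err) { return console.error("read failed", err); }
    console.log("read collection", coll.id);
    // ... while operations the permission does not cover (e.g. delete) fail.
});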
@ -2856,7 +2865,7 @@ describe("NodeJS CRUD Tests", function () {
client.createCollection(getDatabaseLink(isNameBased, db), collectionDefinition, collectionRequestOptions, function (err, collection) {
assert.equal(err, undefined, "error creating collection: " + JSON.stringify(err));
client.readCollection(getCollectionLink(isNameBased, db, collection), { populateQuotaInfo: true }, function (err, collection, headers) {
client.readCollection(getCollectionLink(isNameBased, db, collection), { populateQuotaInfo : true}, function (err, collection, headers) {
assert.equal(err, undefined, "error reading collection: " + JSON.stringify(err));
// Validate the collection size quota
@ -2930,27 +2939,27 @@ describe("NodeJS CRUD Tests", function () {
var minOfferThroughputPCollectionWithMultiPartitions = 2000;
var maxOfferThroughputPCollectionWithSinglePartition = minOfferThroughputPCollectionWithMultiPartitions - 100;
it("nativeApi Should do offer read and query operations successfully name based single partition collection", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully name based single partition collection", function (done) {
offerReadAndQueryTest(true, false, offerThroughputSinglePartitionCollection, mbInBytes, done);
});
it("nativeApi Should do offer read and query operations successfully rid based single partition collection", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully rid based single partition collection", function (done) {
offerReadAndQueryTest(false, false, offerThroughputSinglePartitionCollection, mbInBytes, done);
});
it("nativeApi Should do offer read and query operations successfully w/ name based p-Collection w/ 1 partition", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully w/ name based p-Collection w/ 1 partition", function (done) {
offerReadAndQueryTest(true, true, maxOfferThroughputPCollectionWithSinglePartition, mbInBytes, done);
});
it("nativeApi Should do offer read and query operations successfully w/ rid based p-Collection w/ 1 partition", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully w/ rid based p-Collection w/ 1 partition", function (done) {
offerReadAndQueryTest(false, true, maxOfferThroughputPCollectionWithSinglePartition, mbInBytes, done);
});
it("nativeApi Should do offer read and query operations successfully w/ name based p-Collection w/ multi partitions", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully w/ name based p-Collection w/ multi partitions", function (done) {
offerReadAndQueryTest(true, true, minOfferThroughputPCollectionWithMultiPartitions, 5 * mbInBytes, done);
});
it("nativeApi Should do offer read and query operations successfully w/ rid based p-Collection w/ multi partitions", function (done) {
it.skip("nativeApi Should do offer read and query operations successfully w/ rid based p-Collection w/ multi partitions", function (done) {
offerReadAndQueryTest(false, true, minOfferThroughputPCollectionWithMultiPartitions, 5 * mbInBytes, done);
});
@ -3149,7 +3158,6 @@ describe("NodeJS CRUD Tests", function () {
describe("TTL tests", function () {
this.timeout(60000);
var dummyDocumentDefinition = { id: "dummy doc" }
function createCollectionWithInvalidDefaultTtl(client, db, collectionDefinition, collId, defaultTtl, callback) {
collectionDefinition.id = collId;
@ -3216,40 +3224,30 @@ describe("NodeJS CRUD Tests", function () {
});
function checkDocumentGone(client, collection, createdDocument, callback) {
// Call to Upsert a dummy document here is a way to update the logical timestamp of the created document
client.upsertDocument(collection._self, dummyDocumentDefinition, function (err) {
assert.equal(err, undefined, "error upserting document");
client.readDocument(createdDocument._self, function (err) {
var notFoundErrorCode = 404;
assert.equal(err.code, notFoundErrorCode, "response should return error code " + notFoundErrorCode);
callback();
});
});
}
function checkDocumentExists(client, collection, createdDocument, callback) {
// Call to Upsert a dummy document here is a way to update the logical timestamp of the created document
client.upsertDocument(collection._self, dummyDocumentDefinition, function (err) {
assert.equal(err, undefined, "error upserting document");
client.readDocument(createdDocument._self, function (err, readDocument) {
assert.equal(err, undefined, "error reading document");
assert.equal(readDocument.ttl, createdDocument.ttl);
callback();
});
});
}
function positiveDefaultTtlStep4(client, collection, createdDocument, callback) {
// the created document should NOT be gone as its ttl value is set to 8, which overrides the collection's defaultTtl value (5)
checkDocumentExists(client, collection, createdDocument, function () {
setTimeout(function () {
// the created document should be gone now as we have waited for (6 + 3) secs, which is greater than the document's ttl value of 8
// the created document should be gone now as we have waited for (6 + 4) secs, which is greater than the document's ttl value of 8
checkDocumentGone(client, collection, createdDocument, function () {
callback();
});
}, 3000);
}, 4000);
});
}
@ -3276,7 +3274,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(collection._self, documentDefinition, function (err, createdDocument) {
assert.equal(err, undefined, "error creating document");
setTimeout(positiveDefaultTtlStep3, 3000, client, collection, createdDocument, documentDefinition, callback);
setTimeout(positiveDefaultTtlStep3, 4000, client, collection, createdDocument, documentDefinition, callback);
});
});
}
@ -3290,7 +3288,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(collection._self, documentDefinition, function (err, createdDocument) {
assert.equal(err, undefined, "error creating document");
setTimeout(positiveDefaultTtlStep2, 6000, client, collection, createdDocument, documentDefinition, callback);
setTimeout(positiveDefaultTtlStep2, 5000, client, collection, createdDocument, documentDefinition, callback);
});
});
}
@ -3318,7 +3316,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(collection._self, documentDefinition, function (err, createdDocument) {
assert.equal(err, undefined, "error creating document");
setTimeout(positiveDefaultTtlStep1, 10000, client, collection, createdDocument, documentDefinition, function () {
setTimeout(positiveDefaultTtlStep1, 7000, client, collection, createdDocument, documentDefinition, function () {
done();
});
});
@ -3381,7 +3379,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(collection._self, documentDefinition, function (err, createdDocument3) {
assert.equal(err, undefined, "error creating document");
setTimeout(minusOneDefaultTtlStep1, 3000, client, collection, createdDocument1, createdDocument2, createdDocument3, function () {
setTimeout(minusOneDefaultTtlStep1, 4000, client, collection, createdDocument1, createdDocument2, createdDocument3, function () {
done();
});
});
@ -3413,7 +3411,7 @@ describe("NodeJS CRUD Tests", function () {
assert.equal(err, undefined, "error creating document");
// Created document still exists even after ttl time has passed since TTL is disabled at the collection level (no defaultTtl property defined)
setTimeout(checkDocumentExists, 6000, client, collection, createdDocument, function () {
setTimeout(checkDocumentExists, 7000, client, collection, createdDocument, function () {
done();
});
});
@ -3428,12 +3426,9 @@ describe("NodeJS CRUD Tests", function () {
});
}
function miscCasesStep3(client, collection, upsertedDocument, documentDefinition, dummyDocument, callback) {
// the upserted document should be gone now after 9 secs from the last write (upsert) of the document
function miscCasesStep3(client, collection, upsertedDocument, documentDefinition, callback) {
// the upserted document should be gone now after 10 secs from the last write (upsert) of the document
checkDocumentGone(client, collection, upsertedDocument, function () {
client.deleteDocument(dummyDocument._self, function (err) {
assert.equal(err, undefined, "error deleting document");
var query = "SELECT * FROM root r";
client.queryDocuments(collection._self, query).toArray(function (err, results) {
assert.equal(err, undefined, "error querying databases");
@ -3450,8 +3445,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(replacedCollection._self, documentDefinition, function (err, createdDocument) {
assert.equal(err, undefined, "error creating document");
setTimeout(miscCasesStep4, 6000, client, replacedCollection, createdDocument, documentDefinition, callback);
});
setTimeout(miscCasesStep4, 5000, client, replacedCollection, createdDocument, documentDefinition, callback);
});
});
});
@ -3459,25 +3453,20 @@ describe("NodeJS CRUD Tests", function () {
}
function miscCasesStep2(client, collection, documentDefinition, callback) {
// Call to Upsert a dummy document here is a way to update the logical timestamp of the created document
client.upsertDocument(collection._self, dummyDocumentDefinition, function (err, dummyDocument) {
assert.equal(err, undefined, "error upserting document");
// Upsert the document after 3 secs to reset the document's ttl
documentDefinition.key = "value2";
client.upsertDocument(collection._self, documentDefinition, function (err, upsertedDocument) {
setTimeout(function () {
// Upserted document still exists after (3+6) = 9 secs from document creation time (with the collection's defaultTtl set to 8) since its ttl was reset after 3 secs by upserting it
// Upserted document still exists after (3+7) = 10 secs from document creation time (with the collection's defaultTtl set to 8) since its ttl was reset after 3 secs by upserting it
checkDocumentExists(client, collection, upsertedDocument, function () {
setTimeout(miscCasesStep3, 3000, client, collection, upsertedDocument, documentDefinition, dummyDocument, callback);
});
}, 6000);
setTimeout(miscCasesStep3, 3000, client, collection, upsertedDocument, documentDefinition, callback);
});
}, 7000);
});
}
function miscCasesStep1(client, collection, createdDocument, documentDefinition, callback) {
// the created document cannot be deleted since it should already be gone now
// the created document should be gone now as the ttl time expired
checkDocumentGone(client, collection, createdDocument, function () {
// We can create a document with the same id after the ttl time has expired
client.createDocument(collection._self, documentDefinition, function (err, createdDocument) {
@ -3511,7 +3500,7 @@ describe("NodeJS CRUD Tests", function () {
client.createDocument(collection._self, documentDefinition, function (err, createdDocument) {
assert.equal(err, undefined, "error creating document");
setTimeout(miscCasesStep1, (8 + 5) * 1000, client, collection, createdDocument, documentDefinition, function () {
setTimeout(miscCasesStep1, 10000, client, collection, createdDocument, documentDefinition, function () {
done();
});
});
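The TTL rules these steps exercise can be summarized in two definitions. A sketch of the shapes involved (ids and values illustrative; the defaultTtl and ttl property names follow the DocumentDB API):

// Collection-level default: documents expire 5 secs after their last write
// unless they carry their own ttl.
var collectionDefinition = { id: "ttlColl", defaultTtl: 5 };

// Per-document override: this document lives 8 secs per write, regardless of
// the collection's defaultTtl. Any upsert resets the countdown.
var documentDefinition = { id: "doc1", key: "value", ttl: 8 };

// Special values: a defaultTtl of -1 means documents never expire by default
// (but may opt in via their own ttl); omitting defaultTtl disables TTL
// entirely, so per-document ttl values are ignored.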
@ -3958,8 +3947,8 @@ describe.skip("GlobalDBTests", function () {
it("Test locations cache", function (done) {
var client = new DocumentDBClient(host, { masterKey: masterKey });
var writableLocations = [{ name : writeLocation, databaseAccountEndpoint : writeLocationHost }];
var readableLocations = [{ name : readLocation, databaseAccountEndpoint : readLocationHost }, { name : readLocation2, databaseAccountEndpoint : readLocation2Host }];
var writableLocations = [{ name: writeLocation, databaseAccountEndpoint: writeLocationHost }];
var readableLocations = [{ name: readLocation, databaseAccountEndpoint: readLocationHost }, { name: readLocation2, databaseAccountEndpoint: readLocation2Host }];
client._globalEndpointManager._updateLocationsCache(writableLocations, readableLocations, function (endpoints) {
// If no preferred location is set, we return the write endpoint as the ReadEndpoint for better latency; the write endpoint is set as expected
@ -3974,7 +3963,7 @@ describe.skip("GlobalDBTests", function () {
assert.equal(endpoints[0], host);
assert.equal(endpoints[1], host);
writableLocations = [{ name : writeLocation, databaseAccountEndpoint : writeLocationHost }];
writableLocations = [{ name: writeLocation, databaseAccountEndpoint: writeLocationHost }];
readableLocations = [];
client._globalEndpointManager._updateLocationsCache(writableLocations, readableLocations, function (endpoints) {
@ -3983,15 +3972,15 @@ describe.skip("GlobalDBTests", function () {
assert.equal(endpoints[1], writeLocationHost);
writableLocations = [];
readableLocations = [{ name : readLocation, databaseAccountEndpoint : readLocationHost }];
readableLocations = [{ name: readLocation, databaseAccountEndpoint: readLocationHost }];
client._globalEndpointManager._updateLocationsCache(writableLocations, readableLocations, function (endpoints) {
// If there are no writableLocations, both Read and Write Endpoints point to the endpoint passed while creating the client instance
assert.equal(endpoints[0], host);
assert.equal(endpoints[1], host);
writableLocations = [{ name : writeLocation, databaseAccountEndpoint : writeLocationHost }];
readableLocations = [{ name : readLocation, databaseAccountEndpoint : readLocationHost }, { name : readLocation2, databaseAccountEndpoint : readLocation2Host }];
writableLocations = [{ name: writeLocation, databaseAccountEndpoint: writeLocationHost }];
readableLocations = [{ name: readLocation, databaseAccountEndpoint: readLocationHost }, { name: readLocation2, databaseAccountEndpoint: readLocation2Host }];
var connectionPolicy = new DocumentBase.ConnectionPolicy();
connectionPolicy.PreferredLocations = [readLocation2];
@ -4003,8 +3992,8 @@ describe.skip("GlobalDBTests", function () {
assert.equal(endpoints[0], writeLocationHost);
assert.equal(endpoints[1], readLocation2Host);
writableLocations = [{ name : writeLocation, databaseAccountEndpoint : writeLocationHost }, { name : readLocation2, databaseAccountEndpoint : readLocation2Host }];
readableLocations = [{ name : readLocation, databaseAccountEndpoint : readLocationHost }];
writableLocations = [{ name: writeLocation, databaseAccountEndpoint: writeLocationHost }, { name: readLocation2, databaseAccountEndpoint: readLocation2Host }];
readableLocations = [{ name: readLocation, databaseAccountEndpoint: readLocationHost }];
connectionPolicy = new DocumentBase.ConnectionPolicy();
connectionPolicy.PreferredLocations = [readLocation2];
@ -4016,8 +4005,8 @@ describe.skip("GlobalDBTests", function () {
assert.equal(endpoints[0], writeLocationHost);
assert.equal(endpoints[1], readLocation2Host);
writableLocations = [{ name : writeLocation, databaseAccountEndpoint : writeLocationHost }];
readableLocations = [{ name : readLocation, databaseAccountEndpoint : readLocationHost }, { name : readLocation2, databaseAccountEndpoint : readLocation2Host }];
writableLocations = [{ name: writeLocation, databaseAccountEndpoint: writeLocationHost }];
readableLocations = [{ name: readLocation, databaseAccountEndpoint: readLocationHost }, { name: readLocation2, databaseAccountEndpoint: readLocation2Host }];
connectionPolicy.EnableEndpointDiscovery = false;
client = new DocumentDBClient(host, { masterKey: masterKey }, connectionPolicy);
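For context, the cache being probed feeds the client's read/write endpoint selection, driven by the connection policy. A sketch of the consuming side, assuming DocumentBase is exported from the lib as in the published SDK (the endpoint, key, and region name are illustrative):

var lib = require("../lib/");
var DocumentDBClient = lib.DocumentClient;
var DocumentBase = lib.DocumentBase;

var connectionPolicy = new DocumentBase.ConnectionPolicy();
connectionPolicy.PreferredLocations = ["West US"];
// With EnableEndpointDiscovery left at its default (true), reads resolve to
// the first available preferred location and writes to the account's write
// region; setting it to false pins both to the endpoint passed below.
var client = new DocumentDBClient("https://myaccount.documents.azure.com:443/",
    { masterKey: "<master key>" }, connectionPolicy);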