[INTERNAL] Line endings: Add a .gitattributes file to normalize line endings (#3062)

Several of the files have mixed line endings, causing PRs to show the entire file as changed instead of just one line. This adds a .gitattributes file that normalizes line endings and forces CRLF formatting for the C# source files. It also fixes all the .cs files to use the correct formatting.
This commit is contained in:
j82w 2022-03-03 12:09:51 -08:00 коммит произвёл GitHub
Родитель 63b0e07e73
Коммит af9b009971
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
47 изменённых файлов: 17142 добавлений и 17123 удалений

19
.gitattributes Normal file
Просмотреть файл

@@ -0,0 +1,19 @@
# Normalize line endings: text files are auto-detected; the overrides below pin checkout EOL (CRLF for .cs/.sln, LF for .sh) and mark binary types
* text=auto
*.bmp binary
*.dll binary
*.gif binary
*.jpg binary
*.png binary
*.snk binary
*.exe binary
*.wmv binary
*.mp4 binary
*.ismv binary
*.isma binary
*.cs text diff=csharp eol=crlf
*.csproj text=auto
*.sln text=auto eol=crlf
*.sh text=auto eol=lf

Просмотреть файл

@@ -1,266 +1,266 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace CosmosBenchmark
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Runtime;
    using CommandLine;
    using Microsoft.Azure.Documents.Client;
    using Newtonsoft.Json;

    /// <summary>
    /// Command-line options for the Cosmos DB benchmark tool, parsed with the
    /// CommandLine library. Also acts as a factory for the v3 SDK CosmosClient
    /// and the v2 SDK DocumentClient configured from these options.
    /// </summary>
    public class BenchmarkConfig
    {
        // User agent fragment so backend logs can identify benchmark traffic.
        private static readonly string UserAgentSuffix = "cosmosdbdotnetbenchmark";

        [Option('w', Required = true, HelpText = "Workload type insert, read")]
        public string WorkloadType { get; set; }

        [Option('e', Required = true, HelpText = "Cosmos account end point")]
        public string EndPoint { get; set; }

        // [JsonIgnore] keeps the account key out of the serialized config (see Print).
        [Option('k', Required = true, HelpText = "Cosmos account master key")]
        [JsonIgnore]
        public string Key { get; set; }

        [Option(Required = false, HelpText = "Workload Name, it will override the workloadType value in published results")]
        public string WorkloadName { get; set; }

        [Option(Required = false, HelpText = "Database to use")]
        public string Database { get; set; } = "db";

        [Option(Required = false, HelpText = "Collection to use")]
        public string Container { get; set; } = "data";

        [Option('t', Required = false, HelpText = "Collection throughput use")]
        public int Throughput { get; set; } = 100000;

        [Option('n', Required = false, HelpText = "Number of documents to insert")]
        public int ItemCount { get; set; } = 200000;

        [Option(Required = false, HelpText = "Client consistency level to override")]
        public string ConsistencyLevel { get; set; }

        [Option(Required = false, HelpText = "Enable latency percentiles")]
        public bool EnableLatencyPercentiles { get; set; }

        [Option(Required = false, HelpText = "Start with new collection")]
        public bool CleanupOnStart { get; set; } = false;

        [Option(Required = false, HelpText = "Clean-up after run")]
        public bool CleanupOnFinish { get; set; } = false;

        [Option(Required = false, HelpText = "Container partition key path")]
        public string PartitionKeyPath { get; set; } = "/partitionKey";

        // -1 means "auto": GetTaskCount derives the task count from throughput.
        [Option("pl", Required = false, HelpText = "Degree of parallism")]
        public int DegreeOfParallelism { get; set; } = -1;

        [Option("tcp", Required = false, HelpText = "MaxRequestsPerTcpConnection")]
        public int? MaxRequestsPerTcpConnection { get; set; } = null;

        [Option(Required = false, HelpText = "MaxTcpConnectionsPerEndpoint")]
        public int? MaxTcpConnectionsPerEndpoint { get; set; } = null;

        [Option(Required = false, HelpText = "Item template")]
        public string ItemTemplateFile { get; set; } = "Player.json";

        [Option(Required = false, HelpText = "Min thread pool size")]
        public int MinThreadPoolSize { get; set; } = 100;

        [Option(Required = false, HelpText = "Write the task execution failure to console. Useful for debugging failures")]
        public bool TraceFailures { get; set; }

        // The options below are only validated/consumed when PublishResults is set (see From).
        [Option(Required = false, HelpText = "Publish run results")]
        public bool PublishResults { get; set; }

        [Option(Required = false, HelpText = "Run ID, only for publish")]
        internal string RunId { get; set; }

        [Option(Required = false, HelpText = "Commit ID, only for publish")]
        public string CommitId { get; set; }

        [Option(Required = false, HelpText = "Commit date, only for publish")]
        public string CommitDate { get; set; }

        [Option(Required = false, HelpText = "Commit time, only for publish")]
        public string CommitTime { get; set; }

        [Option(Required = false, HelpText = "Branch name, only for publish")]
        public string BranchName { get; set; }

        [Option(Required = false, HelpText = "Partitionkey, only for publish")]
        public string ResultsPartitionKeyValue { get; set; }

        [Option(Required = false, HelpText = "Disable core SDK logging")]
        public bool DisableCoreSdkLogging { get; set; }

        [Option(Required = false, HelpText = "Enable Telemetry")]
        public bool EnableTelemetry { get; set; }

        [Option(Required = false, HelpText = "Telemetry Schedule in Seconds")]
        public int TelemetryScheduleInSec { get; set; }

        [Option(Required = false, HelpText = "Telemetry Endpoint")]
        public string TelemetryEndpoint { get; set; }

        [Option(Required = false, HelpText = "Endpoint to publish results to")]
        public string ResultsEndpoint { get; set; }

        // [JsonIgnore] keeps the results key out of the serialized config (see Print).
        [Option(Required = false, HelpText = "Key to publish results to")]
        [JsonIgnore]
        public string ResultsKey { get; set; }

        [Option(Required = false, HelpText = "Database to publish results to")]
        public string ResultsDatabase { get; set; }

        [Option(Required = false, HelpText = "Container to publish results to")]
        public string ResultsContainer { get; set; } = "runsummary";

        /// <summary>
        /// Computes the number of concurrent worker tasks: the explicit
        /// <see cref="DegreeOfParallelism"/> when set, otherwise scaled from the
        /// container throughput and clamped to [1, ProcessorCount * 50].
        /// </summary>
        internal int GetTaskCount(int containerThroughput)
        {
            int taskCount = this.DegreeOfParallelism;
            if (taskCount == -1)
            {
                // set TaskCount = 10 for each 10k RUs, minimum 1, maximum { #processor * 50 }
                taskCount = Math.Max(containerThroughput / 1000, 1);
                taskCount = Math.Min(taskCount, Environment.ProcessorCount * 50);
            }

            return taskCount;
        }

        /// <summary>
        /// Writes the effective configuration (serialized as JSON, secrets excluded
        /// via [JsonIgnore]) to the trace/console in green.
        /// </summary>
        internal void Print()
        {
            using (ConsoleColorContext ct = new ConsoleColorContext(ConsoleColor.Green))
            {
                Utility.TeeTraceInformation($"{nameof(BenchmarkConfig)} arguments");
                Utility.TeeTraceInformation($"IsServerGC: {GCSettings.IsServerGC}");
                Utility.TeeTraceInformation("--------------------------------------------------------------------- ");
                Utility.TeeTraceInformation(JsonHelper.ToString(this));
                Utility.TeeTraceInformation("--------------------------------------------------------------------- ");
                Utility.TeeTraceInformation(string.Empty);
            }
        }

        /// <summary>
        /// Parses <paramref name="args"/> into a <see cref="BenchmarkConfig"/>.
        /// On parse failure HandleParseError exits the process. When PublishResults
        /// is set, throws <see cref="ArgumentException"/> if any publish-related
        /// option is missing.
        /// </summary>
        internal static BenchmarkConfig From(string[] args)
        {
            BenchmarkConfig options = null;
            Parser parser = new Parser((settings) =>
            {
                settings.CaseSensitive = false;
                settings.HelpWriter = Console.Error;
                settings.AutoHelp = true;
            });
            parser.ParseArguments<BenchmarkConfig>(args)
                .WithParsed<BenchmarkConfig>(e => options = e)
                .WithNotParsed<BenchmarkConfig>(e => BenchmarkConfig.HandleParseError(e));

            if (options.PublishResults)
            {
                if (string.IsNullOrEmpty(options.ResultsContainer)
                    || string.IsNullOrWhiteSpace(options.ResultsPartitionKeyValue)
                    || string.IsNullOrWhiteSpace(options.CommitId)
                    || string.IsNullOrWhiteSpace(options.CommitDate)
                    || string.IsNullOrWhiteSpace(options.CommitTime))
                {
                    throw new ArgumentException($"Missing either {nameof(options.ResultsContainer)} {nameof(options.ResultsPartitionKeyValue)} {nameof(options.CommitId)} {nameof(options.CommitDate)} {nameof(options.CommitTime)}");
                }
            }

            return options;
        }

        /// <summary>
        /// Give each workload a unique user agent string
        /// so the backend logs can be filtered by the workload.
        /// </summary>
        private string GetUserAgentPrefix()
        {
            return this.WorkloadName ?? this.WorkloadType ?? BenchmarkConfig.UserAgentSuffix;
        }

        /// <summary>
        /// Creates a v3 SDK CosmosClient configured from these options: no
        /// rate-limit retries, optional client telemetry (enabled through
        /// environment variables read by the SDK), optional consistency override.
        /// </summary>
        internal Microsoft.Azure.Cosmos.CosmosClient CreateCosmosClient(string accountKey)
        {
            Microsoft.Azure.Cosmos.CosmosClientOptions clientOptions = new Microsoft.Azure.Cosmos.CosmosClientOptions()
            {
                ApplicationName = this.GetUserAgentPrefix(),
                MaxRetryAttemptsOnRateLimitedRequests = 0,
                MaxRequestsPerTcpConnection = this.MaxRequestsPerTcpConnection,
                MaxTcpConnectionsPerEndpoint = this.MaxTcpConnectionsPerEndpoint
            };

            if (this.EnableTelemetry)
            {
                // Client telemetry is toggled via environment variables read by the SDK.
                Environment.SetEnvironmentVariable(
                    Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetryEnabled,
                    "true");

                if (this.TelemetryScheduleInSec > 0)
                {
                    Environment.SetEnvironmentVariable(
                        Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetrySchedulingInSeconds,
                        Convert.ToString(this.TelemetryScheduleInSec));
                }

                if (!string.IsNullOrEmpty(this.TelemetryEndpoint))
                {
                    Environment.SetEnvironmentVariable(
                        Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetryEndpoint,
                        this.TelemetryEndpoint);
                }
            }

            if (!string.IsNullOrWhiteSpace(this.ConsistencyLevel))
            {
                clientOptions.ConsistencyLevel = (Microsoft.Azure.Cosmos.ConsistencyLevel)Enum.Parse(typeof(Microsoft.Azure.Cosmos.ConsistencyLevel), this.ConsistencyLevel, ignoreCase: true);
            }

            return new Microsoft.Azure.Cosmos.CosmosClient(
                this.EndPoint,
                accountKey,
                clientOptions);
        }

        /// <summary>
        /// Creates a v2 SDK <see cref="DocumentClient"/> configured from these
        /// options: Direct/TCP connectivity, no throttling retries, optional
        /// consistency override.
        /// </summary>
        internal DocumentClient CreateDocumentClient(string accountKey)
        {
            Microsoft.Azure.Documents.ConsistencyLevel? consistencyLevel = null;
            if (!string.IsNullOrWhiteSpace(this.ConsistencyLevel))
            {
                consistencyLevel = (Microsoft.Azure.Documents.ConsistencyLevel)Enum.Parse(typeof(Microsoft.Azure.Documents.ConsistencyLevel), this.ConsistencyLevel, ignoreCase: true);
            }

            return new DocumentClient(new Uri(this.EndPoint),
                accountKey,
                new ConnectionPolicy()
                {
                    ConnectionMode = Microsoft.Azure.Documents.Client.ConnectionMode.Direct,
                    ConnectionProtocol = Protocol.Tcp,
                    MaxRequestsPerTcpConnection = this.MaxRequestsPerTcpConnection,
                    MaxTcpConnectionsPerEndpoint = this.MaxTcpConnectionsPerEndpoint,
                    UserAgentSuffix = this.GetUserAgentPrefix(),
                    RetryOptions = new RetryOptions()
                    {
                        MaxRetryAttemptsOnThrottledRequests = 0
                    }
                },
                desiredConsistencyLevel: consistencyLevel);
        }

        /// <summary>
        /// Prints parse errors in red and terminates the process; the exit code
        /// is the number of parse errors.
        /// </summary>
        private static void HandleParseError(IEnumerable<Error> errors)
        {
            using (ConsoleColorContext ct = new ConsoleColorContext(ConsoleColor.Red))
            {
                foreach (Error e in errors)
                {
                    Console.WriteLine(e.ToString());
                }
            }

            Environment.Exit(errors.Count());
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace CosmosBenchmark
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Runtime;
    using CommandLine;
    using Microsoft.Azure.Documents.Client;
    using Newtonsoft.Json;

    /// <summary>
    /// Command-line options for the Cosmos DB benchmark tool, parsed with the
    /// CommandLine library. Also acts as a factory for the v3 SDK CosmosClient
    /// and the v2 SDK DocumentClient configured from these options.
    /// </summary>
    public class BenchmarkConfig
    {
        // User agent fragment so backend logs can identify benchmark traffic.
        private static readonly string UserAgentSuffix = "cosmosdbdotnetbenchmark";

        [Option('w', Required = true, HelpText = "Workload type insert, read")]
        public string WorkloadType { get; set; }

        [Option('e', Required = true, HelpText = "Cosmos account end point")]
        public string EndPoint { get; set; }

        // [JsonIgnore] keeps the account key out of the serialized config (see Print).
        [Option('k', Required = true, HelpText = "Cosmos account master key")]
        [JsonIgnore]
        public string Key { get; set; }

        [Option(Required = false, HelpText = "Workload Name, it will override the workloadType value in published results")]
        public string WorkloadName { get; set; }

        [Option(Required = false, HelpText = "Database to use")]
        public string Database { get; set; } = "db";

        [Option(Required = false, HelpText = "Collection to use")]
        public string Container { get; set; } = "data";

        [Option('t', Required = false, HelpText = "Collection throughput use")]
        public int Throughput { get; set; } = 100000;

        [Option('n', Required = false, HelpText = "Number of documents to insert")]
        public int ItemCount { get; set; } = 200000;

        [Option(Required = false, HelpText = "Client consistency level to override")]
        public string ConsistencyLevel { get; set; }

        [Option(Required = false, HelpText = "Enable latency percentiles")]
        public bool EnableLatencyPercentiles { get; set; }

        [Option(Required = false, HelpText = "Start with new collection")]
        public bool CleanupOnStart { get; set; } = false;

        [Option(Required = false, HelpText = "Clean-up after run")]
        public bool CleanupOnFinish { get; set; } = false;

        [Option(Required = false, HelpText = "Container partition key path")]
        public string PartitionKeyPath { get; set; } = "/partitionKey";

        // -1 means "auto": GetTaskCount derives the task count from throughput.
        [Option("pl", Required = false, HelpText = "Degree of parallism")]
        public int DegreeOfParallelism { get; set; } = -1;

        [Option("tcp", Required = false, HelpText = "MaxRequestsPerTcpConnection")]
        public int? MaxRequestsPerTcpConnection { get; set; } = null;

        [Option(Required = false, HelpText = "MaxTcpConnectionsPerEndpoint")]
        public int? MaxTcpConnectionsPerEndpoint { get; set; } = null;

        [Option(Required = false, HelpText = "Item template")]
        public string ItemTemplateFile { get; set; } = "Player.json";

        [Option(Required = false, HelpText = "Min thread pool size")]
        public int MinThreadPoolSize { get; set; } = 100;

        [Option(Required = false, HelpText = "Write the task execution failure to console. Useful for debugging failures")]
        public bool TraceFailures { get; set; }

        // The options below are only validated/consumed when PublishResults is set (see From).
        [Option(Required = false, HelpText = "Publish run results")]
        public bool PublishResults { get; set; }

        [Option(Required = false, HelpText = "Run ID, only for publish")]
        internal string RunId { get; set; }

        [Option(Required = false, HelpText = "Commit ID, only for publish")]
        public string CommitId { get; set; }

        [Option(Required = false, HelpText = "Commit date, only for publish")]
        public string CommitDate { get; set; }

        [Option(Required = false, HelpText = "Commit time, only for publish")]
        public string CommitTime { get; set; }

        [Option(Required = false, HelpText = "Branch name, only for publish")]
        public string BranchName { get; set; }

        [Option(Required = false, HelpText = "Partitionkey, only for publish")]
        public string ResultsPartitionKeyValue { get; set; }

        [Option(Required = false, HelpText = "Disable core SDK logging")]
        public bool DisableCoreSdkLogging { get; set; }

        [Option(Required = false, HelpText = "Enable Telemetry")]
        public bool EnableTelemetry { get; set; }

        [Option(Required = false, HelpText = "Telemetry Schedule in Seconds")]
        public int TelemetryScheduleInSec { get; set; }

        [Option(Required = false, HelpText = "Telemetry Endpoint")]
        public string TelemetryEndpoint { get; set; }

        [Option(Required = false, HelpText = "Endpoint to publish results to")]
        public string ResultsEndpoint { get; set; }

        // [JsonIgnore] keeps the results key out of the serialized config (see Print).
        [Option(Required = false, HelpText = "Key to publish results to")]
        [JsonIgnore]
        public string ResultsKey { get; set; }

        [Option(Required = false, HelpText = "Database to publish results to")]
        public string ResultsDatabase { get; set; }

        [Option(Required = false, HelpText = "Container to publish results to")]
        public string ResultsContainer { get; set; } = "runsummary";

        /// <summary>
        /// Computes the number of concurrent worker tasks: the explicit
        /// <see cref="DegreeOfParallelism"/> when set, otherwise scaled from the
        /// container throughput and clamped to [1, ProcessorCount * 50].
        /// </summary>
        internal int GetTaskCount(int containerThroughput)
        {
            int taskCount = this.DegreeOfParallelism;
            if (taskCount == -1)
            {
                // set TaskCount = 10 for each 10k RUs, minimum 1, maximum { #processor * 50 }
                taskCount = Math.Max(containerThroughput / 1000, 1);
                taskCount = Math.Min(taskCount, Environment.ProcessorCount * 50);
            }

            return taskCount;
        }

        /// <summary>
        /// Writes the effective configuration (serialized as JSON, secrets excluded
        /// via [JsonIgnore]) to the trace/console in green.
        /// </summary>
        internal void Print()
        {
            using (ConsoleColorContext ct = new ConsoleColorContext(ConsoleColor.Green))
            {
                Utility.TeeTraceInformation($"{nameof(BenchmarkConfig)} arguments");
                Utility.TeeTraceInformation($"IsServerGC: {GCSettings.IsServerGC}");
                Utility.TeeTraceInformation("--------------------------------------------------------------------- ");
                Utility.TeeTraceInformation(JsonHelper.ToString(this));
                Utility.TeeTraceInformation("--------------------------------------------------------------------- ");
                Utility.TeeTraceInformation(string.Empty);
            }
        }

        /// <summary>
        /// Parses <paramref name="args"/> into a <see cref="BenchmarkConfig"/>.
        /// On parse failure HandleParseError exits the process. When PublishResults
        /// is set, throws <see cref="ArgumentException"/> if any publish-related
        /// option is missing.
        /// </summary>
        internal static BenchmarkConfig From(string[] args)
        {
            BenchmarkConfig options = null;
            Parser parser = new Parser((settings) =>
            {
                settings.CaseSensitive = false;
                settings.HelpWriter = Console.Error;
                settings.AutoHelp = true;
            });
            parser.ParseArguments<BenchmarkConfig>(args)
                .WithParsed<BenchmarkConfig>(e => options = e)
                .WithNotParsed<BenchmarkConfig>(e => BenchmarkConfig.HandleParseError(e));

            if (options.PublishResults)
            {
                if (string.IsNullOrEmpty(options.ResultsContainer)
                    || string.IsNullOrWhiteSpace(options.ResultsPartitionKeyValue)
                    || string.IsNullOrWhiteSpace(options.CommitId)
                    || string.IsNullOrWhiteSpace(options.CommitDate)
                    || string.IsNullOrWhiteSpace(options.CommitTime))
                {
                    throw new ArgumentException($"Missing either {nameof(options.ResultsContainer)} {nameof(options.ResultsPartitionKeyValue)} {nameof(options.CommitId)} {nameof(options.CommitDate)} {nameof(options.CommitTime)}");
                }
            }

            return options;
        }

        /// <summary>
        /// Give each workload a unique user agent string
        /// so the backend logs can be filtered by the workload.
        /// </summary>
        private string GetUserAgentPrefix()
        {
            return this.WorkloadName ?? this.WorkloadType ?? BenchmarkConfig.UserAgentSuffix;
        }

        /// <summary>
        /// Creates a v3 SDK CosmosClient configured from these options: no
        /// rate-limit retries, optional client telemetry (enabled through
        /// environment variables read by the SDK), optional consistency override.
        /// </summary>
        internal Microsoft.Azure.Cosmos.CosmosClient CreateCosmosClient(string accountKey)
        {
            Microsoft.Azure.Cosmos.CosmosClientOptions clientOptions = new Microsoft.Azure.Cosmos.CosmosClientOptions()
            {
                ApplicationName = this.GetUserAgentPrefix(),
                MaxRetryAttemptsOnRateLimitedRequests = 0,
                MaxRequestsPerTcpConnection = this.MaxRequestsPerTcpConnection,
                MaxTcpConnectionsPerEndpoint = this.MaxTcpConnectionsPerEndpoint
            };

            if (this.EnableTelemetry)
            {
                // Client telemetry is toggled via environment variables read by the SDK.
                Environment.SetEnvironmentVariable(
                    Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetryEnabled,
                    "true");

                if (this.TelemetryScheduleInSec > 0)
                {
                    Environment.SetEnvironmentVariable(
                        Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetrySchedulingInSeconds,
                        Convert.ToString(this.TelemetryScheduleInSec));
                }

                if (!string.IsNullOrEmpty(this.TelemetryEndpoint))
                {
                    Environment.SetEnvironmentVariable(
                        Microsoft.Azure.Cosmos.Telemetry.ClientTelemetryOptions.EnvPropsClientTelemetryEndpoint,
                        this.TelemetryEndpoint);
                }
            }

            if (!string.IsNullOrWhiteSpace(this.ConsistencyLevel))
            {
                clientOptions.ConsistencyLevel = (Microsoft.Azure.Cosmos.ConsistencyLevel)Enum.Parse(typeof(Microsoft.Azure.Cosmos.ConsistencyLevel), this.ConsistencyLevel, ignoreCase: true);
            }

            return new Microsoft.Azure.Cosmos.CosmosClient(
                this.EndPoint,
                accountKey,
                clientOptions);
        }

        /// <summary>
        /// Creates a v2 SDK <see cref="DocumentClient"/> configured from these
        /// options: Direct/TCP connectivity, no throttling retries, optional
        /// consistency override.
        /// </summary>
        internal DocumentClient CreateDocumentClient(string accountKey)
        {
            Microsoft.Azure.Documents.ConsistencyLevel? consistencyLevel = null;
            if (!string.IsNullOrWhiteSpace(this.ConsistencyLevel))
            {
                consistencyLevel = (Microsoft.Azure.Documents.ConsistencyLevel)Enum.Parse(typeof(Microsoft.Azure.Documents.ConsistencyLevel), this.ConsistencyLevel, ignoreCase: true);
            }

            return new DocumentClient(new Uri(this.EndPoint),
                accountKey,
                new ConnectionPolicy()
                {
                    ConnectionMode = Microsoft.Azure.Documents.Client.ConnectionMode.Direct,
                    ConnectionProtocol = Protocol.Tcp,
                    MaxRequestsPerTcpConnection = this.MaxRequestsPerTcpConnection,
                    MaxTcpConnectionsPerEndpoint = this.MaxTcpConnectionsPerEndpoint,
                    UserAgentSuffix = this.GetUserAgentPrefix(),
                    RetryOptions = new RetryOptions()
                    {
                        MaxRetryAttemptsOnThrottledRequests = 0
                    }
                },
                desiredConsistencyLevel: consistencyLevel);
        }

        /// <summary>
        /// Prints parse errors in red and terminates the process; the exit code
        /// is the number of parse errors.
        /// </summary>
        private static void HandleParseError(IEnumerable<Error> errors)
        {
            using (ConsoleColorContext ct = new ConsoleColorContext(ConsoleColor.Red))
            {
                foreach (Error e in errors)
                {
                    Console.WriteLine(e.ToString());
                }
            }

            Environment.Exit(errors.Count());
        }
    }
}

Просмотреть файл

@@ -22,8 +22,8 @@ namespace Microsoft.Azure.Cosmos
{
public const int MaxAuthorizationHeaderSize = 1024;
public const int DefaultAllowedClockSkewInSeconds = 900;
public const int DefaultMasterTokenExpiryInSeconds = 900;
private const int MaxAadAuthorizationHeaderSize = 16 * 1024;
public const int DefaultMasterTokenExpiryInSeconds = 900;
private const int MaxAadAuthorizationHeaderSize = 16 * 1024;
private const int MaxResourceTokenAuthorizationHeaderSize = 8 * 1024;
private static readonly string AuthorizationFormatPrefixUrlEncoded = HttpUtility.UrlEncode(string.Format(CultureInfo.InvariantCulture, Constants.Properties.AuthorizationFormat,
Constants.Properties.MasterToken,
@@ -253,28 +253,28 @@ namespace Microsoft.Azure.Cosmos
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat);
}
ReadOnlyMemory<char> authTypeValue = authType.Slice(typeKeyValueSepartorPosition + 1);
if (MemoryExtensions.Equals(authTypeValue.Span, Constants.Properties.AadToken.AsSpan(), StringComparison.OrdinalIgnoreCase))
{
if (authorizationTokenLength > AuthorizationHelper.MaxAadAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxAadAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
}
}
else if (MemoryExtensions.Equals(authTypeValue.Span, Constants.Properties.ResourceToken.AsSpan(), StringComparison.OrdinalIgnoreCase))
{
if (authorizationTokenLength > AuthorizationHelper.MaxResourceTokenAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxResourceTokenAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
}
}
else if (authorizationTokenLength > AuthorizationHelper.MaxAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
ReadOnlyMemory<char> authTypeValue = authType.Slice(typeKeyValueSepartorPosition + 1);
if (MemoryExtensions.Equals(authTypeValue.Span, Constants.Properties.AadToken.AsSpan(), StringComparison.OrdinalIgnoreCase))
{
if (authorizationTokenLength > AuthorizationHelper.MaxAadAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxAadAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
}
}
else if (MemoryExtensions.Equals(authTypeValue.Span, Constants.Properties.ResourceToken.AsSpan(), StringComparison.OrdinalIgnoreCase))
{
if (authorizationTokenLength > AuthorizationHelper.MaxResourceTokenAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxResourceTokenAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
}
}
else if (authorizationTokenLength > AuthorizationHelper.MaxAuthorizationHeaderSize)
{
DefaultTrace.TraceError($"Token of type [{authTypeValue.Span.ToString()}] was of size [{authorizationTokenLength}] while the max allowed size is [{AuthorizationHelper.MaxAuthorizationHeaderSize}].");
throw new UnauthorizedException(RMResources.InvalidAuthHeaderFormat, SubStatusCodes.InvalidAuthHeaderFormat);
}
int versionKeyValueSeparatorPosition = version.Span.IndexOf('=');

Просмотреть файл

@@ -1,296 +1,296 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Documents;

    /// <summary>
    /// Builder-style implementation of <see cref="TransactionalBatch"/>: each
    /// add method appends an <see cref="ItemBatchOperation"/> and returns this
    /// instance for chaining; <see cref="ExecuteAsync(CancellationToken)"/>
    /// submits the accumulated operations as a single transactional batch.
    /// </summary>
    internal class BatchCore : TransactionalBatchInternal
    {
        // All operations in the batch target this single partition key.
        private readonly PartitionKey partitionKey;

        private readonly ContainerInternal container;

        // Pending operations; swapped for a fresh list on ExecuteAsync so the
        // instance can be reused for a subsequent batch.
        private List<ItemBatchOperation> operations;

        /// <summary>
        /// Initializes a new instance of the <see cref="BatchCore"/> class.
        /// </summary>
        /// <param name="container">Container that has items on which batch operations are to be performed.</param>
        /// <param name="partitionKey">The partition key for all items in the batch. <see cref="PartitionKey"/>.</param>
        internal BatchCore(
            ContainerInternal container,
            PartitionKey partitionKey)
        {
            this.container = container;
            this.partitionKey = partitionKey;
            this.operations = new List<ItemBatchOperation>();
        }

        /// <summary>
        /// Adds an operation to create the given item to the batch.
        /// </summary>
        public override TransactionalBatch CreateItem<T>(
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Create,
                operationIndex: this.operations.Count,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to create an item from its stream payload to the batch.
        /// </summary>
        public override TransactionalBatch CreateItemStream(
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Create,
                operationIndex: this.operations.Count,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to read the item with the given id to the batch.
        /// </summary>
        public override TransactionalBatch ReadItem(
            string id,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Read,
                operationIndex: this.operations.Count,
                id: id,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to upsert the given item to the batch.
        /// </summary>
        public override TransactionalBatch UpsertItem<T>(
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Upsert,
                operationIndex: this.operations.Count,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to upsert an item from its stream payload to the batch.
        /// </summary>
        public override TransactionalBatch UpsertItemStream(
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Upsert,
                operationIndex: this.operations.Count,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to replace the item with the given id to the batch.
        /// </summary>
        public override TransactionalBatch ReplaceItem<T>(
            string id,
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Replace,
                operationIndex: this.operations.Count,
                id: id,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to replace the item with the given id from a stream payload to the batch.
        /// </summary>
        public override TransactionalBatch ReplaceItemStream(
            string id,
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Replace,
                operationIndex: this.operations.Count,
                id: id,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to delete the item with the given id to the batch.
        /// </summary>
        public override TransactionalBatch DeleteItem(
            string id,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Delete,
                operationIndex: this.operations.Count,
                id: id,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Executes the batch with no request options; delegates to the
        /// <see cref="ExecuteAsync(TransactionalBatchRequestOptions, CancellationToken)"/> overload.
        /// </summary>
        public override Task<TransactionalBatchResponse> ExecuteAsync(
            CancellationToken cancellationToken = default)
        {
            return this.ExecuteAsync(
                requestOptions: null,
                cancellationToken: cancellationToken);
        }

        /// <summary>
        /// Executes the batch at the Azure Cosmos service as an asynchronous operation.
        /// </summary>
        /// <param name="requestOptions">Options that apply to the batch.</param>
        /// <param name="cancellationToken">(Optional) <see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>An awaitable <see cref="TransactionalBatchResponse"/> which contains the completion status and results of each operation.</returns>
        public override Task<TransactionalBatchResponse> ExecuteAsync(
            TransactionalBatchRequestOptions requestOptions,
            CancellationToken cancellationToken = default)
        {
            return this.container.ClientContext.OperationHelperAsync(
                nameof(ExecuteAsync),
                requestOptions,
                (trace) =>
                {
                    BatchExecutor executor = new BatchExecutor(
                        container: this.container,
                        partitionKey: this.partitionKey,
                        operations: this.operations,
                        batchOptions: requestOptions);

                    // Reset the pending list so this instance can build a new batch
                    // while the captured operations execute.
                    this.operations = new List<ItemBatchOperation>();
                    return executor.ExecuteAsync(trace, cancellationToken);
                });
        }

        /// <summary>
        /// Adds an operation to patch an item into the batch.
        /// </summary>
        /// <param name="id">The cosmos item id.</param>
        /// <param name="patchStream">A <see cref="Stream"/> containing the patch specification.</param>
        /// <param name="requestOptions">(Optional) The options for the patch request. <see cref="TransactionalBatchPatchItemRequestOptions"/>.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public virtual TransactionalBatch PatchItemStream(
            string id,
            Stream patchStream,
            TransactionalBatchPatchItemRequestOptions requestOptions = null)
        {
            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Patch,
                operationIndex: this.operations.Count,
                id: id,
                resourceStream: patchStream,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to patch an item into the batch.
        /// </summary>
        /// <param name="id">The cosmos item id.</param>
        /// <param name="patchOperations">Represents a list of operations to be sequentially applied to the referred Cosmos item.</param>
        /// <param name="requestOptions">(Optional) The options for the Patch request. <see cref="TransactionalBatchPatchItemRequestOptions"/>.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch PatchItem(
            string id,
            IReadOnlyList<PatchOperation> patchOperations,
            TransactionalBatchPatchItemRequestOptions requestOptions = null)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (patchOperations == null ||
                !patchOperations.Any())
            {
                throw new ArgumentNullException(nameof(patchOperations));
            }

            PatchSpec patchSpec = new PatchSpec(patchOperations, requestOptions);
            this.operations.Add(new ItemBatchOperation<PatchSpec>(
                operationType: OperationType.Patch,
                operationIndex: this.operations.Count,
                id: id,
                resource: patchSpec,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Documents;

    /// <summary>
    /// Queues operations on items that share a single partition key and executes them
    /// as one transactional batch request against the container.
    /// </summary>
    internal class BatchCore : TransactionalBatchInternal
    {
        private readonly PartitionKey partitionKey;
        private readonly ContainerInternal container;

        // Replaced with a fresh list by ExecuteAsync so the instance can be reused for a new batch.
        private List<ItemBatchOperation> operations;

        /// <summary>
        /// Initializes a new instance of the <see cref="BatchCore"/> class.
        /// </summary>
        /// <param name="container">Container that has items on which batch operations are to be performed.</param>
        /// <param name="partitionKey">The partition key for all items in the batch. <see cref="PartitionKey"/>.</param>
        internal BatchCore(
            ContainerInternal container,
            PartitionKey partitionKey)
        {
            this.container = container;
            this.partitionKey = partitionKey;
            this.operations = new List<ItemBatchOperation>();
        }

        /// <summary>
        /// Adds an operation to create the given item to the batch.
        /// </summary>
        /// <param name="item">The item to create; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the create request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch CreateItem<T>(
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Create,
                operationIndex: this.operations.Count,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to create an item from its serialized payload to the batch.
        /// </summary>
        /// <param name="streamPayload">Serialized item payload; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the create request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch CreateItemStream(
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Create,
                operationIndex: this.operations.Count,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to read the item with the given id to the batch.
        /// </summary>
        /// <param name="id">The cosmos item id; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the read request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch ReadItem(
            string id,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Read,
                operationIndex: this.operations.Count,
                id: id,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to upsert the given item to the batch.
        /// </summary>
        /// <param name="item">The item to upsert; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the upsert request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch UpsertItem<T>(
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Upsert,
                operationIndex: this.operations.Count,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to upsert an item from its serialized payload to the batch.
        /// </summary>
        /// <param name="streamPayload">Serialized item payload; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the upsert request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch UpsertItemStream(
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Upsert,
                operationIndex: this.operations.Count,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to replace the item with the given id to the batch.
        /// </summary>
        /// <param name="id">The cosmos item id; must not be null.</param>
        /// <param name="item">The replacement item; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the replace request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch ReplaceItem<T>(
            string id,
            T item,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            this.operations.Add(new ItemBatchOperation<T>(
                operationType: OperationType.Replace,
                operationIndex: this.operations.Count,
                id: id,
                resource: item,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to replace an item from its serialized payload to the batch.
        /// </summary>
        /// <param name="id">The cosmos item id; must not be null.</param>
        /// <param name="streamPayload">Serialized item payload; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the replace request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch ReplaceItemStream(
            string id,
            Stream streamPayload,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (streamPayload == null)
            {
                throw new ArgumentNullException(nameof(streamPayload));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Replace,
                operationIndex: this.operations.Count,
                id: id,
                resourceStream: streamPayload,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to delete the item with the given id to the batch.
        /// </summary>
        /// <param name="id">The cosmos item id; must not be null.</param>
        /// <param name="requestOptions">(Optional) Options for the delete request.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch DeleteItem(
            string id,
            TransactionalBatchItemRequestOptions requestOptions = null)
        {
            if (id == null)
            {
                throw new ArgumentNullException(nameof(id));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Delete,
                operationIndex: this.operations.Count,
                id: id,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Executes the batch at the Azure Cosmos service as an asynchronous operation,
        /// with no request options.
        /// </summary>
        /// <param name="cancellationToken">(Optional) <see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>An awaitable <see cref="TransactionalBatchResponse"/> which contains the completion status and results of each operation.</returns>
        public override Task<TransactionalBatchResponse> ExecuteAsync(
            CancellationToken cancellationToken = default)
        {
            return this.ExecuteAsync(
                requestOptions: null,
                cancellationToken: cancellationToken);
        }

        /// <summary>
        /// Executes the batch at the Azure Cosmos service as an asynchronous operation.
        /// </summary>
        /// <param name="requestOptions">Options that apply to the batch.</param>
        /// <param name="cancellationToken">(Optional) <see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>An awaitable <see cref="TransactionalBatchResponse"/> which contains the completion status and results of each operation.</returns>
        public override Task<TransactionalBatchResponse> ExecuteAsync(
            TransactionalBatchRequestOptions requestOptions,
            CancellationToken cancellationToken = default)
        {
            return this.container.ClientContext.OperationHelperAsync(
                nameof(ExecuteAsync),
                requestOptions,
                (trace) =>
                {
                    BatchExecutor executor = new BatchExecutor(
                        container: this.container,
                        partitionKey: this.partitionKey,
                        operations: this.operations,
                        batchOptions: requestOptions);

                    // Hand the queued operations to the executor and start a fresh list so
                    // this batch instance can be reused while the request is in flight.
                    this.operations = new List<ItemBatchOperation>();
                    return executor.ExecuteAsync(trace, cancellationToken);
                });
        }

        /// <summary>
        /// Adds an operation to patch an item into the batch.
        /// </summary>
        /// <param name="id">The cosmos item id.</param>
        /// <param name="patchStream">A <see cref="Stream"/> containing the patch specification.</param>
        /// <param name="requestOptions">(Optional) The options for the patch request. <see cref="TransactionalBatchPatchItemRequestOptions"/>.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public virtual TransactionalBatch PatchItemStream(
            string id,
            Stream patchStream,
            TransactionalBatchPatchItemRequestOptions requestOptions = null)
        {
            // Validate arguments for consistency with the other stream-based operations
            // (CreateItemStream/ReplaceItemStream) and with PatchItem; previously invalid
            // input was silently queued and only surfaced at execution time.
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException(nameof(id));
            }

            if (patchStream == null)
            {
                throw new ArgumentNullException(nameof(patchStream));
            }

            this.operations.Add(new ItemBatchOperation(
                operationType: OperationType.Patch,
                operationIndex: this.operations.Count,
                id: id,
                resourceStream: patchStream,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }

        /// <summary>
        /// Adds an operation to patch an item into the batch.
        /// </summary>
        /// <param name="id">The cosmos item id.</param>
        /// <param name="patchOperations">Represents a list of operations to be sequentially applied to the referred Cosmos item.</param>
        /// <param name="requestOptions">(Optional) The options for the Patch request. <see cref="TransactionalBatchPatchItemRequestOptions"/>.</param>
        /// <returns>The <see cref="TransactionalBatch"/> instance with the operation added.</returns>
        public override TransactionalBatch PatchItem(
            string id,
            IReadOnlyList<PatchOperation> patchOperations,
            TransactionalBatchPatchItemRequestOptions requestOptions = null)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException(nameof(id));
            }

            // A patch request must carry at least one operation.
            if (patchOperations == null ||
                !patchOperations.Any())
            {
                throw new ArgumentNullException(nameof(patchOperations));
            }

            PatchSpec patchSpec = new PatchSpec(patchOperations, requestOptions);
            this.operations.Add(new ItemBatchOperation<PatchSpec>(
                operationType: OperationType.Patch,
                operationIndex: this.operations.Count,
                id: id,
                resource: patchSpec,
                requestOptions: requestOptions,
                containerCore: this.container));

            return this;
        }
    }
}

Просмотреть файл

@ -1,144 +1,144 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Documents;
/// <summary>
/// Util methods for batch requests: stream materialization, batch validation,
/// and partition key range resolution.
/// </summary>
internal static class BatchExecUtils
{
    // Using the same buffer size as the Stream.DefaultCopyBufferSize
    private const int BufferSize = 81920;

    /// <summary>
    /// Converts a Stream to a Memory{byte} wrapping a byte array.
    /// </summary>
    /// <param name="stream">Stream to be converted to bytes.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> to cancel the operation.</param>
    /// <returns>A Memory{byte}.</returns>
    public static async Task<Memory<byte>> StreamToMemoryAsync(
        Stream stream,
        CancellationToken cancellationToken)
    {
        if (stream.CanSeek)
        {
            // Some derived implementations of MemoryStream (such as versions of RecyclableMemoryStream prior to 1.2.2 that we may be using)
            // return an incorrect response from TryGetBuffer. Use TryGetBuffer only on the MemoryStream type and not derived types.
            if (stream is MemoryStream memStream
                && memStream.GetType() == typeof(MemoryStream)
                && memStream.TryGetBuffer(out ArraySegment<byte> memBuffer))
            {
                // Fast path: expose the MemoryStream's internal buffer without copying.
                return memBuffer;
            }

            // Seekable stream: allocate once based on Length and read until full.
            // NOTE(review): assumes the stream is positioned at the start; if Position > 0
            // the tail of the returned buffer stays zero-filled — confirm against callers.
            byte[] bytes = new byte[stream.Length];
            int sum = 0;
            int count;
            while ((count = await stream.ReadAsync(bytes, sum, bytes.Length - sum, cancellationToken)) > 0)
            {
                sum += count;
            }

            return bytes;
        }
        else
        {
            // Non-seekable stream: copy in fixed-size chunks into a growable MemoryStream.
            int bufferSize = BatchExecUtils.BufferSize;
            byte[] buffer = new byte[bufferSize];
            using (MemoryStream memoryStream = new MemoryStream(bufferSize)) // using bufferSize as initial capacity as well
            {
                int sum = 0;
                int count;
                while ((count = await stream.ReadAsync(buffer, 0, bufferSize, cancellationToken)) > 0)
                {
                    sum += count;
#pragma warning disable VSTHRD103 // Call async methods when in an async method
                    memoryStream.Write(buffer, 0, count);
#pragma warning restore VSTHRD103 // Call async methods when in an async method
                }

                // Wrap the MemoryStream's backing array directly to avoid a ToArray() copy.
                return new Memory<byte>(memoryStream.GetBuffer(), 0, (int)memoryStream.Length);
            }
        }
    }

    /// <summary>
    /// Validates the operations and batch options; throws <see cref="ArgumentException"/>
    /// with the validation message when the batch is not valid.
    /// </summary>
    /// <param name="operations">Operations queued in the batch.</param>
    /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
    public static void EnsureValid(
        IReadOnlyList<ItemBatchOperation> operations,
        RequestOptions batchOptions)
    {
        string errorMessage = BatchExecUtils.IsValid(operations, batchOptions);

        if (errorMessage != null)
        {
            throw new ArgumentException(errorMessage);
        }
    }

    /// <summary>
    /// Checks the batch for validity. Returns an error message describing the first
    /// category of violation found, or null when the batch is valid.
    /// </summary>
    /// <param name="operations">Operations queued in the batch.</param>
    /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
    /// <returns>An error message, or null when valid.</returns>
    internal static string IsValid(
        IReadOnlyList<ItemBatchOperation> operations,
        RequestOptions batchOptions)
    {
        string errorMessage = null;

        // An empty batch is not a valid request.
        if (operations.Count == 0)
        {
            errorMessage = ClientResources.BatchNoOperations;
        }

        // ETag preconditions are not supported at the batch (request) level.
        if (errorMessage == null && batchOptions != null)
        {
            if (batchOptions.IfMatchEtag != null || batchOptions.IfNoneMatchEtag != null)
            {
                errorMessage = ClientResources.BatchRequestOptionNotSupported;
            }
        }

        if (errorMessage == null)
        {
            foreach (ItemBatchOperation operation in operations)
            {
                // Non-short-circuiting '|' is deliberate: all three TryGetValue calls must
                // execute so every out variable is definitely assigned before use below.
                if (operation.RequestOptions != null
                    && operation.RequestOptions.Properties != null
                    && (operation.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj)
                    | operation.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKeyString, out object epkStrObj)
                    | operation.RequestOptions.Properties.TryGetValue(HttpConstants.HttpHeaders.PartitionKey, out object pkStrObj)))
                {
                    byte[] epk = epkObj as byte[];
                    string partitionKeyJsonString = pkStrObj as string;

                    // The EPK binary/string properties must be supplied as a consistent pair.
                    if ((epk == null && partitionKeyJsonString == null) || !(epkStrObj is string epkStr))
                    {
                        errorMessage = string.Format(
                            ClientResources.EpkPropertiesPairingExpected,
                            WFConstants.BackendHeaders.EffectivePartitionKey,
                            WFConstants.BackendHeaders.EffectivePartitionKeyString);
                    }

                    // A per-operation partition key may not be combined with EPK routing.
                    if (operation.PartitionKey != null && !operation.RequestOptions.IsEffectivePartitionKeyRouting)
                    {
                        errorMessage = ClientResources.PKAndEpkSetTogether;
                    }
                }
            }
        }

        return errorMessage;
    }

    /// <summary>
    /// Resolves the id of the partition key range that owns the given partition key.
    /// </summary>
    public static string GetPartitionKeyRangeId(PartitionKey partitionKey, PartitionKeyDefinition partitionKeyDefinition, Routing.CollectionRoutingMap collectionRoutingMap)
    {
        string effectivePartitionKey = partitionKey.InternalKey.GetEffectivePartitionKeyString(partitionKeyDefinition);
        return collectionRoutingMap.GetRangeByEffectivePartitionKey(effectivePartitionKey).Id;
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Documents;

    /// <summary>
    /// Util methods for batch requests: stream materialization, batch validation,
    /// and partition key range resolution.
    /// </summary>
    internal static class BatchExecUtils
    {
        // Using the same buffer size as the Stream.DefaultCopyBufferSize
        private const int BufferSize = 81920;

        /// <summary>
        /// Converts a Stream to a Memory{byte} wrapping a byte array.
        /// </summary>
        /// <param name="stream">Stream to be converted to bytes.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> to cancel the operation.</param>
        /// <returns>A Memory{byte}.</returns>
        public static async Task<Memory<byte>> StreamToMemoryAsync(
            Stream stream,
            CancellationToken cancellationToken)
        {
            if (stream.CanSeek)
            {
                // Some derived implementations of MemoryStream (such as versions of RecyclableMemoryStream prior to 1.2.2 that we may be using)
                // return an incorrect response from TryGetBuffer. Use TryGetBuffer only on the MemoryStream type and not derived types.
                if (stream is MemoryStream memStream
                    && memStream.GetType() == typeof(MemoryStream)
                    && memStream.TryGetBuffer(out ArraySegment<byte> memBuffer))
                {
                    // Fast path: expose the MemoryStream's internal buffer without copying.
                    return memBuffer;
                }

                // Seekable stream: allocate once based on Length and read until full.
                // NOTE(review): assumes the stream is positioned at the start; if Position > 0
                // the tail of the returned buffer stays zero-filled — confirm against callers.
                byte[] bytes = new byte[stream.Length];
                int sum = 0;
                int count;
                while ((count = await stream.ReadAsync(bytes, sum, bytes.Length - sum, cancellationToken)) > 0)
                {
                    sum += count;
                }

                return bytes;
            }
            else
            {
                // Non-seekable stream: copy in fixed-size chunks into a growable MemoryStream.
                int bufferSize = BatchExecUtils.BufferSize;
                byte[] buffer = new byte[bufferSize];
                using (MemoryStream memoryStream = new MemoryStream(bufferSize)) // using bufferSize as initial capacity as well
                {
                    int sum = 0;
                    int count;
                    while ((count = await stream.ReadAsync(buffer, 0, bufferSize, cancellationToken)) > 0)
                    {
                        sum += count;
#pragma warning disable VSTHRD103 // Call async methods when in an async method
                        memoryStream.Write(buffer, 0, count);
#pragma warning restore VSTHRD103 // Call async methods when in an async method
                    }

                    // Wrap the MemoryStream's backing array directly to avoid a ToArray() copy.
                    return new Memory<byte>(memoryStream.GetBuffer(), 0, (int)memoryStream.Length);
                }
            }
        }

        /// <summary>
        /// Validates the operations and batch options; throws <see cref="ArgumentException"/>
        /// with the validation message when the batch is not valid.
        /// </summary>
        /// <param name="operations">Operations queued in the batch.</param>
        /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
        public static void EnsureValid(
            IReadOnlyList<ItemBatchOperation> operations,
            RequestOptions batchOptions)
        {
            string errorMessage = BatchExecUtils.IsValid(operations, batchOptions);

            if (errorMessage != null)
            {
                throw new ArgumentException(errorMessage);
            }
        }

        /// <summary>
        /// Checks the batch for validity. Returns an error message describing the first
        /// category of violation found, or null when the batch is valid.
        /// </summary>
        /// <param name="operations">Operations queued in the batch.</param>
        /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
        /// <returns>An error message, or null when valid.</returns>
        internal static string IsValid(
            IReadOnlyList<ItemBatchOperation> operations,
            RequestOptions batchOptions)
        {
            string errorMessage = null;

            // An empty batch is not a valid request.
            if (operations.Count == 0)
            {
                errorMessage = ClientResources.BatchNoOperations;
            }

            // ETag preconditions are not supported at the batch (request) level.
            if (errorMessage == null && batchOptions != null)
            {
                if (batchOptions.IfMatchEtag != null || batchOptions.IfNoneMatchEtag != null)
                {
                    errorMessage = ClientResources.BatchRequestOptionNotSupported;
                }
            }

            if (errorMessage == null)
            {
                foreach (ItemBatchOperation operation in operations)
                {
                    // Non-short-circuiting '|' is deliberate: all three TryGetValue calls must
                    // execute so every out variable is definitely assigned before use below.
                    if (operation.RequestOptions != null
                        && operation.RequestOptions.Properties != null
                        && (operation.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj)
                        | operation.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKeyString, out object epkStrObj)
                        | operation.RequestOptions.Properties.TryGetValue(HttpConstants.HttpHeaders.PartitionKey, out object pkStrObj)))
                    {
                        byte[] epk = epkObj as byte[];
                        string partitionKeyJsonString = pkStrObj as string;

                        // The EPK binary/string properties must be supplied as a consistent pair.
                        if ((epk == null && partitionKeyJsonString == null) || !(epkStrObj is string epkStr))
                        {
                            errorMessage = string.Format(
                                ClientResources.EpkPropertiesPairingExpected,
                                WFConstants.BackendHeaders.EffectivePartitionKey,
                                WFConstants.BackendHeaders.EffectivePartitionKeyString);
                        }

                        // A per-operation partition key may not be combined with EPK routing.
                        if (operation.PartitionKey != null && !operation.RequestOptions.IsEffectivePartitionKeyRouting)
                        {
                            errorMessage = ClientResources.PKAndEpkSetTogether;
                        }
                    }
                }
            }

            return errorMessage;
        }

        /// <summary>
        /// Resolves the id of the partition key range that owns the given partition key.
        /// </summary>
        public static string GetPartitionKeyRangeId(PartitionKey partitionKey, PartitionKeyDefinition partitionKeyDefinition, Routing.CollectionRoutingMap collectionRoutingMap)
        {
            string effectivePartitionKey = partitionKey.InternalKey.GetEffectivePartitionKeyString(partitionKeyDefinition);
            return collectionRoutingMap.GetRangeByEffectivePartitionKey(effectivePartitionKey).Id;
        }
    }
}

Просмотреть файл

@ -1,114 +1,114 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;
    using Microsoft.Azure.Documents;

    /// <summary>
    /// Executes a list of item batch operations for a single partition key as one
    /// server batch request and converts the response into a <see cref="TransactionalBatchResponse"/>.
    /// </summary>
    internal sealed class BatchExecutor
    {
        private readonly ContainerInternal container;
        private readonly CosmosClientContext clientContext;
        private readonly IReadOnlyList<ItemBatchOperation> inputOperations;
        private readonly PartitionKey partitionKey;
        private readonly RequestOptions batchOptions;

        /// <summary>
        /// Initializes a new instance of the <see cref="BatchExecutor"/> class.
        /// </summary>
        /// <param name="container">Container on whose items the batch operations are performed.</param>
        /// <param name="partitionKey">Partition key shared by all operations in the batch.</param>
        /// <param name="operations">Operations to execute.</param>
        /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
        public BatchExecutor(
            ContainerInternal container,
            PartitionKey partitionKey,
            IReadOnlyList<ItemBatchOperation> operations,
            RequestOptions batchOptions)
        {
            this.container = container;
            this.clientContext = this.container.ClientContext;
            this.inputOperations = operations;
            this.partitionKey = partitionKey;
            this.batchOptions = batchOptions;
        }

        /// <summary>
        /// Validates the batch, builds the single-partition server request, and executes it.
        /// </summary>
        /// <param name="trace">Parent trace; a child trace is opened for this execution.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>The response of the executed batch.</returns>
        public async Task<TransactionalBatchResponse> ExecuteAsync(ITrace trace, CancellationToken cancellationToken)
        {
            using (ITrace executeNextBatchTrace = trace.StartChild("Execute Next Batch", TraceComponent.Batch, Tracing.TraceLevel.Info))
            {
                // Throws ArgumentException when the operations/options combination is invalid.
                BatchExecUtils.EnsureValid(this.inputOperations, this.batchOptions);

                PartitionKey? serverRequestPartitionKey = this.partitionKey;
                if (this.batchOptions != null && this.batchOptions.IsEffectivePartitionKeyRouting)
                {
                    // With effective-partition-key routing the partition key is omitted from
                    // the server request (routing data comes via request options properties).
                    serverRequestPartitionKey = null;
                }

                SinglePartitionKeyServerBatchRequest serverRequest;
                serverRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                    serverRequestPartitionKey,
                    new ArraySegment<ItemBatchOperation>(this.inputOperations.ToArray()),
                    this.clientContext.SerializerCore,
                    executeNextBatchTrace,
                    cancellationToken);

                return await this.ExecuteServerRequestAsync(
                    serverRequest,
                    executeNextBatchTrace,
                    cancellationToken);
            }
        }

        /// <summary>
        /// Makes a single batch request to the server.
        /// </summary>
        /// <param name="serverRequest">A server request with a set of operations on items.</param>
        /// <param name="trace">The trace.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>Response from the server.</returns>
        private async Task<TransactionalBatchResponse> ExecuteServerRequestAsync(
            SinglePartitionKeyServerBatchRequest serverRequest,
            ITrace trace,
            CancellationToken cancellationToken)
        {
            using (ITrace executeBatchTrace = trace.StartChild("Execute Batch Request", TraceComponent.Batch, Tracing.TraceLevel.Info))
            {
                // TransferBodyStream hands ownership of the payload to this method; disposed here.
                using (Stream serverRequestPayload = serverRequest.TransferBodyStream())
                {
                    Debug.Assert(serverRequestPayload != null, "Server request payload expected to be non-null");
                    ResponseMessage responseMessage = await this.clientContext.ProcessResourceOperationStreamAsync(
                        this.container.LinkUri,
                        ResourceType.Document,
                        OperationType.Batch,
                        this.batchOptions,
                        this.container,
                        serverRequest.PartitionKey.HasValue ? new FeedRangePartitionKey(serverRequest.PartitionKey.Value) : null,
                        serverRequestPayload,
                        requestMessage =>
                        {
                            // Mark the request as an atomic, ordered batch.
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchRequest, bool.TrueString);
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchAtomic, bool.TrueString);
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchOrdered, bool.TrueString);
                        },
                        executeBatchTrace,
                        cancellationToken);

                    return await TransactionalBatchResponse.FromResponseMessageAsync(
                        responseMessage,
                        serverRequest,
                        this.clientContext.SerializerCore,
                        shouldPromoteOperationStatus: true,
                        executeBatchTrace,
                        cancellationToken);
                }
            }
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;
    using Microsoft.Azure.Documents;

    /// <summary>
    /// Executes a list of item batch operations for a single partition key as one
    /// server batch request and converts the response into a <see cref="TransactionalBatchResponse"/>.
    /// </summary>
    internal sealed class BatchExecutor
    {
        private readonly ContainerInternal container;
        private readonly CosmosClientContext clientContext;
        private readonly IReadOnlyList<ItemBatchOperation> inputOperations;
        private readonly PartitionKey partitionKey;
        private readonly RequestOptions batchOptions;

        /// <summary>
        /// Initializes a new instance of the <see cref="BatchExecutor"/> class.
        /// </summary>
        /// <param name="container">Container on whose items the batch operations are performed.</param>
        /// <param name="partitionKey">Partition key shared by all operations in the batch.</param>
        /// <param name="operations">Operations to execute.</param>
        /// <param name="batchOptions">Options applied to the whole batch; may be null.</param>
        public BatchExecutor(
            ContainerInternal container,
            PartitionKey partitionKey,
            IReadOnlyList<ItemBatchOperation> operations,
            RequestOptions batchOptions)
        {
            this.container = container;
            this.clientContext = this.container.ClientContext;
            this.inputOperations = operations;
            this.partitionKey = partitionKey;
            this.batchOptions = batchOptions;
        }

        /// <summary>
        /// Validates the batch, builds the single-partition server request, and executes it.
        /// </summary>
        /// <param name="trace">Parent trace; a child trace is opened for this execution.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>The response of the executed batch.</returns>
        public async Task<TransactionalBatchResponse> ExecuteAsync(ITrace trace, CancellationToken cancellationToken)
        {
            using (ITrace executeNextBatchTrace = trace.StartChild("Execute Next Batch", TraceComponent.Batch, Tracing.TraceLevel.Info))
            {
                // Throws ArgumentException when the operations/options combination is invalid.
                BatchExecUtils.EnsureValid(this.inputOperations, this.batchOptions);

                PartitionKey? serverRequestPartitionKey = this.partitionKey;
                if (this.batchOptions != null && this.batchOptions.IsEffectivePartitionKeyRouting)
                {
                    // With effective-partition-key routing the partition key is omitted from
                    // the server request (routing data comes via request options properties).
                    serverRequestPartitionKey = null;
                }

                SinglePartitionKeyServerBatchRequest serverRequest;
                serverRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                    serverRequestPartitionKey,
                    new ArraySegment<ItemBatchOperation>(this.inputOperations.ToArray()),
                    this.clientContext.SerializerCore,
                    executeNextBatchTrace,
                    cancellationToken);

                return await this.ExecuteServerRequestAsync(
                    serverRequest,
                    executeNextBatchTrace,
                    cancellationToken);
            }
        }

        /// <summary>
        /// Makes a single batch request to the server.
        /// </summary>
        /// <param name="serverRequest">A server request with a set of operations on items.</param>
        /// <param name="trace">The trace.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>Response from the server.</returns>
        private async Task<TransactionalBatchResponse> ExecuteServerRequestAsync(
            SinglePartitionKeyServerBatchRequest serverRequest,
            ITrace trace,
            CancellationToken cancellationToken)
        {
            using (ITrace executeBatchTrace = trace.StartChild("Execute Batch Request", TraceComponent.Batch, Tracing.TraceLevel.Info))
            {
                // TransferBodyStream hands ownership of the payload to this method; disposed here.
                using (Stream serverRequestPayload = serverRequest.TransferBodyStream())
                {
                    Debug.Assert(serverRequestPayload != null, "Server request payload expected to be non-null");
                    ResponseMessage responseMessage = await this.clientContext.ProcessResourceOperationStreamAsync(
                        this.container.LinkUri,
                        ResourceType.Document,
                        OperationType.Batch,
                        this.batchOptions,
                        this.container,
                        serverRequest.PartitionKey.HasValue ? new FeedRangePartitionKey(serverRequest.PartitionKey.Value) : null,
                        serverRequestPayload,
                        requestMessage =>
                        {
                            // Mark the request as an atomic, ordered batch.
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchRequest, bool.TrueString);
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchAtomic, bool.TrueString);
                            requestMessage.Headers.Add(HttpConstants.HttpHeaders.IsBatchOrdered, bool.TrueString);
                        },
                        executeBatchTrace,
                        cancellationToken);

                    return await TransactionalBatchResponse.FromResponseMessageAsync(
                        responseMessage,
                        serverRequest,
                        this.clientContext.SerializerCore,
                        shouldPromoteOperationStatus: true,
                        executeBatchTrace,
                        cancellationToken);
                }
            }
        }
    }
}

Просмотреть файл

@ -1,58 +1,58 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System.IO;
using System.Reflection;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.Schemas;
/// <summary>
/// Loads the HybridRow batch schemas embedded in the assembly and exposes the
/// resolved layouts used to serialize batch operations and results.
/// </summary>
internal static class BatchSchemaProvider
{
    static BatchSchemaProvider()
    {
        // Parse the embedded schema namespace once; the layouts below depend on it,
        // so the assignment order in this static constructor matters.
        string json = BatchSchemaProvider.GetEmbeddedResource(@"Batch\HybridRowBatchSchemas.json");
        BatchSchemaProvider.BatchSchemaNamespace = Namespace.Parse(json);
        BatchSchemaProvider.BatchLayoutResolver = new LayoutResolverNamespace(BatchSchemaProvider.BatchSchemaNamespace);
        BatchSchemaProvider.BatchOperationLayout = BatchSchemaProvider.BatchLayoutResolver.Resolve(BatchSchemaProvider.BatchSchemaNamespace.Schemas.Find(x => x.Name == "BatchOperation").SchemaId);
        BatchSchemaProvider.BatchResultLayout = BatchSchemaProvider.BatchLayoutResolver.Resolve(BatchSchemaProvider.BatchSchemaNamespace.Schemas.Find(x => x.Name == "BatchResult").SchemaId);
    }

    // Parsed schema namespace for the batch API.
    public static Namespace BatchSchemaNamespace { get; private set; }

    // Resolver over the parsed schema namespace.
    public static LayoutResolverNamespace BatchLayoutResolver { get; private set; }

    // Layout of a single batch operation row.
    public static Layout BatchOperationLayout { get; private set; }

    // Layout of a single batch result row.
    public static Layout BatchResultLayout { get; private set; }

    /// <summary>
    /// Reads the named embedded resource as text, or returns null when it is absent.
    /// </summary>
    private static string GetEmbeddedResource(string resourceName)
    {
        Assembly assembly = Assembly.GetAssembly(typeof(BatchSchemaProvider));

        // Assumes BatchSchemaProvider is in the default namespace of the assembly.
        resourceName = BatchSchemaProvider.FormatResourceName(typeof(BatchSchemaProvider).Namespace, resourceName);
        using (Stream resourceStream = assembly.GetManifestResourceStream(resourceName))
        {
            if (resourceStream == null)
            {
                return null;
            }

            using (StreamReader reader = new StreamReader(resourceStream))
            {
                return reader.ReadToEnd();
            }
        }
    }

    /// <summary>
    /// Builds a manifest resource name: namespace + path with spaces/separators sanitized.
    /// </summary>
    private static string FormatResourceName(string namespaceName, string resourceName)
    {
        return namespaceName + "." + resourceName.Replace(" ", "_").Replace("\\", ".").Replace("/", ".");
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
    using System.IO;
    using System.Reflection;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.Schemas;

    /// <summary>
    /// Loads the HybridRow batch schemas embedded in the assembly and exposes the
    /// resolved layouts used to serialize batch operations and results.
    /// </summary>
    internal static class BatchSchemaProvider
    {
        static BatchSchemaProvider()
        {
            // Parse the embedded schema namespace once; the layouts below depend on it,
            // so the assignment order in this static constructor matters.
            string schemaJson = BatchSchemaProvider.GetEmbeddedResource(@"Batch\HybridRowBatchSchemas.json");
            BatchSchemaProvider.BatchSchemaNamespace = Namespace.Parse(schemaJson);
            BatchSchemaProvider.BatchLayoutResolver = new LayoutResolverNamespace(BatchSchemaProvider.BatchSchemaNamespace);
            BatchSchemaProvider.BatchOperationLayout = BatchSchemaProvider.BatchLayoutResolver.Resolve(BatchSchemaProvider.BatchSchemaNamespace.Schemas.Find(x => x.Name == "BatchOperation").SchemaId);
            BatchSchemaProvider.BatchResultLayout = BatchSchemaProvider.BatchLayoutResolver.Resolve(BatchSchemaProvider.BatchSchemaNamespace.Schemas.Find(x => x.Name == "BatchResult").SchemaId);
        }

        // Parsed schema namespace for the batch API.
        public static Namespace BatchSchemaNamespace { get; private set; }

        // Resolver over the parsed schema namespace.
        public static LayoutResolverNamespace BatchLayoutResolver { get; private set; }

        // Layout of a single batch operation row.
        public static Layout BatchOperationLayout { get; private set; }

        // Layout of a single batch result row.
        public static Layout BatchResultLayout { get; private set; }

        /// <summary>
        /// Reads the named embedded resource as text, or returns null when it is absent.
        /// </summary>
        private static string GetEmbeddedResource(string resourceName)
        {
            Assembly assembly = Assembly.GetAssembly(typeof(BatchSchemaProvider));

            // Assumes BatchSchemaProvider is in the default namespace of the assembly.
            string qualifiedName = BatchSchemaProvider.FormatResourceName(typeof(BatchSchemaProvider).Namespace, resourceName);
            using (Stream manifestStream = assembly.GetManifestResourceStream(qualifiedName))
            {
                if (manifestStream != null)
                {
                    using (StreamReader textReader = new StreamReader(manifestStream))
                    {
                        return textReader.ReadToEnd();
                    }
                }

                return null;
            }
        }

        /// <summary>
        /// Builds a manifest resource name: namespace + path with spaces/separators sanitized.
        /// </summary>
        private static string FormatResourceName(string namespaceName, string resourceName)
        {
            string sanitized = resourceName.Replace(" ", "_").Replace("\\", ".").Replace("/", ".");
            return $"{namespaceName}.{sanitized}";
        }
    }
}

Просмотреть файл

@ -1,146 +1,146 @@
{
"name": "Microsoft.Azure.Cosmos.BatchApi",
"version": "v1",
"schemas": [
{
"name": "BatchOperation",
"id": 2145473648,
"type": "schema",
"properties": [
{
"path": "operationType",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "resourceType",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "partitionKey",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "effectivePartitionKey",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "id",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "binaryId",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "resourceBody",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "indexingDirective",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "ifMatch",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "ifNoneMatch",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "timeToLiveInSeconds",
"type": {
"type": "int32",
"storage": "sparse"
}
},
{
"path": "minimalReturnPreference",
"type": {
"type": "bool",
"storage": "sparse"
}
}
]
},
{
"name": "BatchResult",
"id": 2145473649,
"type": "schema",
"properties": [
{
"path": "statusCode",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "subStatusCode",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "eTag",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "resourceBody",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "retryAfterMilliseconds",
"type": {
"type": "uint32",
"storage": "sparse"
}
},
{
"path": "requestCharge",
"type": {
"type": "float64",
"storage": "sparse"
}
}
]
}
]
{
"name": "Microsoft.Azure.Cosmos.BatchApi",
"version": "v1",
"schemas": [
{
"name": "BatchOperation",
"id": 2145473648,
"type": "schema",
"properties": [
{
"path": "operationType",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "resourceType",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "partitionKey",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "effectivePartitionKey",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "id",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "binaryId",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "resourceBody",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "indexingDirective",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "ifMatch",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "ifNoneMatch",
"type": {
"type": "utf8",
"storage": "sparse"
}
},
{
"path": "timeToLiveInSeconds",
"type": {
"type": "int32",
"storage": "sparse"
}
},
{
"path": "minimalReturnPreference",
"type": {
"type": "bool",
"storage": "sparse"
}
}
]
},
{
"name": "BatchResult",
"id": 2145473649,
"type": "schema",
"properties": [
{
"path": "statusCode",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "subStatusCode",
"type": {
"type": "int32",
"storage": "fixed"
}
},
{
"path": "eTag",
"type": {
"type": "utf8",
"storage": "variable"
}
},
{
"path": "resourceBody",
"type": {
"type": "binary",
"storage": "variable"
}
},
{
"path": "retryAfterMilliseconds",
"type": {
"type": "uint32",
"storage": "sparse"
}
},
{
"path": "requestCharge",
"type": {
"type": "float64",
"storage": "sparse"
}
}
]
}
]
}

Просмотреть файл

@ -1,414 +1,414 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Handlers;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
using Microsoft.Azure.Cosmos.Tracing;
using Microsoft.Azure.Documents;
/// <summary>
/// Represents an operation on an item which will be executed as part of a batch request
/// on a container.
/// </summary>
internal class ItemBatchOperation : IDisposable
{
#pragma warning disable SA1401 // Fields should be private
    // Materialized resource payload; populated lazily by MaterializeResourceAsync.
    protected Memory<byte> body;
#pragma warning restore SA1401 // Fields should be private

    private bool isDisposed;

    /// <summary>
    /// Initializes a new instance of the <see cref="ItemBatchOperation"/> class
    /// targeting an explicit partition key.
    /// </summary>
    /// <param name="operationType">Type of the operation within the batch.</param>
    /// <param name="operationIndex">Position of the operation within the batch.</param>
    /// <param name="partitionKey">Partition key the operation applies to.</param>
    /// <param name="id">Optional id of the item the operation targets.</param>
    /// <param name="resourceStream">Optional stream holding the serialized resource body.</param>
    /// <param name="requestOptions">Optional per-operation request options.</param>
    /// <param name="cosmosClientContext">Client context used to resolve client-level options.</param>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        PartitionKey partitionKey,
        string id = null,
        Stream resourceStream = null,
        TransactionalBatchItemRequestOptions requestOptions = null,
        CosmosClientContext cosmosClientContext = null)
    {
        this.OperationType = operationType;
        this.OperationIndex = operationIndex;
        this.PartitionKey = partitionKey;
        this.Id = id;
        this.ResourceStream = resourceStream;
        this.RequestOptions = requestOptions;
        this.ClientContext = cosmosClientContext;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ItemBatchOperation"/> class
    /// bound to a container; the client context is taken from the container.
    /// </summary>
    /// <param name="operationType">Type of the operation within the batch.</param>
    /// <param name="operationIndex">Position of the operation within the batch.</param>
    /// <param name="containerCore">Container the operation targets; supplies the client context.</param>
    /// <param name="id">Optional id of the item the operation targets.</param>
    /// <param name="resourceStream">Optional stream holding the serialized resource body.</param>
    /// <param name="requestOptions">Optional per-operation request options.</param>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        ContainerInternal containerCore,
        string id = null,
        Stream resourceStream = null,
        TransactionalBatchItemRequestOptions requestOptions = null)
    {
        this.OperationType = operationType;
        this.OperationIndex = operationIndex;
        this.ContainerInternal = containerCore;
        this.Id = id;
        this.ResourceStream = resourceStream;
        this.RequestOptions = requestOptions;
        this.ClientContext = containerCore.ClientContext;
    }

    // Partition key for the operation; settable internally after construction.
    public PartitionKey? PartitionKey { get; internal set; }

    // Id of the item targeted by the operation; null when not applicable.
    public string Id { get; }

    public OperationType OperationType { get; }

    // Raw serialized resource; disposed by this instance (see Dispose(bool)).
    public Stream ResourceStream { get; protected set; }

    public TransactionalBatchItemRequestOptions RequestOptions { get; }

    // Position of this operation inside the batch request body.
    public int OperationIndex { get; internal set; }

    internal ContainerInternal ContainerInternal { get; }

    // JSON form of the partition key; when set, it is serialized by WriteOperation.
    internal string PartitionKeyJson { get; set; }

    internal Documents.PartitionKey ParsedPartitionKey { get; set; }

    private readonly CosmosClientContext ClientContext;

    // Materialized resource payload. Reading it while a ResourceStream is still
    // pending materialization is a programming error, hence the Debug.Assert.
    internal Memory<byte> ResourceBody
    {
        get
        {
            Debug.Assert(
                this.ResourceStream == null || !this.body.IsEmpty,
                "ResourceBody read without materialization of ResourceStream");
            return this.body;
        }
        set => this.body = value;
    }

    /// <summary>
    /// Operational context used in stream operations.
    /// </summary>
    /// <seealso cref="BatchAsyncBatcher"/>
    /// <seealso cref="BatchAsyncStreamer"/>
    /// <seealso cref="BatchAsyncContainerExecutor"/>
    internal ItemBatchOperationContext Context { get; private set; }

    internal ITrace Trace { get; set; }

    /// <summary>
    /// Disposes the current <see cref="ItemBatchOperation"/>.
    /// </summary>
    public void Dispose()
    {
        this.Dispose(true);
    }

    /// <summary>
    /// Serializes a single operation into the HybridRow <paramref name="writer"/>.
    /// The field write order must be preserved — it mirrors the backend batch
    /// operation schema. Returns the first non-success <see cref="Result"/>, if any.
    /// </summary>
    /// <remarks>
    /// <paramref name="typeArg"/> is required by the RowWriter.WriteBuffer delegate
    /// signature but is not used here.
    /// </remarks>
    internal static Result WriteOperation(ref RowWriter writer, TypeArgument typeArg, ItemBatchOperation operation)
    {
        bool pkWritten = false;
        Result r = writer.WriteInt32("operationType", (int)operation.OperationType);
        if (r != Result.Success)
        {
            return r;
        }
        r = writer.WriteInt32("resourceType", (int)ResourceType.Document);
        if (r != Result.Success)
        {
            return r;
        }
        if (operation.PartitionKeyJson != null)
        {
            r = writer.WriteString("partitionKey", operation.PartitionKeyJson);
            if (r != Result.Success)
            {
                return r;
            }
            // Remember so a partition key from request-option properties is not
            // written a second time below.
            pkWritten = true;
        }
        if (operation.Id != null)
        {
            r = writer.WriteString("id", operation.Id);
            if (r != Result.Success)
            {
                return r;
            }
        }
        if (!operation.ResourceBody.IsEmpty)
        {
            r = writer.WriteBinary("resourceBody", operation.ResourceBody.Span);
            if (r != Result.Success)
            {
                return r;
            }
        }
        if (operation.RequestOptions != null)
        {
            TransactionalBatchItemRequestOptions options = operation.RequestOptions;
            if (options.IndexingDirective.HasValue)
            {
                string indexingDirectiveString = IndexingDirectiveStrings.FromIndexingDirective(options.IndexingDirective.Value);
                r = writer.WriteString("indexingDirective", indexingDirectiveString);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            // ifMatch takes precedence; ifNoneMatch is written only when no ifMatch etag is set.
            if (options.IfMatchEtag != null)
            {
                r = writer.WriteString("ifMatch", options.IfMatchEtag);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            else if (options.IfNoneMatchEtag != null)
            {
                r = writer.WriteString("ifNoneMatch", options.IfNoneMatchEtag);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            if (options.Properties != null)
            {
                // Backend pass-through values carried in request-option properties;
                // each is written only when present and of the expected runtime type.
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj))
                {
                    if (binaryIdObj is byte[] binaryId)
                    {
                        r = writer.WriteBinary("binaryId", binaryId);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj))
                {
                    if (epkObj is byte[] epk)
                    {
                        r = writer.WriteBinary("effectivePartitionKey", epk);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                // A partition key supplied via properties is only used when one was
                // not already written from PartitionKeyJson above.
                if (!pkWritten && options.Properties.TryGetValue(
                        HttpConstants.HttpHeaders.PartitionKey,
                        out object pkStrObj))
                {
                    if (pkStrObj is string pkString)
                    {
                        r = writer.WriteString("partitionKey", pkString);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.TimeToLiveInSeconds, out object ttlObj))
                {
                    if (ttlObj is string ttlStr && int.TryParse(ttlStr, out int ttl))
                    {
                        r = writer.WriteInt32("timeToLiveInSeconds", ttl);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
            }
        }
        // Ask the backend to omit the resource in the response when the client is
        // configured for no-content responses for this operation type.
        if (RequestInvokerHandler.ShouldSetNoContentResponseHeaders(operation.RequestOptions,
            operation.ClientContext?.ClientOptions,
            operation.OperationType,
            ResourceType.Document))
        {
            r = writer.WriteBool("minimalReturnPreference", true);
            if (r != Result.Success)
            {
                return r;
            }
        }
        return Result.Success;
    }

    /// <summary>
    /// Computes and returns an approximation for the length of this <see cref="ItemBatchOperation"/>.
    /// when serialized.
    /// </summary>
    /// <returns>An under-estimate of the length.</returns>
    internal int GetApproximateSerializedLength()
    {
        int length = 0;
        if (this.PartitionKeyJson != null)
        {
            length += this.PartitionKeyJson.Length;
        }
        if (this.Id != null)
        {
            length += this.Id.Length;
        }
        length += this.body.Length;
        if (this.RequestOptions != null)
        {
            if (this.RequestOptions.IfMatchEtag != null)
            {
                length += this.RequestOptions.IfMatchEtag.Length;
            }
            if (this.RequestOptions.IfNoneMatchEtag != null)
            {
                length += this.RequestOptions.IfNoneMatchEtag.Length;
            }
            if (this.RequestOptions.IndexingDirective.HasValue)
            {
                length += 7; // "Default", "Include", "Exclude" are possible values
            }
            if (this.RequestOptions.Properties != null)
            {
                if (this.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj))
                {
                    if (binaryIdObj is byte[] binaryId)
                    {
                        length += binaryId.Length;
                    }
                }
                if (this.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj))
                {
                    if (epkObj is byte[] epk)
                    {
                        length += epk.Length;
                    }
                }
            }
        }
        return length;
    }

    /// <summary>
    /// Materializes the operation's resource into a Memory{byte} wrapping a byte array.
    /// </summary>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> for cancellation.</param>
    internal virtual async Task MaterializeResourceAsync(CosmosSerializerCore serializerCore, CancellationToken cancellationToken)
    {
        // No-op when already materialized or when there is no stream to read.
        if (this.body.IsEmpty && this.ResourceStream != null)
        {
            this.body = await BatchExecUtils.StreamToMemoryAsync(this.ResourceStream, cancellationToken);
        }
    }

    /// <summary>
    /// Attached a context to the current operation to track resolution.
    /// </summary>
    /// <exception cref="InvalidOperationException">If the operation already had an attached context.</exception>
    internal void AttachContext(ItemBatchOperationContext context)
    {
        if (this.Context != null)
        {
            throw new InvalidOperationException("Cannot modify the current context of an operation.");
        }
        this.Context = context;
    }

    /// <summary>
    /// Disposes the disposable members held by this class.
    /// </summary>
    /// <param name="disposing">Indicates whether to dispose managed resources or not.</param>
    protected virtual void Dispose(bool disposing)
    {
        if (disposing && !this.isDisposed)
        {
            this.isDisposed = true;
            if (this.ResourceStream != null)
            {
                this.ResourceStream.Dispose();
                this.ResourceStream = null;
            }
        }
    }
}
#pragma warning disable SA1402 // File may only contain a single type
internal class ItemBatchOperation<T> : ItemBatchOperation
#pragma warning restore SA1402 // File may only contain a single type
{
    /// <summary>
    /// Creates a batch operation carrying a typed resource, targeting an explicit partition key.
    /// </summary>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        PartitionKey partitionKey,
        T resource,
        string id = null,
        TransactionalBatchItemRequestOptions requestOptions = null,
        CosmosClientContext cosmosClientContext = null)
        : base(operationType, operationIndex, partitionKey: partitionKey, id: id, requestOptions: requestOptions, cosmosClientContext: cosmosClientContext)
    {
        this.Resource = resource;
    }

    /// <summary>
    /// Creates a batch operation carrying a typed resource, bound to a container.
    /// </summary>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        T resource,
        ContainerInternal containerCore,
        string id = null,
        TransactionalBatchItemRequestOptions requestOptions = null)
        : base(operationType, operationIndex, containerCore: containerCore, id: id, requestOptions: requestOptions)
    {
        this.Resource = resource;
    }

    // Typed resource to be serialized into the operation body on materialization.
    public T Resource { get; private set; }

    /// <summary>
    /// Materializes the operation's resource into a Memory{byte} wrapping a byte array.
    /// </summary>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> for cancellation.</param>
    internal override Task MaterializeResourceAsync(CosmosSerializerCore serializerCore, CancellationToken cancellationToken)
    {
        // Nothing to do when a body is already materialized or there is no resource.
        if (!this.body.IsEmpty || this.Resource == null)
        {
            return Task.CompletedTask;
        }

        // Serialize the typed resource to a stream, then let the base class turn
        // the stream into the in-memory body.
        this.ResourceStream = serializerCore.ToStream(this.Resource);
        return base.MaterializeResourceAsync(serializerCore, cancellationToken);
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Handlers;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
using Microsoft.Azure.Cosmos.Tracing;
using Microsoft.Azure.Documents;
/// <summary>
/// Represents an operation on an item which will be executed as part of a batch request
/// on a container.
/// </summary>
internal class ItemBatchOperation : IDisposable
{
#pragma warning disable SA1401 // Fields should be private
    // Materialized resource payload; populated lazily by MaterializeResourceAsync.
    protected Memory<byte> body;
#pragma warning restore SA1401 // Fields should be private

    private bool isDisposed;

    /// <summary>
    /// Initializes a new instance of the <see cref="ItemBatchOperation"/> class
    /// targeting an explicit partition key.
    /// </summary>
    /// <param name="operationType">Type of the operation within the batch.</param>
    /// <param name="operationIndex">Position of the operation within the batch.</param>
    /// <param name="partitionKey">Partition key the operation applies to.</param>
    /// <param name="id">Optional id of the item the operation targets.</param>
    /// <param name="resourceStream">Optional stream holding the serialized resource body.</param>
    /// <param name="requestOptions">Optional per-operation request options.</param>
    /// <param name="cosmosClientContext">Client context used to resolve client-level options.</param>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        PartitionKey partitionKey,
        string id = null,
        Stream resourceStream = null,
        TransactionalBatchItemRequestOptions requestOptions = null,
        CosmosClientContext cosmosClientContext = null)
    {
        this.OperationType = operationType;
        this.OperationIndex = operationIndex;
        this.PartitionKey = partitionKey;
        this.Id = id;
        this.ResourceStream = resourceStream;
        this.RequestOptions = requestOptions;
        this.ClientContext = cosmosClientContext;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ItemBatchOperation"/> class
    /// bound to a container; the client context is taken from the container.
    /// </summary>
    /// <param name="operationType">Type of the operation within the batch.</param>
    /// <param name="operationIndex">Position of the operation within the batch.</param>
    /// <param name="containerCore">Container the operation targets; supplies the client context.</param>
    /// <param name="id">Optional id of the item the operation targets.</param>
    /// <param name="resourceStream">Optional stream holding the serialized resource body.</param>
    /// <param name="requestOptions">Optional per-operation request options.</param>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        ContainerInternal containerCore,
        string id = null,
        Stream resourceStream = null,
        TransactionalBatchItemRequestOptions requestOptions = null)
    {
        this.OperationType = operationType;
        this.OperationIndex = operationIndex;
        this.ContainerInternal = containerCore;
        this.Id = id;
        this.ResourceStream = resourceStream;
        this.RequestOptions = requestOptions;
        this.ClientContext = containerCore.ClientContext;
    }

    // Partition key for the operation; settable internally after construction.
    public PartitionKey? PartitionKey { get; internal set; }

    // Id of the item targeted by the operation; null when not applicable.
    public string Id { get; }

    public OperationType OperationType { get; }

    // Raw serialized resource; disposed by this instance (see Dispose(bool)).
    public Stream ResourceStream { get; protected set; }

    public TransactionalBatchItemRequestOptions RequestOptions { get; }

    // Position of this operation inside the batch request body.
    public int OperationIndex { get; internal set; }

    internal ContainerInternal ContainerInternal { get; }

    // JSON form of the partition key; when set, it is serialized by WriteOperation.
    internal string PartitionKeyJson { get; set; }

    internal Documents.PartitionKey ParsedPartitionKey { get; set; }

    private readonly CosmosClientContext ClientContext;

    // Materialized resource payload. Reading it while a ResourceStream is still
    // pending materialization is a programming error, hence the Debug.Assert.
    internal Memory<byte> ResourceBody
    {
        get
        {
            Debug.Assert(
                this.ResourceStream == null || !this.body.IsEmpty,
                "ResourceBody read without materialization of ResourceStream");
            return this.body;
        }
        set => this.body = value;
    }

    /// <summary>
    /// Operational context used in stream operations.
    /// </summary>
    /// <seealso cref="BatchAsyncBatcher"/>
    /// <seealso cref="BatchAsyncStreamer"/>
    /// <seealso cref="BatchAsyncContainerExecutor"/>
    internal ItemBatchOperationContext Context { get; private set; }

    internal ITrace Trace { get; set; }

    /// <summary>
    /// Disposes the current <see cref="ItemBatchOperation"/>.
    /// </summary>
    public void Dispose()
    {
        this.Dispose(true);
    }

    /// <summary>
    /// Serializes a single operation into the HybridRow <paramref name="writer"/>.
    /// The field write order must be preserved — it mirrors the backend batch
    /// operation schema. Returns the first non-success <see cref="Result"/>, if any.
    /// </summary>
    /// <remarks>
    /// <paramref name="typeArg"/> is required by the RowWriter.WriteBuffer delegate
    /// signature but is not used here.
    /// </remarks>
    internal static Result WriteOperation(ref RowWriter writer, TypeArgument typeArg, ItemBatchOperation operation)
    {
        bool pkWritten = false;
        Result r = writer.WriteInt32("operationType", (int)operation.OperationType);
        if (r != Result.Success)
        {
            return r;
        }
        r = writer.WriteInt32("resourceType", (int)ResourceType.Document);
        if (r != Result.Success)
        {
            return r;
        }
        if (operation.PartitionKeyJson != null)
        {
            r = writer.WriteString("partitionKey", operation.PartitionKeyJson);
            if (r != Result.Success)
            {
                return r;
            }
            // Remember so a partition key from request-option properties is not
            // written a second time below.
            pkWritten = true;
        }
        if (operation.Id != null)
        {
            r = writer.WriteString("id", operation.Id);
            if (r != Result.Success)
            {
                return r;
            }
        }
        if (!operation.ResourceBody.IsEmpty)
        {
            r = writer.WriteBinary("resourceBody", operation.ResourceBody.Span);
            if (r != Result.Success)
            {
                return r;
            }
        }
        if (operation.RequestOptions != null)
        {
            TransactionalBatchItemRequestOptions options = operation.RequestOptions;
            if (options.IndexingDirective.HasValue)
            {
                string indexingDirectiveString = IndexingDirectiveStrings.FromIndexingDirective(options.IndexingDirective.Value);
                r = writer.WriteString("indexingDirective", indexingDirectiveString);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            // ifMatch takes precedence; ifNoneMatch is written only when no ifMatch etag is set.
            if (options.IfMatchEtag != null)
            {
                r = writer.WriteString("ifMatch", options.IfMatchEtag);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            else if (options.IfNoneMatchEtag != null)
            {
                r = writer.WriteString("ifNoneMatch", options.IfNoneMatchEtag);
                if (r != Result.Success)
                {
                    return r;
                }
            }
            if (options.Properties != null)
            {
                // Backend pass-through values carried in request-option properties;
                // each is written only when present and of the expected runtime type.
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj))
                {
                    if (binaryIdObj is byte[] binaryId)
                    {
                        r = writer.WriteBinary("binaryId", binaryId);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj))
                {
                    if (epkObj is byte[] epk)
                    {
                        r = writer.WriteBinary("effectivePartitionKey", epk);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                // A partition key supplied via properties is only used when one was
                // not already written from PartitionKeyJson above.
                if (!pkWritten && options.Properties.TryGetValue(
                        HttpConstants.HttpHeaders.PartitionKey,
                        out object pkStrObj))
                {
                    if (pkStrObj is string pkString)
                    {
                        r = writer.WriteString("partitionKey", pkString);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
                if (options.Properties.TryGetValue(WFConstants.BackendHeaders.TimeToLiveInSeconds, out object ttlObj))
                {
                    if (ttlObj is string ttlStr && int.TryParse(ttlStr, out int ttl))
                    {
                        r = writer.WriteInt32("timeToLiveInSeconds", ttl);
                        if (r != Result.Success)
                        {
                            return r;
                        }
                    }
                }
            }
        }
        // Ask the backend to omit the resource in the response when the client is
        // configured for no-content responses for this operation type.
        if (RequestInvokerHandler.ShouldSetNoContentResponseHeaders(operation.RequestOptions,
            operation.ClientContext?.ClientOptions,
            operation.OperationType,
            ResourceType.Document))
        {
            r = writer.WriteBool("minimalReturnPreference", true);
            if (r != Result.Success)
            {
                return r;
            }
        }
        return Result.Success;
    }

    /// <summary>
    /// Computes and returns an approximation for the length of this <see cref="ItemBatchOperation"/>.
    /// when serialized.
    /// </summary>
    /// <returns>An under-estimate of the length.</returns>
    internal int GetApproximateSerializedLength()
    {
        int length = 0;
        if (this.PartitionKeyJson != null)
        {
            length += this.PartitionKeyJson.Length;
        }
        if (this.Id != null)
        {
            length += this.Id.Length;
        }
        length += this.body.Length;
        if (this.RequestOptions != null)
        {
            if (this.RequestOptions.IfMatchEtag != null)
            {
                length += this.RequestOptions.IfMatchEtag.Length;
            }
            if (this.RequestOptions.IfNoneMatchEtag != null)
            {
                length += this.RequestOptions.IfNoneMatchEtag.Length;
            }
            if (this.RequestOptions.IndexingDirective.HasValue)
            {
                length += 7; // "Default", "Include", "Exclude" are possible values
            }
            if (this.RequestOptions.Properties != null)
            {
                if (this.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj))
                {
                    if (binaryIdObj is byte[] binaryId)
                    {
                        length += binaryId.Length;
                    }
                }
                if (this.RequestOptions.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj))
                {
                    if (epkObj is byte[] epk)
                    {
                        length += epk.Length;
                    }
                }
            }
        }
        return length;
    }

    /// <summary>
    /// Materializes the operation's resource into a Memory{byte} wrapping a byte array.
    /// </summary>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> for cancellation.</param>
    internal virtual async Task MaterializeResourceAsync(CosmosSerializerCore serializerCore, CancellationToken cancellationToken)
    {
        // No-op when already materialized or when there is no stream to read.
        if (this.body.IsEmpty && this.ResourceStream != null)
        {
            this.body = await BatchExecUtils.StreamToMemoryAsync(this.ResourceStream, cancellationToken);
        }
    }

    /// <summary>
    /// Attached a context to the current operation to track resolution.
    /// </summary>
    /// <exception cref="InvalidOperationException">If the operation already had an attached context.</exception>
    internal void AttachContext(ItemBatchOperationContext context)
    {
        if (this.Context != null)
        {
            throw new InvalidOperationException("Cannot modify the current context of an operation.");
        }
        this.Context = context;
    }

    /// <summary>
    /// Disposes the disposable members held by this class.
    /// </summary>
    /// <param name="disposing">Indicates whether to dispose managed resources or not.</param>
    protected virtual void Dispose(bool disposing)
    {
        if (disposing && !this.isDisposed)
        {
            this.isDisposed = true;
            if (this.ResourceStream != null)
            {
                this.ResourceStream.Dispose();
                this.ResourceStream = null;
            }
        }
    }
}
#pragma warning disable SA1402 // File may only contain a single type
internal class ItemBatchOperation<T> : ItemBatchOperation
#pragma warning restore SA1402 // File may only contain a single type
{
    /// <summary>
    /// Creates a batch operation carrying a typed resource, targeting an explicit partition key.
    /// </summary>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        PartitionKey partitionKey,
        T resource,
        string id = null,
        TransactionalBatchItemRequestOptions requestOptions = null,
        CosmosClientContext cosmosClientContext = null)
        : base(operationType, operationIndex, partitionKey: partitionKey, id: id, requestOptions: requestOptions, cosmosClientContext: cosmosClientContext)
    {
        this.Resource = resource;
    }

    /// <summary>
    /// Creates a batch operation carrying a typed resource, bound to a container.
    /// </summary>
    public ItemBatchOperation(
        OperationType operationType,
        int operationIndex,
        T resource,
        ContainerInternal containerCore,
        string id = null,
        TransactionalBatchItemRequestOptions requestOptions = null)
        : base(operationType, operationIndex, containerCore: containerCore, id: id, requestOptions: requestOptions)
    {
        this.Resource = resource;
    }

    // Typed resource to be serialized into the operation body on materialization.
    public T Resource { get; private set; }

    /// <summary>
    /// Materializes the operation's resource into a Memory{byte} wrapping a byte array.
    /// </summary>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> for cancellation.</param>
    internal override Task MaterializeResourceAsync(CosmosSerializerCore serializerCore, CancellationToken cancellationToken)
    {
        // Nothing to do when a body is already materialized or there is no resource.
        if (!this.body.IsEmpty || this.Resource == null)
        {
            return Task.CompletedTask;
        }

        // Serialize the typed resource to a stream, then let the base class turn
        // the stream into the in-memory body.
        this.ResourceStream = serializerCore.ToStream(this.Resource);
        return base.MaterializeResourceAsync(serializerCore, cancellationToken);
    }
}
}

Просмотреть файл

@ -1,174 +1,174 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
#pragma warning disable CA1001 // Types that own disposable fields should be disposable
internal abstract class ServerBatchRequest
#pragma warning restore CA1001 // Types that own disposable fields should be disposable
{
    private readonly int maxBodyLength;
    private readonly int maxOperationCount;
    private readonly CosmosSerializerCore serializerCore;

    // Window over the caller's array holding the operations included in this request.
    private ArraySegment<ItemBatchOperation> operations;

    // Scratch buffer reused while serializing each operation row.
    private MemorySpanResizer<byte> operationResizableWriteBuffer;

    // Serialized request body; ownership is handed off via TransferBodyStream.
    private MemoryStream bodyStream;

    // Body length recorded before writing the current record so an over-limit
    // record can be truncated afterwards.
    private long bodyStreamPositionBeforeWritingCurrentRecord;

    // Set by WriteOperation when the last written record must be dropped.
    private bool shouldDeleteLastWrittenRecord;

    // Index (within this.operations) of the last operation actually serialized.
    private int lastWrittenOperationIndex;

    /// <summary>
    /// Initializes a new instance of the <see cref="ServerBatchRequest"/> class.
    /// </summary>
    /// <param name="maxBodyLength">Maximum length allowed for the request body.</param>
    /// <param name="maxOperationCount">Maximum number of operations allowed in the request.</param>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    protected ServerBatchRequest(int maxBodyLength, int maxOperationCount, CosmosSerializerCore serializerCore)
    {
        this.maxBodyLength = maxBodyLength;
        this.maxOperationCount = maxOperationCount;
        this.serializerCore = serializerCore;
    }

    // Operations that were actually included in this request; any overflow is
    // returned to the caller by CreateBodyStreamAsync.
    public IReadOnlyList<ItemBatchOperation> Operations => this.operations;

    /// <summary>
    /// Returns the body Stream.
    /// Caller is responsible for disposing it after use.
    /// </summary>
    /// <returns>Body stream.</returns>
    public MemoryStream TransferBodyStream()
    {
        MemoryStream bodyStream = this.bodyStream;
        this.bodyStream = null;
        return bodyStream;
    }

    /// <summary>
    /// Adds as many operations as possible from the provided list of operations
    /// in the list order while having the body stream not exceed maxBodySize.
    /// </summary>
    /// <param name="operations">Operations to be added; read-only.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
    /// <param name="ensureContinuousOperationIndexes">Whether to stop adding operations to the request once there is non-continuity in the operation indexes.</param>
    /// <returns>Any pending operations that were not included in the request.</returns>
    protected async Task<ArraySegment<ItemBatchOperation>> CreateBodyStreamAsync(
        ArraySegment<ItemBatchOperation> operations,
        CancellationToken cancellationToken,
        bool ensureContinuousOperationIndexes = false)
    {
        int estimatedMaxOperationLength = 0;
        int approximateTotalLength = 0;
        int previousOperationIndex = -1;
        int materializedCount = 0;

        // First pass: materialize payloads until a size/count limit is reached or,
        // when requested, the operation indexes stop being contiguous.
        foreach (ItemBatchOperation operation in operations)
        {
            if (ensureContinuousOperationIndexes && previousOperationIndex != -1 && operation.OperationIndex != previousOperationIndex + 1)
            {
                break;
            }
            await operation.MaterializeResourceAsync(this.serializerCore, cancellationToken);
            materializedCount++;
            previousOperationIndex = operation.OperationIndex;
            int currentLength = operation.GetApproximateSerializedLength();
            estimatedMaxOperationLength = Math.Max(currentLength, estimatedMaxOperationLength);
            approximateTotalLength += currentLength;
            if (approximateTotalLength > this.maxBodyLength)
            {
                break;
            }
            if (materializedCount == this.maxOperationCount)
            {
                break;
            }
        }
        this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, materializedCount);

        // Second pass: serialize the materialized operations into the RecordIO body.
        const int operationSerializationOverheadOverEstimateInBytes = 200;
        this.bodyStream = new MemoryStream(approximateTotalLength + (operationSerializationOverheadOverEstimateInBytes * materializedCount));
        this.operationResizableWriteBuffer = new MemorySpanResizer<byte>(estimatedMaxOperationLength + operationSerializationOverheadOverEstimateInBytes);
        Result r = await this.bodyStream.WriteRecordIOAsync(default(Segment), this.WriteOperation);
        Debug.Assert(r == Result.Success, "Failed to serialize batch request");
        this.bodyStream.Position = 0;

        // WriteOperation may have written one record past the size limit; truncate
        // the body and the operation window accordingly.
        if (this.shouldDeleteLastWrittenRecord)
        {
            this.bodyStream.SetLength(this.bodyStreamPositionBeforeWritingCurrentRecord);
            this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex);
        }
        else
        {
            this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex + 1);
        }

        // Operations that did not fit are returned so the caller can send them in
        // a subsequent request.
        int overflowOperations = operations.Count - this.operations.Count;
        return new ArraySegment<ItemBatchOperation>(operations.Array, this.operations.Count + operations.Offset, overflowOperations);
    }

    // RecordIO producer callback: serializes the operation at 'index' into 'buffer'.
    // Returning an empty buffer (with Success) signals end-of-stream to WriteRecordIOAsync.
    private Result WriteOperation(long index, out ReadOnlyMemory<byte> buffer)
    {
        if (this.bodyStream.Length > this.maxBodyLength)
        {
            // If there is only one operation within the request, we will keep it even if it
            // exceeds the maximum size allowed for the body.
            if (index > 1)
            {
                this.shouldDeleteLastWrittenRecord = true;
            }
            buffer = default(ReadOnlyMemory<byte>);
            return Result.Success;
        }
        this.bodyStreamPositionBeforeWritingCurrentRecord = this.bodyStream.Length;
        if (index >= this.operations.Count)
        {
            buffer = default(ReadOnlyMemory<byte>);
            return Result.Success;
        }
        ItemBatchOperation operation = this.operations.Array[this.operations.Offset + (int)index];
        RowBuffer row = new RowBuffer(this.operationResizableWriteBuffer.Memory.Length, this.operationResizableWriteBuffer);
        row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchOperationLayout, BatchSchemaProvider.BatchLayoutResolver);
        Result r = RowWriter.WriteBuffer(ref row, operation, ItemBatchOperation.WriteOperation);
        if (r != Result.Success)
        {
            buffer = null;
            return r;
        }
        this.lastWrittenOperationIndex = (int)index;
        buffer = this.operationResizableWriteBuffer.Memory.Slice(0, row.Length);
        return Result.Success;
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
#pragma warning disable CA1001 // Types that own disposable fields should be disposable
internal abstract class ServerBatchRequest
#pragma warning restore CA1001 // Types that own disposable fields should be disposable
{
    private readonly int maxBodyLength;
    private readonly int maxOperationCount;
    private readonly CosmosSerializerCore serializerCore;

    // Window over the caller's array holding the operations included in this request.
    private ArraySegment<ItemBatchOperation> operations;

    // Scratch buffer reused while serializing each operation row.
    private MemorySpanResizer<byte> operationResizableWriteBuffer;

    // Serialized request body; ownership is handed off via TransferBodyStream.
    private MemoryStream bodyStream;

    // Body length recorded before writing the current record so an over-limit
    // record can be truncated afterwards.
    private long bodyStreamPositionBeforeWritingCurrentRecord;

    // Set by WriteOperation when the last written record must be dropped.
    private bool shouldDeleteLastWrittenRecord;

    // Index (within this.operations) of the last operation actually serialized.
    private int lastWrittenOperationIndex;

    /// <summary>
    /// Initializes a new instance of the <see cref="ServerBatchRequest"/> class.
    /// </summary>
    /// <param name="maxBodyLength">Maximum length allowed for the request body.</param>
    /// <param name="maxOperationCount">Maximum number of operations allowed in the request.</param>
    /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
    protected ServerBatchRequest(int maxBodyLength, int maxOperationCount, CosmosSerializerCore serializerCore)
    {
        this.maxBodyLength = maxBodyLength;
        this.maxOperationCount = maxOperationCount;
        this.serializerCore = serializerCore;
    }

    // Operations that were actually included in this request; any overflow is
    // returned to the caller by CreateBodyStreamAsync.
    public IReadOnlyList<ItemBatchOperation> Operations => this.operations;

    /// <summary>
    /// Returns the body Stream.
    /// Caller is responsible for disposing it after use.
    /// </summary>
    /// <returns>Body stream.</returns>
    public MemoryStream TransferBodyStream()
    {
        MemoryStream bodyStream = this.bodyStream;
        this.bodyStream = null;
        return bodyStream;
    }

    /// <summary>
    /// Adds as many operations as possible from the provided list of operations
    /// in the list order while having the body stream not exceed maxBodySize.
    /// </summary>
    /// <param name="operations">Operations to be added; read-only.</param>
    /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
    /// <param name="ensureContinuousOperationIndexes">Whether to stop adding operations to the request once there is non-continuity in the operation indexes.</param>
    /// <returns>Any pending operations that were not included in the request.</returns>
    protected async Task<ArraySegment<ItemBatchOperation>> CreateBodyStreamAsync(
        ArraySegment<ItemBatchOperation> operations,
        CancellationToken cancellationToken,
        bool ensureContinuousOperationIndexes = false)
    {
        int estimatedMaxOperationLength = 0;
        int approximateTotalLength = 0;
        int previousOperationIndex = -1;
        int materializedCount = 0;

        // First pass: materialize payloads until a size/count limit is reached or,
        // when requested, the operation indexes stop being contiguous.
        foreach (ItemBatchOperation operation in operations)
        {
            if (ensureContinuousOperationIndexes && previousOperationIndex != -1 && operation.OperationIndex != previousOperationIndex + 1)
            {
                break;
            }
            await operation.MaterializeResourceAsync(this.serializerCore, cancellationToken);
            materializedCount++;
            previousOperationIndex = operation.OperationIndex;
            int currentLength = operation.GetApproximateSerializedLength();
            estimatedMaxOperationLength = Math.Max(currentLength, estimatedMaxOperationLength);
            approximateTotalLength += currentLength;
            if (approximateTotalLength > this.maxBodyLength)
            {
                break;
            }
            if (materializedCount == this.maxOperationCount)
            {
                break;
            }
        }
        this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, materializedCount);

        // Second pass: serialize the materialized operations into the RecordIO body.
        const int operationSerializationOverheadOverEstimateInBytes = 200;
        this.bodyStream = new MemoryStream(approximateTotalLength + (operationSerializationOverheadOverEstimateInBytes * materializedCount));
        this.operationResizableWriteBuffer = new MemorySpanResizer<byte>(estimatedMaxOperationLength + operationSerializationOverheadOverEstimateInBytes);
        Result r = await this.bodyStream.WriteRecordIOAsync(default(Segment), this.WriteOperation);
        Debug.Assert(r == Result.Success, "Failed to serialize batch request");
        this.bodyStream.Position = 0;

        // WriteOperation may have written one record past the size limit; truncate
        // the body and the operation window accordingly.
        if (this.shouldDeleteLastWrittenRecord)
        {
            this.bodyStream.SetLength(this.bodyStreamPositionBeforeWritingCurrentRecord);
            this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex);
        }
        else
        {
            this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex + 1);
        }

        // Operations that did not fit are returned so the caller can send them in
        // a subsequent request.
        int overflowOperations = operations.Count - this.operations.Count;
        return new ArraySegment<ItemBatchOperation>(operations.Array, this.operations.Count + operations.Offset, overflowOperations);
    }

    // RecordIO producer callback: serializes the operation at 'index' into 'buffer'.
    // Returning an empty buffer (with Success) signals end-of-stream to WriteRecordIOAsync.
    private Result WriteOperation(long index, out ReadOnlyMemory<byte> buffer)
    {
        if (this.bodyStream.Length > this.maxBodyLength)
        {
            // If there is only one operation within the request, we will keep it even if it
            // exceeds the maximum size allowed for the body.
            if (index > 1)
            {
                this.shouldDeleteLastWrittenRecord = true;
            }
            buffer = default(ReadOnlyMemory<byte>);
            return Result.Success;
        }
        this.bodyStreamPositionBeforeWritingCurrentRecord = this.bodyStream.Length;
        if (index >= this.operations.Count)
        {
            buffer = default(ReadOnlyMemory<byte>);
            return Result.Success;
        }
        ItemBatchOperation operation = this.operations.Array[this.operations.Offset + (int)index];
        RowBuffer row = new RowBuffer(this.operationResizableWriteBuffer.Memory.Length, this.operationResizableWriteBuffer);
        row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchOperationLayout, BatchSchemaProvider.BatchLayoutResolver);
        Result r = RowWriter.WriteBuffer(ref row, operation, ItemBatchOperation.WriteOperation);
        if (r != Result.Success)
        {
            buffer = null;
            return r;
        }
        this.lastWrittenOperationIndex = (int)index;
        buffer = this.operationResizableWriteBuffer.Memory.Slice(0, row.Length);
        return Result.Success;
    }
}
}

Просмотреть файл

@ -1,60 +1,60 @@
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ------------------------------------------------------------

namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;

    // Server batch request in which every operation targets the same partition key.
    internal sealed class SinglePartitionKeyServerBatchRequest : ServerBatchRequest
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="SinglePartitionKeyServerBatchRequest"/> class.
        /// Single partition key server request.
        /// </summary>
        /// <param name="partitionKey">Partition key that applies to all operations in this request.</param>
        /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
        private SinglePartitionKeyServerBatchRequest(
            PartitionKey? partitionKey,
            CosmosSerializerCore serializerCore)
            : base(maxBodyLength: int.MaxValue,        // no body-size cap applied here; presumably bounded upstream — TODO confirm
                maxOperationCount: int.MaxValue,       // no operation-count cap applied here
                serializerCore: serializerCore)
        {
            this.PartitionKey = partitionKey;
        }

        /// <summary>
        /// PartitionKey that applies to all operations in this request.
        /// </summary>
        public PartitionKey? PartitionKey { get; }

        /// <summary>
        /// Creates an instance of <see cref="SinglePartitionKeyServerBatchRequest"/>.
        /// The body of the request is populated with operations till it reaches the provided maxBodyLength.
        /// </summary>
        /// <param name="partitionKey">Partition key of the request.</param>
        /// <param name="operations">Operations to be added into this batch request.</param>
        /// <param name="serializerCore">Serializer to serialize user provided objects to JSON.</param>
        /// <param name="trace">The trace.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>A newly created instance of <see cref="SinglePartitionKeyServerBatchRequest"/>.</returns>
        public static async Task<SinglePartitionKeyServerBatchRequest> CreateAsync(
            PartitionKey? partitionKey,
            ArraySegment<ItemBatchOperation> operations,
            CosmosSerializerCore serializerCore,
            ITrace trace,
            CancellationToken cancellationToken)
        {
            // Wrap body-stream creation in its own trace span.
            using (trace.StartChild("Create Batch Request", TraceComponent.Batch, TraceLevel.Info))
            {
                SinglePartitionKeyServerBatchRequest request = new SinglePartitionKeyServerBatchRequest(partitionKey, serializerCore);
                await request.CreateBodyStreamAsync(operations, cancellationToken);
                return request;
            }
        }
    }
}
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ------------------------------------------------------------

namespace Microsoft.Azure.Cosmos
{
    using System;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;

    /// <summary>
    /// A <see cref="ServerBatchRequest"/> whose operations all share a single partition key.
    /// </summary>
    internal sealed class SinglePartitionKeyServerBatchRequest : ServerBatchRequest
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="SinglePartitionKeyServerBatchRequest"/> class.
        /// </summary>
        /// <param name="partitionKey">Partition key shared by every operation in this request.</param>
        /// <param name="serializerCore">Serializer used to turn user-provided objects into JSON.</param>
        private SinglePartitionKeyServerBatchRequest(
            PartitionKey? partitionKey,
            CosmosSerializerCore serializerCore)
            : base(maxBodyLength: int.MaxValue,
                   maxOperationCount: int.MaxValue,
                   serializerCore: serializerCore)
        {
            this.PartitionKey = partitionKey;
        }

        /// <summary>
        /// Gets the partition key that applies to every operation in this request.
        /// </summary>
        public PartitionKey? PartitionKey { get; }

        /// <summary>
        /// Builds a <see cref="SinglePartitionKeyServerBatchRequest"/> and fills its body
        /// stream from the supplied operations.
        /// </summary>
        /// <param name="partitionKey">Partition key of the request.</param>
        /// <param name="operations">Operations to place into the batch request.</param>
        /// <param name="serializerCore">Serializer used to turn user-provided objects into JSON.</param>
        /// <param name="trace">Trace under which the body creation is recorded.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <returns>The populated <see cref="SinglePartitionKeyServerBatchRequest"/>.</returns>
        public static async Task<SinglePartitionKeyServerBatchRequest> CreateAsync(
            PartitionKey? partitionKey,
            ArraySegment<ItemBatchOperation> operations,
            CosmosSerializerCore serializerCore,
            ITrace trace,
            CancellationToken cancellationToken)
        {
            using (ITrace createRequestTrace = trace.StartChild("Create Batch Request", TraceComponent.Batch, TraceLevel.Info))
            {
                SinglePartitionKeyServerBatchRequest batchRequest = new SinglePartitionKeyServerBatchRequest(partitionKey, serializerCore);
                await batchRequest.CreateBodyStreamAsync(operations, cancellationToken);
                return batchRequest;
            }
        }
    }
}

Просмотреть файл

@ -46,7 +46,7 @@ namespace Microsoft.Azure.Cosmos
this.MaxConnectionLimit = defaultMaxConcurrentConnectionLimit;
this.RetryOptions = new RetryOptions();
this.EnableReadRequestsFallback = null;
this.EnableClientTelemetry = ClientTelemetryOptions.IsClientTelemetryEnabled();
}

Просмотреть файл

@ -20,7 +20,7 @@ namespace Microsoft.Azure.Cosmos.Diagnostics
if (trace == null)
{
throw new ArgumentNullException(nameof(trace));
}
}
// Need to set to the root trace, since we don't know which layer of the stack the response message was returned from.
ITrace rootTrace = trace;
@ -45,8 +45,8 @@ namespace Microsoft.Azure.Cosmos.Diagnostics
}
public override IReadOnlyList<(string regionName, Uri uri)> GetContactedRegions()
{
return this.Value?.RegionsContacted;
{
return this.Value?.RegionsContacted;
}
internal bool IsGoneExceptionHit()

Просмотреть файл

@ -5,36 +5,36 @@
namespace Microsoft.Azure.Cosmos.Handler
{
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using Documents.Rntbd;
using Microsoft.Azure.Cosmos.Core.Trace;
using Microsoft.Azure.Cosmos.Telemetry;
using Microsoft.Azure.Cosmos.Core.Trace;
using Microsoft.Azure.Cosmos.Telemetry;
/// <summary>
/// This is a helper class that creates a single static instance to avoid each
/// client instance from creating a new System Usage monitor with Diagnostics and Telemetry Recorders(if enabled).
/// client instance from creating a new System Usage monitor with Diagnostics and Telemetry Recorders(if enabled).
/// The diagnostics should never block a request, and is a best attempt
/// If the CPU load history fails then don't try it in the future.
/// </summary>
internal class DiagnosticsHandlerHelper
{
{
private const string Diagnostickey = "diagnostic";
private const string Telemetrykey = "telemetry";
private const string Telemetrykey = "telemetry";
public static readonly TimeSpan DiagnosticsRefreshInterval = TimeSpan.FromSeconds(10);
private readonly SystemUsageRecorder diagnosticSystemUsageRecorder = new SystemUsageRecorder(
identifier: Diagnostickey,
historyLength: 6,
refreshInterval: DiagnosticsHandlerHelper.DiagnosticsRefreshInterval);
refreshInterval: DiagnosticsHandlerHelper.DiagnosticsRefreshInterval);
private static readonly TimeSpan ClientTelemetryRefreshInterval = TimeSpan.FromSeconds(5);
private readonly SystemUsageRecorder telemetrySystemUsageRecorder = new SystemUsageRecorder(
identifier: Telemetrykey,
historyLength: 120,
refreshInterval: DiagnosticsHandlerHelper.ClientTelemetryRefreshInterval);
private static bool isDiagnosticsMonitoringEnabled = false;
private static bool isTelemetryMonitoringEnabled = false;
private static bool isDiagnosticsMonitoringEnabled = false;
private static bool isTelemetryMonitoringEnabled = false;
/// <summary>
/// Singleton to make sure only one instance of DiagnosticHandlerHelper is there.
@ -46,52 +46,52 @@ namespace Microsoft.Azure.Cosmos.Handler
null;
#else
new DiagnosticsHandlerHelper();
#endif
#endif
private readonly SystemUsageMonitor systemUsageMonitor = null;
/// <summary>
/// Start System Usage Monitor with Diagnostic and Telemetry Recorder if Telemetry is enabled
/// Otherwise Start System Usage Monitor with only Diagnostic Recorder
/// <summary>
/// Start System Usage Monitor with Diagnostic and Telemetry Recorder if Telemetry is enabled
/// Otherwise Start System Usage Monitor with only Diagnostic Recorder
/// </summary>
private DiagnosticsHandlerHelper()
{
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = false;
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = false;
// If the CPU monitor fails for some reason don't block the application
try
{
DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled = ClientTelemetryOptions.IsClientTelemetryEnabled();
List<SystemUsageRecorder> recorders = new List<SystemUsageRecorder>()
{
this.diagnosticSystemUsageRecorder,
};
if (DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled)
{
recorders.Add(this.telemetrySystemUsageRecorder);
}
this.systemUsageMonitor = SystemUsageMonitor.CreateAndStart(recorders);
{
DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled = ClientTelemetryOptions.IsClientTelemetryEnabled();
List<SystemUsageRecorder> recorders = new List<SystemUsageRecorder>()
{
this.diagnosticSystemUsageRecorder,
};
if (DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled)
{
recorders.Add(this.telemetrySystemUsageRecorder);
}
this.systemUsageMonitor = SystemUsageMonitor.CreateAndStart(recorders);
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = true;
}
catch (Exception ex)
{
DefaultTrace.TraceError(ex.Message);
{
DefaultTrace.TraceError(ex.Message);
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = false;
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = false;
DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled = false;
}
}
}
/// <summary>
/// This method will give CPU Usage(%), Memory Usage(kb) and ThreadPool Information from Diagnostic recorder,
/// This method will give CPU Usage(%), Memory Usage(kb) and ThreadPool Information from Diagnostic recorder,
/// It will return null if Diagnostic Monitoring is not enabled or throws any error while reading data from the recorder.
/// </summary>
public SystemUsageHistory GetDiagnosticsSystemHistory()
{
{
if (!DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled)
{
return null;
@ -102,7 +102,7 @@ namespace Microsoft.Azure.Cosmos.Handler
return this.diagnosticSystemUsageRecorder.Data;
}
catch (Exception ex)
{
{
DefaultTrace.TraceError(ex.Message);
DiagnosticsHandlerHelper.isDiagnosticsMonitoringEnabled = false;
return null;
@ -110,23 +110,23 @@ namespace Microsoft.Azure.Cosmos.Handler
}
/// <summary>
/// This method will give CPU Usage(%), Memory Usage(kb) and ThreadPool Information from Client Telemetry recorder.
/// This method will give CPU Usage(%), Memory Usage(kb) and ThreadPool Information from Client Telemetry recorder.
/// It will return null if Diagnostic Monitoring is not enabled or throws any error while reading data from the recorder.
/// </summary>
/// <returns> CpuAndMemoryUsageRecorder</returns>
public SystemUsageHistory GetClientTelemetrySystemHistory()
{
{
if (!DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled)
{
return null;
}
try
{
{
return this.telemetrySystemUsageRecorder.Data;
}
catch (Exception ex)
{
{
DefaultTrace.TraceError(ex.Message);
DiagnosticsHandlerHelper.isTelemetryMonitoringEnabled = false;
return null;

Просмотреть файл

@ -39,7 +39,7 @@ namespace Microsoft.Azure.Cosmos.Handlers
operationType: request.OperationType,
resourceType: request.ResourceType,
consistencyLevel: request.Headers?[Documents.HttpConstants.HttpHeaders.ConsistencyLevel],
requestCharge: response.Headers.RequestCharge,
requestCharge: response.Headers.RequestCharge,
subStatusCode: response.Headers.SubStatusCodeLiteral);
}
catch (Exception ex)

Просмотреть файл

@ -29,10 +29,10 @@ namespace Microsoft.Azure.Cosmos.Handlers
CancellationToken cancellationToken)
{
try
{
{
ResponseMessage response = await this.ProcessMessageAsync(request, cancellationToken);
Debug.Assert(System.Diagnostics.Trace.CorrelationManager.ActivityId != Guid.Empty, "Trace activity id is missing");
Debug.Assert(System.Diagnostics.Trace.CorrelationManager.ActivityId != Guid.Empty, "Trace activity id is missing");
return response;
}
//catch DocumentClientException and exceptions that inherit it. Other exception types happen before a backend request
@ -91,8 +91,8 @@ namespace Microsoft.Azure.Cosmos.Handlers
throw new ArgumentNullException(nameof(request));
}
DocumentServiceRequest serviceRequest = request.ToDocumentServiceRequest();
DocumentServiceRequest serviceRequest = request.ToDocumentServiceRequest();
ClientSideRequestStatisticsTraceDatum clientSideRequestStatisticsTraceDatum = new ClientSideRequestStatisticsTraceDatum(DateTime.UtcNow);
serviceRequest.RequestContext.ClientRequestStatistics = clientSideRequestStatisticsTraceDatum;
@ -105,7 +105,7 @@ namespace Microsoft.Azure.Cosmos.Handlers
AuthorizationTokenType.PrimaryMasterKey,
request.Trace);
serviceRequest.Headers[HttpConstants.HttpHeaders.Authorization] = authorization;
serviceRequest.Headers[HttpConstants.HttpHeaders.Authorization] = authorization;
IStoreModel storeProxy = this.client.DocumentClient.GetStoreProxy(serviceRequest);
using (ITrace processMessageAsyncTrace = request.Trace.StartChild(
@ -115,17 +115,17 @@ namespace Microsoft.Azure.Cosmos.Handlers
{
request.Trace = processMessageAsyncTrace;
processMessageAsyncTrace.AddDatum("Client Side Request Stats", clientSideRequestStatisticsTraceDatum);
DocumentServiceResponse response = null;
try
{
response = request.OperationType == OperationType.Upsert
? await this.ProcessUpsertAsync(storeProxy, serviceRequest, cancellationToken)
: await storeProxy.ProcessMessageAsync(serviceRequest, cancellationToken);
}
finally
{
processMessageAsyncTrace.UpdateRegionContacted(clientSideRequestStatisticsTraceDatum);
DocumentServiceResponse response = null;
try
{
response = request.OperationType == OperationType.Upsert
? await this.ProcessUpsertAsync(storeProxy, serviceRequest, cancellationToken)
: await storeProxy.ProcessMessageAsync(serviceRequest, cancellationToken);
}
finally
{
processMessageAsyncTrace.UpdateRegionContacted(clientSideRequestStatisticsTraceDatum);
}
return response.ToCosmosResponseMessage(

Просмотреть файл

@ -1,191 +1,191 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------

namespace Microsoft.Azure.Cosmos.Linq
{
    using System.Collections;
    using System.Collections.Generic;
    using System.Collections.Immutable;
    using System.Globalization;
    using System.Linq.Expressions;
    using Microsoft.Azure.Cosmos.SqlObjects;

    /// <summary>
    /// Translates LINQ array/list method calls (Concat, Contains, Count, the indexer,
    /// ToArray/ToList) into the corresponding Cosmos SQL scalar expressions.
    /// </summary>
    internal static class ArrayBuiltinFunctions
    {
        // Method-name -> visitor lookup; populated once by the static constructor below.
        private static Dictionary<string, BuiltinFunctionVisitor> ArrayBuiltinFunctionDefinitions { get; set; }

        /// <summary>Translates Concat(first, second) into ARRAY_CONCAT(first, second).</summary>
        private class ArrayConcatVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayConcatVisitor()
                : base("ARRAY_CONCAT", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Extension-method form: both arrays arrive as explicit arguments.
                if (methodCallExpression.Arguments.Count == 2)
                {
                    SqlScalarExpression array1 = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    SqlScalarExpression array2 = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[1], context);
                    return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_CONCAT", array1, array2);
                }

                return null;
            }
        }

        /// <summary>
        /// Translates Contains into ARRAY_CONTAINS, or into an IN expression when the
        /// search list is a compile-time constant.
        /// </summary>
        private class ArrayContainsVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayContainsVisitor()
                : base("ARRAY_CONTAINS", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                Expression searchList = null;
                Expression searchExpression = null;
                // If non static Contains
                if (methodCallExpression.Arguments.Count == 1)
                {
                    searchList = methodCallExpression.Object;
                    searchExpression = methodCallExpression.Arguments[0];
                }
                // if extension method (static) Contains
                else if (methodCallExpression.Arguments.Count == 2)
                {
                    searchList = methodCallExpression.Arguments[0];
                    searchExpression = methodCallExpression.Arguments[1];
                }

                if (searchList == null || searchExpression == null)
                {
                    return null;
                }

                // Constant list: emit "item IN (a, b, ...)" rather than ARRAY_CONTAINS.
                if (searchList.NodeType == ExpressionType.Constant)
                {
                    return this.VisitIN(searchExpression, (ConstantExpression)searchList, context);
                }

                SqlScalarExpression array = ExpressionToSql.VisitScalarExpression(searchList, context);
                SqlScalarExpression expression = ExpressionToSql.VisitScalarExpression(searchExpression, context);
                return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_CONTAINS", array, expression);
            }

            // Builds "expression IN (items...)" from a constant enumerable list.
            private SqlScalarExpression VisitIN(Expression expression, ConstantExpression constantExpressionList, TranslationContext context)
            {
                List<SqlScalarExpression> items = new List<SqlScalarExpression>();
                foreach (object item in (IEnumerable)constantExpressionList.Value)
                {
                    items.Add(ExpressionToSql.VisitConstant(Expression.Constant(item), context));
                }

                // if the items list empty, then just return false expression
                if (items.Count == 0)
                {
                    return SqlLiteralScalarExpression.SqlFalseLiteralScalarExpression;
                }

                SqlScalarExpression scalarExpression = ExpressionToSql.VisitNonSubqueryScalarExpression(expression, context);
                return SqlInScalarExpression.Create(scalarExpression, false, items.ToImmutableArray());
            }
        }

        /// <summary>Translates Count() into ARRAY_LENGTH.</summary>
        private class ArrayCountVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayCountVisitor()
                : base("ARRAY_LENGTH", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Extension-method form: the array is the single argument.
                if (methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression array = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_LENGTH", array);
                }

                return null;
            }
        }

        /// <summary>Translates the list indexer (get_Item) into a SQL member indexer expression.</summary>
        private class ArrayGetItemVisitor : BuiltinFunctionVisitor
        {
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Object != null && methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression memberExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    SqlScalarExpression indexExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    return SqlMemberIndexerScalarExpression.Create(memberExpression, indexExpression);
                }

                return null;
            }

            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }

        /// <summary>
        /// Translates ToArray()/ToList() as pass-throughs: the source expression is
        /// translated unchanged (no SQL counterpart is needed).
        /// </summary>
        private class ArrayToArrayVisitor : BuiltinFunctionVisitor
        {
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return methodCallExpression.Arguments.Count == 1
                    ? ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context)
                    : null;
            }

            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }

        // Registers every supported method name with its translating visitor.
        static ArrayBuiltinFunctions()
        {
            ArrayBuiltinFunctionDefinitions = new Dictionary<string, BuiltinFunctionVisitor>
            {
                {
                    "Concat",
                    new ArrayConcatVisitor()
                },
                {
                    "Contains",
                    new ArrayContainsVisitor()
                },
                {
                    "Count",
                    new ArrayCountVisitor()
                },
                {
                    "get_Item",
                    new ArrayGetItemVisitor()
                },
                {
                    "ToArray",
                    new ArrayToArrayVisitor()
                },
                {
                    "ToList",
                    new ArrayToArrayVisitor()
                }
            };
        }

        /// <summary>
        /// Dispatches a method call to its registered visitor; throws
        /// <see cref="DocumentQueryException"/> for unsupported method names.
        /// </summary>
        public static SqlScalarExpression Visit(MethodCallExpression methodCallExpression, TranslationContext context)
        {
            return ArrayBuiltinFunctionDefinitions.TryGetValue(methodCallExpression.Method.Name, out BuiltinFunctionVisitor visitor)
                ? visitor.Visit(methodCallExpression, context)
                : throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.MethodNotSupported, methodCallExpression.Method.Name));
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------

namespace Microsoft.Azure.Cosmos.Linq
{
    using System.Collections;
    using System.Collections.Generic;
    using System.Collections.Immutable;
    using System.Globalization;
    using System.Linq.Expressions;
    using Microsoft.Azure.Cosmos.SqlObjects;

    /// <summary>
    /// Maps LINQ array/list method calls (Concat, Contains, Count, the indexer,
    /// ToArray/ToList) onto their Cosmos SQL scalar-expression equivalents.
    /// </summary>
    internal static class ArrayBuiltinFunctions
    {
        // Method-name -> visitor lookup, filled in once by the static constructor.
        private static Dictionary<string, BuiltinFunctionVisitor> ArrayBuiltinFunctionDefinitions { get; set; }

        /// <summary>Emits ARRAY_CONCAT(first, second) for Concat(first, second).</summary>
        private class ArrayConcatVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayConcatVisitor()
                : base("ARRAY_CONCAT", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Arguments.Count != 2)
                {
                    return null;
                }

                SqlScalarExpression leftArray = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                SqlScalarExpression rightArray = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[1], context);
                return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_CONCAT", leftArray, rightArray);
            }
        }

        /// <summary>
        /// Emits ARRAY_CONTAINS for Contains calls; when the searched list is a
        /// compile-time constant, emits an IN expression instead.
        /// </summary>
        private class ArrayContainsVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayContainsVisitor()
                : base("ARRAY_CONTAINS", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                Expression haystack;
                Expression needle;
                switch (methodCallExpression.Arguments.Count)
                {
                    case 1:
                        // Instance method: list.Contains(item)
                        haystack = methodCallExpression.Object;
                        needle = methodCallExpression.Arguments[0];
                        break;
                    case 2:
                        // Extension (static) method: Enumerable.Contains(list, item)
                        haystack = methodCallExpression.Arguments[0];
                        needle = methodCallExpression.Arguments[1];
                        break;
                    default:
                        return null;
                }

                if (haystack == null || needle == null)
                {
                    return null;
                }

                if (haystack.NodeType == ExpressionType.Constant)
                {
                    return this.VisitIN(needle, (ConstantExpression)haystack, context);
                }

                SqlScalarExpression arrayExpression = ExpressionToSql.VisitScalarExpression(haystack, context);
                SqlScalarExpression itemExpression = ExpressionToSql.VisitScalarExpression(needle, context);
                return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_CONTAINS", arrayExpression, itemExpression);
            }

            // Builds "expression IN (items...)" from a constant enumerable list.
            private SqlScalarExpression VisitIN(Expression expression, ConstantExpression constantExpressionList, TranslationContext context)
            {
                List<SqlScalarExpression> translatedItems = new List<SqlScalarExpression>();
                foreach (object element in (IEnumerable)constantExpressionList.Value)
                {
                    translatedItems.Add(ExpressionToSql.VisitConstant(Expression.Constant(element), context));
                }

                if (translatedItems.Count == 0)
                {
                    // Nothing can be contained in an empty list: constant-fold to FALSE.
                    return SqlLiteralScalarExpression.SqlFalseLiteralScalarExpression;
                }

                SqlScalarExpression searched = ExpressionToSql.VisitNonSubqueryScalarExpression(expression, context);
                return SqlInScalarExpression.Create(searched, false, translatedItems.ToImmutableArray());
            }
        }

        /// <summary>Emits ARRAY_LENGTH for Count().</summary>
        private class ArrayCountVisitor : SqlBuiltinFunctionVisitor
        {
            public ArrayCountVisitor()
                : base("ARRAY_LENGTH", true, null)
            {
            }

            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Arguments.Count != 1)
                {
                    return null;
                }

                SqlScalarExpression sourceArray = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                return SqlFunctionCallScalarExpression.CreateBuiltin("ARRAY_LENGTH", sourceArray);
            }
        }

        /// <summary>Emits a SQL member indexer for the list indexer (get_Item).</summary>
        private class ArrayGetItemVisitor : BuiltinFunctionVisitor
        {
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Object == null || methodCallExpression.Arguments.Count != 1)
                {
                    return null;
                }

                SqlScalarExpression collection = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                SqlScalarExpression index = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                return SqlMemberIndexerScalarExpression.Create(collection, index);
            }

            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }

        /// <summary>
        /// Treats ToArray()/ToList() as pass-throughs: the source expression is
        /// translated unchanged (there is no SQL counterpart to emit).
        /// </summary>
        private class ArrayToArrayVisitor : BuiltinFunctionVisitor
        {
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Arguments.Count != 1)
                {
                    return null;
                }

                return ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
            }

            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }

        // Registers every supported method name with its translating visitor.
        static ArrayBuiltinFunctions()
        {
            ArrayBuiltinFunctionDefinitions = new Dictionary<string, BuiltinFunctionVisitor>
            {
                ["Concat"] = new ArrayConcatVisitor(),
                ["Contains"] = new ArrayContainsVisitor(),
                ["Count"] = new ArrayCountVisitor(),
                ["get_Item"] = new ArrayGetItemVisitor(),
                ["ToArray"] = new ArrayToArrayVisitor(),
                ["ToList"] = new ArrayToArrayVisitor(),
            };
        }

        /// <summary>
        /// Dispatches a method call to its registered visitor; throws
        /// <see cref="DocumentQueryException"/> when the method name is not supported.
        /// </summary>
        public static SqlScalarExpression Visit(MethodCallExpression methodCallExpression, TranslationContext context)
        {
            if (ArrayBuiltinFunctionDefinitions.TryGetValue(methodCallExpression.Method.Name, out BuiltinFunctionVisitor visitor))
            {
                return visitor.Visit(methodCallExpression, context);
            }

            throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.MethodNotSupported, methodCallExpression.Method.Name));
        }
    }
}

Просмотреть файл

@ -1,437 +1,437 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq.Expressions;
using Microsoft.Azure.Cosmos.SqlObjects;
internal static class StringBuiltinFunctions
{
// Map from .NET string method name to the visitor that translates the call into
// Cosmos SQL; presumably populated by the static constructor (not visible in this chunk).
private static Dictionary<string, BuiltinFunctionVisitor> StringBuiltinFunctionDefinitions { get; set; }
/// <summary>
/// Translates string.Concat to the SQL CONCAT built-in. The fixed-arity overloads
/// listed in the base-constructor signature list are presumably matched by the base
/// visitor; VisitImplicit handles the params-array overload.
/// </summary>
private class StringVisitConcat : SqlBuiltinFunctionVisitor
{
    public StringVisitConcat()
        : base("CONCAT",
            true,
            new List<Type[]>()
            {
                new Type[]{typeof(string), typeof(string)},
                new Type[]{typeof(string), typeof(string), typeof(string)},
                new Type[]{typeof(string), typeof(string), typeof(string), typeof(string)},
            })
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        // string.Concat(params string[]) arrives as one NewArrayExpression argument:
        // translate each element and pass them all to CONCAT.
        if (methodCallExpression.Arguments.Count == 1
            && methodCallExpression.Arguments[0] is NewArrayExpression newArrayExpression)
        {
            ReadOnlyCollection<Expression> argumentsExpressions = newArrayExpression.Expressions;
            List<SqlScalarExpression> arguments = new List<SqlScalarExpression>();
            foreach (Expression argument in argumentsExpressions)
            {
                arguments.Add(ExpressionToSql.VisitScalarExpression(argument, context));
            }

            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Concat, arguments.ToImmutableArray());
        }

        return null;
    }
}
/// <summary>
/// Translates string.Contains to the SQL CONTAINS built-in. VisitImplicit handles
/// the Contains(value, StringComparison) overload by appending a case-sensitivity flag.
/// </summary>
private class StringVisitContains : SqlBuiltinFunctionVisitor
{
    public StringVisitContains()
        : base("CONTAINS",
            false,
            new List<Type[]>()
            {
                new Type[]{typeof(string)},
                new Type[]{typeof(char)}
            })
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        // Contains(value, StringComparison): the second argument selects case sensitivity.
        if (methodCallExpression.Arguments.Count == 2)
        {
            SqlScalarExpression haystack = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
            SqlScalarExpression needle = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
            SqlScalarExpression caseSensitivity = SqlStringWithComparisonVisitor.GetCaseSensitivityExpression(methodCallExpression.Arguments[1]);
            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Contains, haystack, needle, caseSensitivity);
        }

        return null;
    }
}
/// <summary>
/// Translates the LINQ Count() call on a string to the SQL LENGTH built-in.
/// </summary>
private class StringVisitCount : SqlBuiltinFunctionVisitor
{
    public StringVisitCount()
        : base("LENGTH",
            true,
            null)
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        // Extension-method form: the string is the single argument.
        if (methodCallExpression.Arguments.Count == 1)
        {
            SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Length, str);
        }

        return null;
    }
}
/// <summary>
/// Translates string.TrimStart to the SQL LTRIM built-in. Only the no-trim-chars
/// forms are accepted: TrimStart(empty char[]) (.NET Framework shape) or
/// TrimStart() with no arguments (.NET Core shape).
/// </summary>
private class StringVisitTrimStart : SqlBuiltinFunctionVisitor
{
    public StringVisitTrimStart()
        : base("LTRIM",
            false,
            null)
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        bool validInNet = false;        // TrimStart(new char[0]) — .NET Framework form
        bool validInNetCore = false;    // TrimStart() — .NET Core form
        if (methodCallExpression.Arguments.Count == 1 &&
            methodCallExpression.Arguments[0].NodeType == ExpressionType.Constant &&
            methodCallExpression.Arguments[0].Type == typeof(char[]))
        {
            // Only an EMPTY constant char[] is supported: LTRIM has no trim-set argument.
            char[] argumentsExpressions = (char[])((ConstantExpression)methodCallExpression.Arguments[0]).Value;
            if (argumentsExpressions.Length == 0)
            {
                validInNet = true;
            }
        }
        else if (methodCallExpression.Arguments.Count == 0)
        {
            validInNetCore = true;
        }

        if (validInNet || validInNetCore)
        {
            SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Ltrim, str);
        }

        return null;
    }
}
/// <summary>
/// Translates the LINQ Reverse() call on a string to the SQL REVERSE built-in.
/// </summary>
private class StringVisitReverse : SqlBuiltinFunctionVisitor
{
    public StringVisitReverse()
        : base("REVERSE",
            true,
            null)
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        // Extension-method form: the string is the single argument.
        if (methodCallExpression.Arguments.Count == 1)
        {
            SqlScalarExpression str = ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Arguments[0], context);
            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Reverse, str);
        }

        return null;
    }
}
/// <summary>
/// Visitor for string methods that take one value argument plus an optional
/// StringComparison. Emits the configured SQL built-in and, when a comparison is
/// supplied, appends a boolean "ignore case" flag argument.
/// </summary>
private sealed class SqlStringWithComparisonVisitor : BuiltinFunctionVisitor
{
    // Comparisons mapped to a 'false' ignore-case flag (case-sensitive).
    private static readonly IImmutableSet<StringComparison> SensitiveCaseComparisons = ImmutableHashSet.Create<StringComparison>(new[]
    {
        StringComparison.CurrentCulture,
        StringComparison.InvariantCulture,
        StringComparison.Ordinal
    });

    // Comparisons mapped to a 'true' ignore-case flag (case-insensitive).
    private static readonly IImmutableSet<StringComparison> IgnoreCaseComparisons = ImmutableHashSet.Create<StringComparison>(new[]
    {
        StringComparison.CurrentCultureIgnoreCase,
        StringComparison.InvariantCultureIgnoreCase,
        StringComparison.OrdinalIgnoreCase
    });

    // Name of the SQL built-in function this visitor emits.
    public string SqlName { get; }

    public SqlStringWithComparisonVisitor(string sqlName)
    {
        this.SqlName = sqlName ?? throw new ArgumentNullException(nameof(sqlName));
    }

    /// <summary>
    /// Maps a constant StringComparison expression to a boolean SQL literal meaning
    /// "ignore case"; returns null when the expression is not a recognized constant.
    /// </summary>
    public static SqlScalarExpression GetCaseSensitivityExpression(Expression expression)
    {
        if (expression is ConstantExpression inputExpression
            && inputExpression.Value is StringComparison comparisonValue)
        {
            if (SensitiveCaseComparisons.Contains(comparisonValue))
            {
                return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(false));
            }
            else if (IgnoreCaseComparisons.Contains(comparisonValue))
            {
                return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(true));
            }
        }

        return null;
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        // Supported shapes: method(value) or method(value, StringComparison).
        int argumentCount = methodCallExpression.Arguments.Count;
        if (argumentCount == 0 || argumentCount > 2)
        {
            return null;
        }

        List<SqlScalarExpression> arguments = new List<SqlScalarExpression>
        {
            ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Object, context),
            ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Arguments[0], context)
        };

        if (argumentCount > 1)
        {
            arguments.Add(GetCaseSensitivityExpression(methodCallExpression.Arguments[1]));
        }

        return SqlFunctionCallScalarExpression.CreateBuiltin(this.SqlName, arguments.ToArray());
    }

    protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        return null;
    }
}
/// <summary>
/// Translates string.TrimEnd to the SQL RTRIM built-in. Only the no-trim-chars
/// forms are accepted: TrimEnd(empty char[]) (.NET Framework shape) or
/// TrimEnd() with no arguments (.NET Core shape).
/// </summary>
private class StringVisitTrimEnd : SqlBuiltinFunctionVisitor
{
    public StringVisitTrimEnd()
        : base("RTRIM",
            false,
            null)
    {
    }

    protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
    {
        bool validInNet = false;        // TrimEnd(new char[0]) — .NET Framework form
        bool validInNetCore = false;    // TrimEnd() — .NET Core form
        if (methodCallExpression.Arguments.Count == 1 &&
            methodCallExpression.Arguments[0].NodeType == ExpressionType.Constant &&
            methodCallExpression.Arguments[0].Type == typeof(char[]))
        {
            // Only an EMPTY constant char[] is supported: RTRIM has no trim-set argument.
            char[] argumentsExpressions = (char[])((ConstantExpression)methodCallExpression.Arguments[0]).Value;
            if (argumentsExpressions.Length == 0)
            {
                validInNet = true;
            }
        }
        else if (methodCallExpression.Arguments.Count == 0)
        {
            validInNetCore = true;
        }

        if (validInNet || validInNetCore)
        {
            SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
            return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Rtrim, str);
        }

        return null;
    }
}
        private class StringGetCharsVisitor : BuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates the string indexer (get_Chars, i.e. s[i]) to SUBSTRING(s, i, 1).
            /// </summary>
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                if (methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression memberExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    SqlScalarExpression indexExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    SqlScalarExpression[] arguments = new SqlScalarExpression[]
                    {
                        memberExpression,
                        indexExpression,
                        // Constant length of 1: a single character.
                        ExpressionToSql.VisitScalarExpression(Expression.Constant(1), context)
                    };
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Substring, arguments);
                }
                return null;
            }
            // No explicit translation; always defers (null).
            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }
        private class StringEqualsVisitor : BuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates string.Equals: the single-argument overload becomes the SQL
            /// equality operator; the (value, StringComparison) overload becomes the
            /// STRINGEQUALS built-in with a boolean comparison flag.
            /// </summary>
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Equals(string): plain equality.
                if (methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression left = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    SqlScalarExpression right = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    return SqlBinaryScalarExpression.Create(SqlBinaryScalarOperatorKind.Equal, left, right);
                }
                // Equals(string, StringComparison): STRINGEQUALS built-in.
                if (methodCallExpression.Arguments.Count == 2)
                {
                    SqlScalarExpression left = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    SqlScalarExpression right = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    // NOTE(review): null when Arguments[1] is not a recognized constant
                    // StringComparison — confirm CreateBuiltin tolerates a null argument.
                    SqlScalarExpression caseSensitivity = SqlStringWithComparisonVisitor.GetCaseSensitivityExpression(methodCallExpression.Arguments[1]);
                    return SqlFunctionCallScalarExpression.CreateBuiltin(
                        SqlFunctionCallScalarExpression.Names.StringEquals,
                        left,
                        right,
                        caseSensitivity);
                }
                return null;
            }
            // No explicit translation; always defers (null).
            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }
        /// <summary>
        /// Registers the translation visitor for each supported System.String (and
        /// string-applicable LINQ) member, keyed by CLR method name.
        /// </summary>
        static StringBuiltinFunctions()
        {
            StringBuiltinFunctionDefinitions = new Dictionary<string, BuiltinFunctionVisitor>
            {
                {
                    "Concat",
                    new StringVisitConcat()
                },
                {
                    "Contains",
                    new StringVisitContains()
                },
                {
                    "EndsWith",
                    new SqlStringWithComparisonVisitor(SqlFunctionCallScalarExpression.Names.Endswith)
                },
                {
                    "IndexOf",
                    new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.IndexOf,
                        false,
                        new List<Type[]>()
                        {
                            new Type[]{typeof(char)},
                            new Type[]{typeof(string)},
                            new Type[]{typeof(char), typeof(int)},
                            new Type[]{typeof(string), typeof(int)},
                        })
                },
                {
                    "Count",
                    new StringVisitCount()
                },
                {
                    "ToLower",
                    new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Lower,
                        false,
                        new List<Type[]>()
                        {
                            new Type[]{}
                        })
                },
                {
                    "TrimStart",
                    new StringVisitTrimStart()
                },
                {
                    "Replace",
                    new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Replace,
                        false,
                        new List<Type[]>()
                        {
                            new Type[]{typeof(char), typeof(char)},
                            new Type[]{typeof(string), typeof(string)}
                        })
                },
                {
                    "Reverse",
                    new StringVisitReverse()
                },
                {
                    "TrimEnd",
                    new StringVisitTrimEnd()
                },
                {
                    "StartsWith",
                    new SqlStringWithComparisonVisitor(SqlFunctionCallScalarExpression.Names.Startswith)
                },
                {
                    "Substring",
                    new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Substring,
                        false,
                        new List<Type[]>()
                        {
                            new Type[]{typeof(int), typeof(int)}
                        })
                },
                {
                    "ToUpper",
                    new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Upper,
                        false,
                        new List<Type[]>()
                        {
                            new Type[]{}
                        })
                },
                {
                    // Compiler-generated name of the string indexer (s[i]).
                    "get_Chars",
                    new StringGetCharsVisitor()
                },
                {
                    "Equals",
                    new StringEqualsVisitor()
                }
            };
        }
public static SqlScalarExpression Visit(MethodCallExpression methodCallExpression, TranslationContext context)
{
BuiltinFunctionVisitor visitor = null;
if (StringBuiltinFunctionDefinitions.TryGetValue(methodCallExpression.Method.Name, out visitor))
{
return visitor.Visit(methodCallExpression, context);
}
throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.MethodNotSupported, methodCallExpression.Method.Name));
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq.Expressions;
using Microsoft.Azure.Cosmos.SqlObjects;
internal static class StringBuiltinFunctions
{
private static Dictionary<string, BuiltinFunctionVisitor> StringBuiltinFunctionDefinitions { get; set; }
        private class StringVisitConcat : SqlBuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates string.Concat to the CONCAT built-in function.
            /// </summary>
            public StringVisitConcat()
                : base("CONCAT",
                    true,
                    new List<Type[]>()
                    {
                        new Type[]{typeof(string), typeof(string)},
                        new Type[]{typeof(string), typeof(string), typeof(string)},
                        new Type[]{typeof(string), typeof(string), typeof(string), typeof(string)},
                    })
            {
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Concat(params string[]) arrives as a single NewArrayExpression argument;
                // expand its elements into individual CONCAT arguments.
                if (methodCallExpression.Arguments.Count == 1
                    && methodCallExpression.Arguments[0] is NewArrayExpression newArrayExpression)
                {
                    ReadOnlyCollection<Expression> argumentsExpressions = newArrayExpression.Expressions;
                    List<SqlScalarExpression> arguments = new List<SqlScalarExpression>();
                    foreach (Expression argument in argumentsExpressions)
                    {
                        arguments.Add(ExpressionToSql.VisitScalarExpression(argument, context));
                    }
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Concat, arguments.ToImmutableArray());
                }
                return null;
            }
        }
        private class StringVisitContains : SqlBuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates string.Contains to the CONTAINS built-in function.
            /// </summary>
            public StringVisitContains()
                : base("CONTAINS",
                    false,
                    new List<Type[]>()
                    {
                        new Type[]{typeof(string)},
                        new Type[]{typeof(char)}
                    })
            {
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Contains(value, StringComparison) overload; the single-argument overloads
                // are presumably matched by the base visitor's registered signatures.
                if (methodCallExpression.Arguments.Count == 2)
                {
                    SqlScalarExpression haystack = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    SqlScalarExpression needle = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    // NOTE(review): null when Arguments[1] is not a recognized constant
                    // StringComparison — confirm CreateBuiltin tolerates a null argument.
                    SqlScalarExpression caseSensitivity = SqlStringWithComparisonVisitor.GetCaseSensitivityExpression(methodCallExpression.Arguments[1]);
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Contains, haystack, needle, caseSensitivity);
                }
                return null;
            }
        }
        private class StringVisitCount : SqlBuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates Count() on a string to the LENGTH built-in function.
            /// </summary>
            public StringVisitCount()
                : base("LENGTH",
                    true,
                    null)
            {
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Static extension form: the string is Arguments[0], not .Object.
                if (methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Length, str);
                }
                return null;
            }
        }
        private class StringVisitTrimStart : SqlBuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates string.TrimStart to the LTRIM built-in function.
            /// </summary>
            public StringVisitTrimStart()
                : base("LTRIM",
                    false,
                    null)
            {
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // .NET Framework surfaces TrimStart() as TrimStart(char[]) with an empty
                // constant array; .NET Core has a parameterless overload. Only those two
                // shapes translate to LTRIM(instance).
                bool validInNet = false;
                bool validInNetCore = false;
                if (methodCallExpression.Arguments.Count == 1 &&
                    methodCallExpression.Arguments[0].NodeType == ExpressionType.Constant &&
                    methodCallExpression.Arguments[0].Type == typeof(char[]))
                {
                    char[] argumentsExpressions = (char[])((ConstantExpression)methodCallExpression.Arguments[0]).Value;
                    if (argumentsExpressions.Length == 0)
                    {
                        validInNet = true;
                    }
                }
                else if (methodCallExpression.Arguments.Count == 0)
                {
                    validInNetCore = true;
                }
                if (validInNet || validInNetCore)
                {
                    SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Ltrim, str);
                }
                return null;
            }
        }
        private class StringVisitReverse : SqlBuiltinFunctionVisitor
        {
            /// <summary>
            /// Translates Reverse() on a string to the REVERSE built-in function.
            /// </summary>
            public StringVisitReverse()
                : base("REVERSE",
                    true,
                    null)
            {
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Static extension form: the string is Arguments[0], not .Object.
                if (methodCallExpression.Arguments.Count == 1)
                {
                    SqlScalarExpression str = ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Arguments[0], context);
                    return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Reverse, str);
                }
                return null;
            }
        }
        /// <summary>
        /// Translates a string method with an optional <see cref="StringComparison"/>
        /// argument (e.g. StartsWith, EndsWith) into a named SQL built-in function.
        /// </summary>
        private sealed class SqlStringWithComparisonVisitor : BuiltinFunctionVisitor
        {
            // StringComparison values that map to a 'false' boolean literal argument.
            private static readonly IImmutableSet<StringComparison> SensitiveCaseComparisons = ImmutableHashSet.Create<StringComparison>(new[]
            {
                StringComparison.CurrentCulture,
                StringComparison.InvariantCulture,
                StringComparison.Ordinal
            });
            // *IgnoreCase values that map to a 'true' boolean literal argument.
            private static readonly IImmutableSet<StringComparison> IgnoreCaseComparisons = ImmutableHashSet.Create<StringComparison>(new[]
            {
                StringComparison.CurrentCultureIgnoreCase,
                StringComparison.InvariantCultureIgnoreCase,
                StringComparison.OrdinalIgnoreCase
            });
            // Name of the SQL built-in this visitor emits.
            public string SqlName { get; }
            public SqlStringWithComparisonVisitor(string sqlName)
            {
                this.SqlName = sqlName ?? throw new ArgumentNullException(nameof(sqlName));
            }
            /// <summary>
            /// Maps a constant <see cref="StringComparison"/> to a boolean SQL literal
            /// (false for case-sensitive, true for *IgnoreCase); null when unrecognized.
            /// </summary>
            public static SqlScalarExpression GetCaseSensitivityExpression(Expression expression)
            {
                if (expression is ConstantExpression inputExpression
                    && inputExpression.Value is StringComparison comparisonValue)
                {
                    if (SensitiveCaseComparisons.Contains(comparisonValue))
                    {
                        return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(false));
                    }
                    else if (IgnoreCaseComparisons.Contains(comparisonValue))
                    {
                        return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(true));
                    }
                }
                return null;
            }
            protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                // Supported shapes: instance.Method(value) and instance.Method(value, StringComparison).
                int argumentCount = methodCallExpression.Arguments.Count;
                if (argumentCount == 0 || argumentCount > 2)
                {
                    return null;
                }
                List<SqlScalarExpression> arguments = new List<SqlScalarExpression>
                {
                    ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Object, context),
                    ExpressionToSql.VisitNonSubqueryScalarExpression(methodCallExpression.Arguments[0], context)
                };
                if (argumentCount > 1)
                {
                    // NOTE(review): GetCaseSensitivityExpression may return null here —
                    // confirm CreateBuiltin tolerates a null argument.
                    arguments.Add(GetCaseSensitivityExpression(methodCallExpression.Arguments[1]));
                }
                return SqlFunctionCallScalarExpression.CreateBuiltin(this.SqlName, arguments.ToArray());
            }
            // No explicit translation; always defers (null).
            protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
            {
                return null;
            }
        }
private class StringVisitTrimEnd : SqlBuiltinFunctionVisitor
{
public StringVisitTrimEnd()
: base("RTRIM",
false,
null)
{
}
protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
{
bool validInNet = false;
bool validInNetCore = false;
if (methodCallExpression.Arguments.Count == 1 &&
methodCallExpression.Arguments[0].NodeType == ExpressionType.Constant &&
methodCallExpression.Arguments[0].Type == typeof(char[]))
{
char[] argumentsExpressions = (char[])((ConstantExpression)methodCallExpression.Arguments[0]).Value;
if (argumentsExpressions.Length == 0)
{
validInNet = true;
}
}
else if (methodCallExpression.Arguments.Count == 0)
{
validInNetCore = true;
}
if (validInNet || validInNetCore)
{
SqlScalarExpression str = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Rtrim, str);
}
return null;
}
}
private class StringGetCharsVisitor : BuiltinFunctionVisitor
{
protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
{
if (methodCallExpression.Arguments.Count == 1)
{
SqlScalarExpression memberExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
SqlScalarExpression indexExpression = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
SqlScalarExpression[] arguments = new SqlScalarExpression[]
{
memberExpression,
indexExpression,
ExpressionToSql.VisitScalarExpression(Expression.Constant(1), context)
};
return SqlFunctionCallScalarExpression.CreateBuiltin(SqlFunctionCallScalarExpression.Names.Substring, arguments);
}
return null;
}
protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
{
return null;
}
}
private class StringEqualsVisitor : BuiltinFunctionVisitor
{
protected override SqlScalarExpression VisitImplicit(MethodCallExpression methodCallExpression, TranslationContext context)
{
if (methodCallExpression.Arguments.Count == 1)
{
SqlScalarExpression left = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
SqlScalarExpression right = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
return SqlBinaryScalarExpression.Create(SqlBinaryScalarOperatorKind.Equal, left, right);
}
if (methodCallExpression.Arguments.Count == 2)
{
SqlScalarExpression left = ExpressionToSql.VisitScalarExpression(methodCallExpression.Object, context);
SqlScalarExpression right = ExpressionToSql.VisitScalarExpression(methodCallExpression.Arguments[0], context);
SqlScalarExpression caseSensitivity = SqlStringWithComparisonVisitor.GetCaseSensitivityExpression(methodCallExpression.Arguments[1]);
return SqlFunctionCallScalarExpression.CreateBuiltin(
SqlFunctionCallScalarExpression.Names.StringEquals,
left,
right,
caseSensitivity);
}
return null;
}
protected override SqlScalarExpression VisitExplicit(MethodCallExpression methodCallExpression, TranslationContext context)
{
return null;
}
}
static StringBuiltinFunctions()
{
StringBuiltinFunctionDefinitions = new Dictionary<string, BuiltinFunctionVisitor>
{
{
"Concat",
new StringVisitConcat()
},
{
"Contains",
new StringVisitContains()
},
{
"EndsWith",
new SqlStringWithComparisonVisitor(SqlFunctionCallScalarExpression.Names.Endswith)
},
{
"IndexOf",
new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.IndexOf,
false,
new List<Type[]>()
{
new Type[]{typeof(char)},
new Type[]{typeof(string)},
new Type[]{typeof(char), typeof(int)},
new Type[]{typeof(string), typeof(int)},
})
},
{
"Count",
new StringVisitCount()
},
{
"ToLower",
new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Lower,
false,
new List<Type[]>()
{
new Type[]{}
})
},
{
"TrimStart",
new StringVisitTrimStart()
},
{
"Replace",
new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Replace,
false,
new List<Type[]>()
{
new Type[]{typeof(char), typeof(char)},
new Type[]{typeof(string), typeof(string)}
})
},
{
"Reverse",
new StringVisitReverse()
},
{
"TrimEnd",
new StringVisitTrimEnd()
},
{
"StartsWith",
new SqlStringWithComparisonVisitor(SqlFunctionCallScalarExpression.Names.Startswith)
},
{
"Substring",
new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Substring,
false,
new List<Type[]>()
{
new Type[]{typeof(int), typeof(int)}
})
},
{
"ToUpper",
new SqlBuiltinFunctionVisitor(SqlFunctionCallScalarExpression.Names.Upper,
false,
new List<Type[]>()
{
new Type[]{}
})
},
{
"get_Chars",
new StringGetCharsVisitor()
},
{
"Equals",
new StringEqualsVisitor()
}
};
}
public static SqlScalarExpression Visit(MethodCallExpression methodCallExpression, TranslationContext context)
{
BuiltinFunctionVisitor visitor = null;
if (StringBuiltinFunctionDefinitions.TryGetValue(methodCallExpression.Method.Name, out visitor))
{
return visitor.Visit(methodCallExpression, context);
}
throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.MethodNotSupported, methodCallExpression.Method.Name));
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,102 +1,102 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.Azure.Cosmos.Spatial;
using Microsoft.Azure.Cosmos.SqlObjects;
using Newtonsoft.Json.Linq;
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="Expression"/>.
/// </summary>
internal static class GeometrySqlExpressionFactory
{
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="Expression"/>.
/// </summary>
/// <param name="geometryExpression">
/// Expression of type <see cref="Geometry"/>.
/// </param>
/// <returns>Instance of <see cref="SqlScalarExpression"/> representing geometry <paramref name="geometryExpression"/>.</returns>.
public static SqlScalarExpression Construct(Expression geometryExpression)
{
if (!typeof(Geometry).IsAssignableFrom(geometryExpression.Type))
{
throw new ArgumentException("geometryExpression");
}
if (geometryExpression.NodeType == ExpressionType.Constant)
{
// This is just optimization - if got constant, we don't need to compile expression etc.
JObject jsonObject = JObject.FromObject(((ConstantExpression)geometryExpression).Value);
return GeometrySqlExpressionFactory.FromJToken(jsonObject);
}
Geometry geometry;
try
{
Expression<Func<Geometry>> le = Expression.Lambda<Func<Geometry>>(geometryExpression);
Func<Geometry> compiledExpression = le.Compile();
geometry = compiledExpression();
}
catch (Exception ex)
{
throw new DocumentQueryException(
string.Format(CultureInfo.CurrentCulture, ClientResources.FailedToEvaluateSpatialExpression), ex);
}
return GeometrySqlExpressionFactory.FromJToken(JObject.FromObject(geometry));
}
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="JToken"/>.
/// </summary>
/// <param name="jToken">Json token.</param>
/// <returns>Instance of <see cref="SqlScalarExpression"/>.</returns>
private static SqlScalarExpression FromJToken(JToken jToken)
{
switch (jToken.Type)
{
case JTokenType.Array:
return SqlArrayCreateScalarExpression.Create(jToken.Select(FromJToken).ToArray());
case JTokenType.Boolean:
return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(jToken.Value<bool>()));
case JTokenType.Null:
return SqlLiteralScalarExpression.SqlNullLiteralScalarExpression;
case JTokenType.String:
return SqlLiteralScalarExpression.Create(SqlStringLiteral.Create(jToken.Value<string>()));
case JTokenType.Object:
SqlObjectProperty[] properties =
((JObject)jToken).Properties()
.Select(
p =>
SqlObjectProperty.Create(
SqlPropertyName.Create(p.Name),
FromJToken(p.Value)))
.ToArray();
return SqlObjectCreateScalarExpression.Create(properties);
case JTokenType.Float:
case JTokenType.Integer:
SqlNumberLiteral sqlNumberLiteral = SqlNumberLiteral.Create(jToken.Value<double>());
return SqlLiteralScalarExpression.Create(sqlNumberLiteral);
default:
throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.UnexpectedTokenType, jToken.Type));
}
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.Azure.Cosmos.Spatial;
using Microsoft.Azure.Cosmos.SqlObjects;
using Newtonsoft.Json.Linq;
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="Expression"/>.
/// </summary>
internal static class GeometrySqlExpressionFactory
{
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="Expression"/>.
/// </summary>
/// <param name="geometryExpression">
/// Expression of type <see cref="Geometry"/>.
/// </param>
/// <returns>Instance of <see cref="SqlScalarExpression"/> representing geometry <paramref name="geometryExpression"/>.</returns>.
public static SqlScalarExpression Construct(Expression geometryExpression)
{
if (!typeof(Geometry).IsAssignableFrom(geometryExpression.Type))
{
throw new ArgumentException("geometryExpression");
}
if (geometryExpression.NodeType == ExpressionType.Constant)
{
// This is just optimization - if got constant, we don't need to compile expression etc.
JObject jsonObject = JObject.FromObject(((ConstantExpression)geometryExpression).Value);
return GeometrySqlExpressionFactory.FromJToken(jsonObject);
}
Geometry geometry;
try
{
Expression<Func<Geometry>> le = Expression.Lambda<Func<Geometry>>(geometryExpression);
Func<Geometry> compiledExpression = le.Compile();
geometry = compiledExpression();
}
catch (Exception ex)
{
throw new DocumentQueryException(
string.Format(CultureInfo.CurrentCulture, ClientResources.FailedToEvaluateSpatialExpression), ex);
}
return GeometrySqlExpressionFactory.FromJToken(JObject.FromObject(geometry));
}
/// <summary>
/// Constructs <see cref="SqlScalarExpression"/> from a geometry <see cref="JToken"/>.
/// </summary>
/// <param name="jToken">Json token.</param>
/// <returns>Instance of <see cref="SqlScalarExpression"/>.</returns>
private static SqlScalarExpression FromJToken(JToken jToken)
{
switch (jToken.Type)
{
case JTokenType.Array:
return SqlArrayCreateScalarExpression.Create(jToken.Select(FromJToken).ToArray());
case JTokenType.Boolean:
return SqlLiteralScalarExpression.Create(SqlBooleanLiteral.Create(jToken.Value<bool>()));
case JTokenType.Null:
return SqlLiteralScalarExpression.SqlNullLiteralScalarExpression;
case JTokenType.String:
return SqlLiteralScalarExpression.Create(SqlStringLiteral.Create(jToken.Value<string>()));
case JTokenType.Object:
SqlObjectProperty[] properties =
((JObject)jToken).Properties()
.Select(
p =>
SqlObjectProperty.Create(
SqlPropertyName.Create(p.Name),
FromJToken(p.Value)))
.ToArray();
return SqlObjectCreateScalarExpression.Create(properties);
case JTokenType.Float:
case JTokenType.Integer:
SqlNumberLiteral sqlNumberLiteral = SqlNumberLiteral.Create(jToken.Value<double>());
return SqlLiteralScalarExpression.Create(sqlNumberLiteral);
default:
throw new DocumentQueryException(string.Format(CultureInfo.CurrentCulture, ClientResources.UnexpectedTokenType, jToken.Type));
}
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,126 +1,126 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.Azure.Cosmos.SqlObjects;
internal static class SqlExpressionManipulation
{
public static SqlScalarExpression Substitute(SqlScalarExpression replacement, SqlIdentifier toReplace, SqlScalarExpression into)
{
if (into == null)
{
return null;
}
if (replacement == null)
{
throw new ArgumentNullException("replacement");
}
switch (into)
{
case SqlArrayCreateScalarExpression arrayExp:
{
SqlScalarExpression[] items = new SqlScalarExpression[arrayExp.Items.Length];
for (int i = 0; i < items.Length; i++)
{
SqlScalarExpression item = arrayExp.Items[i];
SqlScalarExpression replitem = Substitute(replacement, toReplace, item);
items[i] = replitem;
}
return SqlArrayCreateScalarExpression.Create(items);
}
case SqlBinaryScalarExpression binaryExp:
{
SqlScalarExpression replleft = Substitute(replacement, toReplace, binaryExp.LeftExpression);
SqlScalarExpression replright = Substitute(replacement, toReplace, binaryExp.RightExpression);
return SqlBinaryScalarExpression.Create(binaryExp.OperatorKind, replleft, replright);
}
case SqlUnaryScalarExpression unaryExp:
{
SqlScalarExpression repl = Substitute(replacement, toReplace, unaryExp.Expression);
return SqlUnaryScalarExpression.Create(unaryExp.OperatorKind, repl);
}
case SqlLiteralScalarExpression literalScalarExpression:
{
return into;
}
case SqlFunctionCallScalarExpression funcExp:
{
SqlScalarExpression[] items = new SqlScalarExpression[funcExp.Arguments.Length];
for (int i = 0; i < items.Length; i++)
{
SqlScalarExpression item = funcExp.Arguments[i];
SqlScalarExpression replitem = Substitute(replacement, toReplace, item);
items[i] = replitem;
}
return SqlFunctionCallScalarExpression.Create(funcExp.Name, funcExp.IsUdf, items);
}
case SqlObjectCreateScalarExpression objExp:
{
return SqlObjectCreateScalarExpression.Create(
objExp
.Properties
.Select(prop => SqlObjectProperty.Create(prop.Name, Substitute(replacement, toReplace, prop.Value)))
.ToImmutableArray());
}
case SqlMemberIndexerScalarExpression memberExp:
{
SqlScalarExpression replMember = Substitute(replacement, toReplace, memberExp.Member);
SqlScalarExpression replIndex = Substitute(replacement, toReplace, memberExp.Indexer);
return SqlMemberIndexerScalarExpression.Create(replMember, replIndex);
}
case SqlPropertyRefScalarExpression propExp:
{
// This is the leaf of the recursion
if (propExp.Member == null)
{
if (propExp.Identifier.Value == toReplace.Value)
{
return replacement;
}
else
{
return propExp;
}
}
else
{
SqlScalarExpression replMember = Substitute(replacement, toReplace, propExp.Member);
return SqlPropertyRefScalarExpression.Create(replMember, propExp.Identifier);
}
}
case SqlConditionalScalarExpression conditionalExpression:
{
SqlScalarExpression condition = Substitute(replacement, toReplace, conditionalExpression.Condition);
SqlScalarExpression first = Substitute(replacement, toReplace, conditionalExpression.Consequent);
SqlScalarExpression second = Substitute(replacement, toReplace, conditionalExpression.Alternative);
return SqlConditionalScalarExpression.Create(condition, first, second);
}
case SqlInScalarExpression inExpression:
{
SqlScalarExpression expression = Substitute(replacement, toReplace, inExpression.Needle);
SqlScalarExpression[] items = new SqlScalarExpression[inExpression.Haystack.Length];
for (int i = 0; i < items.Length; i++)
{
items[i] = Substitute(replacement, toReplace, inExpression.Haystack[i]);
}
return SqlInScalarExpression.Create(expression, inExpression.Not, items);
}
default:
throw new ArgumentOutOfRangeException("Unexpected Sql Scalar expression kind " + into.GetType());
}
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Linq
{
using System;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.Azure.Cosmos.SqlObjects;
internal static class SqlExpressionManipulation
{
public static SqlScalarExpression Substitute(SqlScalarExpression replacement, SqlIdentifier toReplace, SqlScalarExpression into)
{
if (into == null)
{
return null;
}
if (replacement == null)
{
throw new ArgumentNullException("replacement");
}
switch (into)
{
case SqlArrayCreateScalarExpression arrayExp:
{
SqlScalarExpression[] items = new SqlScalarExpression[arrayExp.Items.Length];
for (int i = 0; i < items.Length; i++)
{
SqlScalarExpression item = arrayExp.Items[i];
SqlScalarExpression replitem = Substitute(replacement, toReplace, item);
items[i] = replitem;
}
return SqlArrayCreateScalarExpression.Create(items);
}
case SqlBinaryScalarExpression binaryExp:
{
SqlScalarExpression replleft = Substitute(replacement, toReplace, binaryExp.LeftExpression);
SqlScalarExpression replright = Substitute(replacement, toReplace, binaryExp.RightExpression);
return SqlBinaryScalarExpression.Create(binaryExp.OperatorKind, replleft, replright);
}
case SqlUnaryScalarExpression unaryExp:
{
SqlScalarExpression repl = Substitute(replacement, toReplace, unaryExp.Expression);
return SqlUnaryScalarExpression.Create(unaryExp.OperatorKind, repl);
}
case SqlLiteralScalarExpression literalScalarExpression:
{
return into;
}
case SqlFunctionCallScalarExpression funcExp:
{
SqlScalarExpression[] items = new SqlScalarExpression[funcExp.Arguments.Length];
for (int i = 0; i < items.Length; i++)
{
SqlScalarExpression item = funcExp.Arguments[i];
SqlScalarExpression replitem = Substitute(replacement, toReplace, item);
items[i] = replitem;
}
return SqlFunctionCallScalarExpression.Create(funcExp.Name, funcExp.IsUdf, items);
}
case SqlObjectCreateScalarExpression objExp:
{
return SqlObjectCreateScalarExpression.Create(
objExp
.Properties
.Select(prop => SqlObjectProperty.Create(prop.Name, Substitute(replacement, toReplace, prop.Value)))
.ToImmutableArray());
}
case SqlMemberIndexerScalarExpression memberExp:
{
SqlScalarExpression replMember = Substitute(replacement, toReplace, memberExp.Member);
SqlScalarExpression replIndex = Substitute(replacement, toReplace, memberExp.Indexer);
return SqlMemberIndexerScalarExpression.Create(replMember, replIndex);
}
case SqlPropertyRefScalarExpression propExp:
{
// This is the leaf of the recursion
if (propExp.Member == null)
{
if (propExp.Identifier.Value == toReplace.Value)
{
return replacement;
}
else
{
return propExp;
}
}
else
{
SqlScalarExpression replMember = Substitute(replacement, toReplace, propExp.Member);
return SqlPropertyRefScalarExpression.Create(replMember, propExp.Identifier);
}
}
case SqlConditionalScalarExpression conditionalExpression:
{
SqlScalarExpression condition = Substitute(replacement, toReplace, conditionalExpression.Condition);
SqlScalarExpression first = Substitute(replacement, toReplace, conditionalExpression.Consequent);
SqlScalarExpression second = Substitute(replacement, toReplace, conditionalExpression.Alternative);
return SqlConditionalScalarExpression.Create(condition, first, second);
}
case SqlInScalarExpression inExpression:
{
SqlScalarExpression expression = Substitute(replacement, toReplace, inExpression.Needle);
SqlScalarExpression[] items = new SqlScalarExpression[inExpression.Haystack.Length];
for (int i = 0; i < items.Length; i++)
{
items[i] = Substitute(replacement, toReplace, inExpression.Haystack[i]);
}
return SqlInScalarExpression.Create(expression, inExpression.Not, items);
}
default:
throw new ArgumentOutOfRangeException("Unexpected Sql Scalar expression kind " + into.GetType());
}
}
}
}

Просмотреть файл

@ -110,22 +110,22 @@ namespace Microsoft.Azure.Cosmos
ConnectionPolicy connectionPolicy = clientOptions.GetConnectionPolicy(cosmosClient.ClientId);
ClientTelemetry telemetry = null;
if (connectionPolicy.EnableClientTelemetry)
{
try
{
telemetry = ClientTelemetry.CreateAndStartBackgroundTelemetry(
documentClient: documentClient,
userAgent: connectionPolicy.UserAgentContainer.UserAgent,
connectionMode: connectionPolicy.ConnectionMode,
authorizationTokenProvider: cosmosClient.AuthorizationTokenProvider,
diagnosticsHelper: DiagnosticsHandlerHelper.Instance,
preferredRegions: clientOptions.ApplicationPreferredRegions);
}
catch (Exception ex)
{
DefaultTrace.TraceInformation($"Error While starting Telemetry Job : {ex.Message}. Hence disabling Client Telemetry");
connectionPolicy.EnableClientTelemetry = false;
{
try
{
telemetry = ClientTelemetry.CreateAndStartBackgroundTelemetry(
documentClient: documentClient,
userAgent: connectionPolicy.UserAgentContainer.UserAgent,
connectionMode: connectionPolicy.ConnectionMode,
authorizationTokenProvider: cosmosClient.AuthorizationTokenProvider,
diagnosticsHelper: DiagnosticsHandlerHelper.Instance,
preferredRegions: clientOptions.ApplicationPreferredRegions);
}
catch (Exception ex)
{
DefaultTrace.TraceInformation($"Error While starting Telemetry Job : {ex.Message}. Hence disabling Client Telemetry");
connectionPolicy.EnableClientTelemetry = false;
}
}
@ -445,7 +445,7 @@ namespace Microsoft.Azure.Cosmos
if (!this.isDisposed)
{
if (disposing)
{
{
this.telemetry?.Dispose();
this.batchExecutorCache.Dispose();
this.DocumentClient.Dispose();

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -141,18 +141,18 @@ namespace Microsoft.Azure.Cosmos.Common
// Task starts running here.
Task<TValue> generator = actualValue.Value;
// Even if the current thread goes away, all exceptions will be observed
Task unused = generator.ContinueWith(c => c.Exception, TaskContinuationOptions.OnlyOnFaulted);
try
{
return await generator;
}
catch (Exception) when (object.ReferenceEquals(actualValue, newLazyValue))
{
// If the lambda this thread added to values triggered an exception remove it from the cache.
this.TryRemoveValue(key, actualValue);
throw;
// Even if the current thread goes away, all exceptions will be observed
Task unused = generator.ContinueWith(c => c.Exception, TaskContinuationOptions.OnlyOnFaulted);
try
{
return await generator;
}
catch (Exception) when (object.ReferenceEquals(actualValue, newLazyValue))
{
// If the lambda this thread added to values triggered an exception remove it from the cache.
this.TryRemoveValue(key, actualValue);
throw;
}
}
@ -174,23 +174,23 @@ namespace Microsoft.Azure.Cosmos.Common
if (this.values.TryGetValue(key, out initialLazyValue) && initialLazyValue.IsValueCreated && initialLazyValue.Value.IsCompleted)
{
// Accessing Exception marks as observed.
Exception e = initialLazyValue.Value.Exception;
Exception e = initialLazyValue.Value.Exception;
return this.TryRemoveValue(key, initialLazyValue);
}
return false;
}
private bool TryRemoveValue(TKey key, AsyncLazy<TValue> initialLazyValue)
{
// This is a nice trick to do "atomic remove if value not changed".
// ConcurrentDictionary inherits from ICollection<KVP<..>>, which allows removal of specific key value pair, instead of removal just by key.
ICollection<KeyValuePair<TKey, AsyncLazy<TValue>>> valuesAsCollection = this.values as ICollection<KeyValuePair<TKey, AsyncLazy<TValue>>>;
Debug.Assert(valuesAsCollection != null, "Values collection expected to implement ICollection<KVP<TKey, AsyncLazy<TValue>>.");
return valuesAsCollection?.Remove(new KeyValuePair<TKey, AsyncLazy<TValue>>(key, initialLazyValue)) ?? false;
}
}
private bool TryRemoveValue(TKey key, AsyncLazy<TValue> initialLazyValue)
{
// This is a nice trick to do "atomic remove if value not changed".
// ConcurrentDictionary inherits from ICollection<KVP<..>>, which allows removal of specific key value pair, instead of removal just by key.
ICollection<KeyValuePair<TKey, AsyncLazy<TValue>>> valuesAsCollection = this.values as ICollection<KeyValuePair<TKey, AsyncLazy<TValue>>>;
Debug.Assert(valuesAsCollection != null, "Values collection expected to implement ICollection<KVP<TKey, AsyncLazy<TValue>>.");
return valuesAsCollection?.Remove(new KeyValuePair<TKey, AsyncLazy<TValue>>(key, initialLazyValue)) ?? false;
}
/// <summary>
/// Remove value from cache and return it if present.
/// </summary>

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -145,7 +145,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
// If cancellation is requested after the delay then return from here.
if (this.cancellationTokenSource.IsCancellationRequested)
{
DefaultTrace.TraceInformation("Observer Task Cancelled.");
DefaultTrace.TraceInformation("Observer Task Cancelled.");
break;
}
@ -181,7 +181,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
/// <param name="operationType"></param>
/// <param name="resourceType"></param>
/// <param name="consistencyLevel"></param>
/// <param name="requestCharge"></param>
/// <param name="requestCharge"></param>
/// <param name="subStatusCode"></param>
internal void Collect(CosmosDiagnostics cosmosDiagnostics,
HttpStatusCode statusCode,
@ -191,7 +191,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
OperationType operationType,
ResourceType resourceType,
string consistencyLevel,
double requestCharge,
double requestCharge,
string subStatusCode)
{
DefaultTrace.TraceVerbose("Collecting Operation data for Telemetry.");
@ -211,7 +211,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
containerName: containerId,
operation: operationType,
resource: resourceType,
statusCode: (int)statusCode,
statusCode: (int)statusCode,
subStatusCode: subStatusCode);
(LongConcurrentHistogram latency, LongConcurrentHistogram requestcharge) = this.operationInfoMap
@ -250,15 +250,15 @@ namespace Microsoft.Azure.Cosmos.Telemetry
{
DefaultTrace.TraceVerbose("Started Recording System Usage for telemetry.");
SystemUsageHistory systemUsageHistory = this.diagnosticsHelper.GetClientTelemetrySystemHistory();
SystemUsageHistory systemUsageHistory = this.diagnosticsHelper.GetClientTelemetrySystemHistory();
if (systemUsageHistory != null )
{
ClientTelemetryHelper.RecordSystemUsage(systemUsageHistory, this.clientTelemetryInfo.SystemInfo);
}
else
{
DefaultTrace.TraceWarning("System Usage History not available");
}
else
{
DefaultTrace.TraceWarning("System Usage History not available");
}
}
catch (Exception ex)

Просмотреть файл

@ -10,7 +10,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
using System.Threading;
using System.Threading.Tasks;
using HdrHistogram;
using Microsoft.Azure.Cosmos.Core.Trace;
using Microsoft.Azure.Cosmos.Core.Trace;
using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Rntbd;
@ -86,31 +86,31 @@ namespace Microsoft.Azure.Cosmos.Telemetry
return azMetadata;
}
/// <summary>
/// Record System Usage and update passed system Info collection. Right now, it collects following metrics
/// 1) CPU Usage
/// 2) Memory Remaining
/// 3) Available Threads
///
/// </summary>
/// <param name="systemUsageHistory"></param>
/// <param name="systemInfoCollection"></param>
internal static void RecordSystemUsage(
SystemUsageHistory systemUsageHistory,
List<SystemInfo> systemInfoCollection)
{
if (systemUsageHistory.Values == null)
{
return;
}
DefaultTrace.TraceInformation("System Usage recorded by telemetry is : {0}", systemUsageHistory);
systemInfoCollection.Add(TelemetrySystemUsage.GetCpuInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetMemoryRemainingInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetAvailableThreadsInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetThreadWaitIntervalInMs(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetThreadStarvationSignalCount(systemUsageHistory.Values));
/// <summary>
/// Record System Usage and update passed system Info collection. Right now, it collects following metrics
/// 1) CPU Usage
/// 2) Memory Remaining
/// 3) Available Threads
///
/// </summary>
/// <param name="systemUsageHistory"></param>
/// <param name="systemInfoCollection"></param>
internal static void RecordSystemUsage(
SystemUsageHistory systemUsageHistory,
List<SystemInfo> systemInfoCollection)
{
if (systemUsageHistory.Values == null)
{
return;
}
DefaultTrace.TraceInformation("System Usage recorded by telemetry is : {0}", systemUsageHistory);
systemInfoCollection.Add(TelemetrySystemUsage.GetCpuInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetMemoryRemainingInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetAvailableThreadsInfo(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetThreadWaitIntervalInMs(systemUsageHistory.Values));
systemInfoCollection.Add(TelemetrySystemUsage.GetThreadStarvationSignalCount(systemUsageHistory.Values));
}
/// <summary>
@ -118,8 +118,8 @@ namespace Microsoft.Azure.Cosmos.Telemetry
/// </summary>
/// <param name="metrics"></param>
/// <returns>Collection of ReportPayload</returns>
internal static List<OperationInfo> ToListWithMetricsInfo(
IDictionary<OperationInfo,
internal static List<OperationInfo> ToListWithMetricsInfo(
IDictionary<OperationInfo,
(LongConcurrentHistogram latency, LongConcurrentHistogram requestcharge)> metrics)
{
DefaultTrace.TraceInformation("Aggregating operation information to list started");
@ -153,11 +153,11 @@ namespace Microsoft.Azure.Cosmos.Telemetry
internal static string GetContactedRegions(CosmosDiagnostics cosmosDiagnostics)
{
IReadOnlyList<(string regionName, Uri uri)> regionList = cosmosDiagnostics.GetContactedRegions();
if (regionList == null || regionList.Count == 0)
{
return null;
}
if (regionList == null || regionList.Count == 0)
{
return null;
}
if (regionList.Count == 1)
{

Просмотреть файл

@ -33,38 +33,38 @@ namespace Microsoft.Azure.Cosmos.Telemetry
internal const long RequestChargeMin = 1;
internal const int RequestChargePrecision = 2;
internal const string RequestChargeName = "RequestCharge";
internal const string RequestChargeUnit = "RU";
internal const string RequestChargeUnit = "RU";
// Expecting histogram to have Minimum CPU Usage of .001% and Maximum CPU Usage of 999.99%
// Expecting histogram to have Minimum CPU Usage of .001% and Maximum CPU Usage of 999.99%
internal const long CpuMax = 99999;
internal const long CpuMin = 1;
internal const int CpuPrecision = 2;
internal const int CpuPrecision = 2;
internal const String CpuName = "CPU";
internal const String CpuUnit = "Percentage";
// Expecting histogram to have Minimum Memory Remaining of 1 MB and Maximum Memory Remaining of Long Max Value
internal const String CpuUnit = "Percentage";
// Expecting histogram to have Minimum Memory Remaining of 1 MB and Maximum Memory Remaining of Long Max Value
internal const long MemoryMax = Int64.MaxValue;
internal const long MemoryMin = 1;
internal const int MemoryPrecision = 2;
internal const String MemoryName = "MemoryRemaining";
internal const String MemoryUnit = "MB";
// Expecting histogram to have Minimum Available Threads = 0 and Maximum Available Threads = it can be any anything depends on the machine
internal const String MemoryUnit = "MB";
// Expecting histogram to have Minimum Available Threads = 0 and Maximum Available Threads = it can be any anything depends on the machine
internal const long AvailableThreadsMax = Int64.MaxValue;
internal const long AvailableThreadsMin = 1;
internal const int AvailableThreadsPrecision = 2;
internal const int AvailableThreadsPrecision = 2;
internal const String AvailableThreadsName = "SystemPool_AvailableThreads";
internal const String AvailableThreadsUnit = "ThreadCount";
// Expecting histogram to have Minimum ThreadWaitIntervalInMs of 1 and Maximum ThreadWaitIntervalInMs of 1 second
internal const String AvailableThreadsUnit = "ThreadCount";
// Expecting histogram to have Minimum ThreadWaitIntervalInMs of 1 and Maximum ThreadWaitIntervalInMs of 1 second
internal const long ThreadWaitIntervalInMsMax = TimeSpan.TicksPerSecond;
internal const long ThreadWaitIntervalInMsMin = 1;
internal const int ThreadWaitIntervalInMsPrecision = 2;
internal const string ThreadWaitIntervalInMsName = "SystemPool_ThreadWaitInterval";
internal const string ThreadWaitIntervalInMsUnit = "MilliSecond";
internal const string ThreadWaitIntervalInMsUnit = "MilliSecond";
internal const string IsThreadStarvingName = "SystemPool_IsThreadStarving_True";
internal const string IsThreadStarvingUnit = "Count";
internal const string IsThreadStarvingUnit = "Count";
internal const string DefaultVmMetadataUrL = "http://169.254.169.254/metadata/instance?api-version=2020-06-01";
internal const double DefaultTimeStampInSeconds = 600;
@ -85,21 +85,21 @@ namespace Microsoft.Azure.Cosmos.Telemetry
internal static readonly JsonSerializerSettings JsonSerializerSettings = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore };
private static Uri vmMetadataUrl;
private static Uri vmMetadataUrl;
private static Uri clientTelemetryEndpoint;
private static string environmentName;
private static string environmentName;
private static TimeSpan scheduledTimeSpan = TimeSpan.Zero;
internal static bool IsClientTelemetryEnabled()
{
bool isTelemetryEnabled = ConfigurationManager
.GetEnvironmentVariable<bool>(ClientTelemetryOptions
.EnvPropsClientTelemetryEnabled, false);
DefaultTrace.TraceInformation($"Telemetry Flag is set to {isTelemetryEnabled}");
return isTelemetryEnabled;
}
internal static bool IsClientTelemetryEnabled()
{
bool isTelemetryEnabled = ConfigurationManager
.GetEnvironmentVariable<bool>(ClientTelemetryOptions
.EnvPropsClientTelemetryEnabled, false);
DefaultTrace.TraceInformation($"Telemetry Flag is set to {isTelemetryEnabled}");
return isTelemetryEnabled;
}
internal static Uri GetVmMetadataUrl()
{
@ -120,23 +120,23 @@ namespace Microsoft.Azure.Cosmos.Telemetry
internal static TimeSpan GetScheduledTimeSpan()
{
if (scheduledTimeSpan.Equals(TimeSpan.Zero))
{
double scheduledTimeInSeconds = ClientTelemetryOptions.DefaultTimeStampInSeconds;
try
{
scheduledTimeInSeconds = ConfigurationManager
.GetEnvironmentVariable<double>(
ClientTelemetryOptions.EnvPropsClientTelemetrySchedulingInSeconds,
ClientTelemetryOptions.DefaultTimeStampInSeconds);
if (scheduledTimeInSeconds <= 0)
{
throw new ArgumentException("Telemetry Scheduled time can not be less than or equal to 0.");
}
}
catch (Exception ex)
{
DefaultTrace.TraceError($"Error while getting telemetry scheduling configuration : {ex.Message}. Falling back to default configuration i.e. {scheduledTimeInSeconds}" );
{
double scheduledTimeInSeconds = ClientTelemetryOptions.DefaultTimeStampInSeconds;
try
{
scheduledTimeInSeconds = ConfigurationManager
.GetEnvironmentVariable<double>(
ClientTelemetryOptions.EnvPropsClientTelemetrySchedulingInSeconds,
ClientTelemetryOptions.DefaultTimeStampInSeconds);
if (scheduledTimeInSeconds <= 0)
{
throw new ArgumentException("Telemetry Scheduled time can not be less than or equal to 0.");
}
}
catch (Exception ex)
{
DefaultTrace.TraceError($"Error while getting telemetry scheduling configuration : {ex.Message}. Falling back to default configuration i.e. {scheduledTimeInSeconds}" );
}
scheduledTimeSpan = TimeSpan.FromSeconds(scheduledTimeInSeconds);

Просмотреть файл

@ -34,10 +34,10 @@ namespace Microsoft.Azure.Cosmos.Telemetry
internal string Consistency { get; }
[JsonProperty(PropertyName = "statusCode")]
public int? StatusCode { get; }
public int? StatusCode { get; }
[JsonProperty(PropertyName = "subStatusCode")]
public string SubStatusCode { get; }
public string SubStatusCode { get; }
[JsonProperty(PropertyName = "metricInfo")]
internal MetricInfo MetricInfo { get; set; }
@ -54,7 +54,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
string containerName,
OperationType? operation,
ResourceType? resource,
int? statusCode,
int? statusCode,
string subStatusCode)
{
this.RegionsContacted = regionsContacted;
@ -67,7 +67,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
this.ContainerName = containerName;
this.Operation = operation?.ToOperationTypeString();
this.Resource = resource?.ToResourceTypeString();
this.StatusCode = statusCode;
this.StatusCode = statusCode;
this.SubStatusCode = subStatusCode;
}
@ -78,7 +78,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
string operation,
string resource,
string consistency,
int? statusCode,
int? statusCode,
string subStatusCode,
MetricInfo metricInfo)
{
@ -89,7 +89,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
this.Operation = operation;
this.Resource = resource;
this.Consistency = consistency;
this.StatusCode = statusCode;
this.StatusCode = statusCode;
this.SubStatusCode = subStatusCode;
this.MetricInfo = metricInfo;
}
@ -103,7 +103,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
this.Operation,
this.Resource,
this.Consistency,
this.StatusCode,
this.StatusCode,
this.SubStatusCode,
null);
}
@ -118,7 +118,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
hash = (hash * 7) ^ (this.ContainerName == null ? 0 : this.ContainerName.GetHashCode());
hash = (hash * 7) ^ (this.Operation == null ? 0 : this.Operation.GetHashCode());
hash = (hash * 7) ^ (this.Resource == null ? 0 : this.Resource.GetHashCode());
hash = (hash * 7) ^ (this.StatusCode == null ? 0 : this.StatusCode.GetHashCode());
hash = (hash * 7) ^ (this.StatusCode == null ? 0 : this.StatusCode.GetHashCode());
hash = (hash * 7) ^ (this.SubStatusCode == null ? 0 : this.SubStatusCode.GetHashCode());
return hash;
}
@ -133,7 +133,7 @@ namespace Microsoft.Azure.Cosmos.Telemetry
((this.ContainerName == null && payload.ContainerName == null) || (this.ContainerName != null && payload.ContainerName != null && this.ContainerName.Equals(payload.ContainerName))) &&
((this.Operation == null && payload.Operation == null) || (this.Operation != null && payload.Operation != null && this.Operation.Equals(payload.Operation))) &&
((this.Resource == null && payload.Resource == null) || (this.Resource != null && payload.Resource != null && this.Resource.Equals(payload.Resource))) &&
((this.StatusCode == null && payload.StatusCode == null) || (this.StatusCode != null && payload.StatusCode != null && this.StatusCode.Equals(payload.StatusCode))) &&
((this.StatusCode == null && payload.StatusCode == null) || (this.StatusCode != null && payload.StatusCode != null && this.StatusCode.Equals(payload.StatusCode))) &&
((this.SubStatusCode == null && payload.SubStatusCode == null) || (this.SubStatusCode != null && payload.SubStatusCode != null && this.SubStatusCode.Equals(payload.SubStatusCode)));
return isequal;

Просмотреть файл

@ -62,11 +62,11 @@ namespace Microsoft.Azure.Cosmos.Tracing
/// Gets additional datum associated with this trace.
/// </summary>
IReadOnlyDictionary<string, object> Data { get; }
/// <summary>
/// Consolidated Region contacted Information of this and children nodes
/// </summary>
IReadOnlyList<(string, Uri)> RegionsContacted { get; }
/// </summary>
IReadOnlyList<(string, Uri)> RegionsContacted { get; }
/// <summary>
/// Starts a Trace and adds it as a child to this instance.
@ -113,12 +113,12 @@ namespace Microsoft.Azure.Cosmos.Tracing
/// Adds a trace children that is already completed.
/// </summary>
/// <param name="trace">Existing trace.</param>
void AddChild(ITrace trace);
void AddChild(ITrace trace);
/// <summary>
/// Update region contacted information to the parent Itrace
/// </summary>
/// <param name="traceDatum"></param>
/// <param name="traceDatum"></param>
void UpdateRegionContacted(TraceDatum traceDatum);
}
}

Просмотреть файл

@ -1,168 +1,168 @@
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tracing
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using Microsoft.Azure.Cosmos.Tracing.TraceData;
internal sealed class Trace : ITrace
{
private static readonly IReadOnlyDictionary<string, object> EmptyDictionary = new Dictionary<string, object>();
private readonly List<ITrace> children;
private readonly Lazy<Dictionary<string, object>> data;
private readonly Stopwatch stopwatch;
private readonly ISet<(string, Uri)> regionContactedInternal;
private Trace(
string name,
TraceLevel level,
TraceComponent component,
Trace parent,
ISet<(string, Uri)> regionContactedInternal)
{
this.Name = name ?? throw new ArgumentNullException(nameof(name));
this.Id = Guid.NewGuid();
this.StartTime = DateTime.UtcNow;
this.stopwatch = Stopwatch.StartNew();
this.Level = level;
this.Component = component;
this.Parent = parent;
this.children = new List<ITrace>();
this.data = new Lazy<Dictionary<string, object>>();
this.regionContactedInternal = regionContactedInternal;
}
public string Name { get; }
public Guid Id { get; }
public DateTime StartTime { get; }
public TimeSpan Duration => this.stopwatch.Elapsed;
public TraceLevel Level { get; }
public TraceComponent Component { get; }
public ITrace Parent { get; }
public IReadOnlyList<ITrace> Children => this.children;
public IReadOnlyDictionary<string, object> Data => this.data.IsValueCreated ? this.data.Value : Trace.EmptyDictionary;
public IReadOnlyList<(string, Uri)> RegionsContacted
{
get
{
lock (this.regionContactedInternal)
{
return this.regionContactedInternal.ToList();
}
}
}
/// <summary>
/// Update region contacted information to this node
/// </summary>
/// <param name="traceDatum"></param>
public void UpdateRegionContacted(TraceDatum traceDatum)
{
if (traceDatum is ClientSideRequestStatisticsTraceDatum clientSideRequestStatisticsTraceDatum)
{
if (clientSideRequestStatisticsTraceDatum.RegionsContacted == null ||
clientSideRequestStatisticsTraceDatum.RegionsContacted.Count == 0)
{
return;
}
lock (this.regionContactedInternal)
{
this.regionContactedInternal.UnionWith(clientSideRequestStatisticsTraceDatum.RegionsContacted);
}
}
}
public void Dispose()
{
this.stopwatch.Stop();
}
public ITrace StartChild(
string name)
{
return this.StartChild(
name,
level: TraceLevel.Verbose,
component: this.Component);
}
public ITrace StartChild(
string name,
TraceComponent component,
TraceLevel level)
{
Trace child = new Trace(
name: name,
level: level,
component: component,
parent: this,
regionContactedInternal: this.regionContactedInternal);
this.AddChild(child);
return child;
}
public void AddChild(ITrace child)
{
lock (this.children)
{
this.children.Add(child);
}
}
public static Trace GetRootTrace(string name)
{
return Trace.GetRootTrace(
name,
component: TraceComponent.Unknown,
level: TraceLevel.Verbose);
}
public static Trace GetRootTrace(
string name,
TraceComponent component,
TraceLevel level)
{
return new Trace(
name: name,
level: level,
component: component,
parent: null,
regionContactedInternal: new HashSet<(string, Uri)>());
}
public void AddDatum(string key, TraceDatum traceDatum)
{
this.data.Value.Add(key, traceDatum);
this.UpdateRegionContacted(traceDatum);
}
public void AddDatum(string key, object value)
{
this.data.Value.Add(key, value);
}
public void AddOrUpdateDatum(string key, object value)
{
this.data.Value[key] = value;
}
}
}
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tracing
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using Microsoft.Azure.Cosmos.Tracing.TraceData;
internal sealed class Trace : ITrace
{
private static readonly IReadOnlyDictionary<string, object> EmptyDictionary = new Dictionary<string, object>();
private readonly List<ITrace> children;
private readonly Lazy<Dictionary<string, object>> data;
private readonly Stopwatch stopwatch;
private readonly ISet<(string, Uri)> regionContactedInternal;
private Trace(
string name,
TraceLevel level,
TraceComponent component,
Trace parent,
ISet<(string, Uri)> regionContactedInternal)
{
this.Name = name ?? throw new ArgumentNullException(nameof(name));
this.Id = Guid.NewGuid();
this.StartTime = DateTime.UtcNow;
this.stopwatch = Stopwatch.StartNew();
this.Level = level;
this.Component = component;
this.Parent = parent;
this.children = new List<ITrace>();
this.data = new Lazy<Dictionary<string, object>>();
this.regionContactedInternal = regionContactedInternal;
}
public string Name { get; }
public Guid Id { get; }
public DateTime StartTime { get; }
public TimeSpan Duration => this.stopwatch.Elapsed;
public TraceLevel Level { get; }
public TraceComponent Component { get; }
public ITrace Parent { get; }
public IReadOnlyList<ITrace> Children => this.children;
public IReadOnlyDictionary<string, object> Data => this.data.IsValueCreated ? this.data.Value : Trace.EmptyDictionary;
public IReadOnlyList<(string, Uri)> RegionsContacted
{
get
{
lock (this.regionContactedInternal)
{
return this.regionContactedInternal.ToList();
}
}
}
/// <summary>
/// Update region contacted information to this node
/// </summary>
/// <param name="traceDatum"></param>
public void UpdateRegionContacted(TraceDatum traceDatum)
{
if (traceDatum is ClientSideRequestStatisticsTraceDatum clientSideRequestStatisticsTraceDatum)
{
if (clientSideRequestStatisticsTraceDatum.RegionsContacted == null ||
clientSideRequestStatisticsTraceDatum.RegionsContacted.Count == 0)
{
return;
}
lock (this.regionContactedInternal)
{
this.regionContactedInternal.UnionWith(clientSideRequestStatisticsTraceDatum.RegionsContacted);
}
}
}
public void Dispose()
{
this.stopwatch.Stop();
}
public ITrace StartChild(
string name)
{
return this.StartChild(
name,
level: TraceLevel.Verbose,
component: this.Component);
}
public ITrace StartChild(
string name,
TraceComponent component,
TraceLevel level)
{
Trace child = new Trace(
name: name,
level: level,
component: component,
parent: this,
regionContactedInternal: this.regionContactedInternal);
this.AddChild(child);
return child;
}
public void AddChild(ITrace child)
{
lock (this.children)
{
this.children.Add(child);
}
}
public static Trace GetRootTrace(string name)
{
return Trace.GetRootTrace(
name,
component: TraceComponent.Unknown,
level: TraceLevel.Verbose);
}
public static Trace GetRootTrace(
string name,
TraceComponent component,
TraceLevel level)
{
return new Trace(
name: name,
level: level,
component: component,
parent: null,
regionContactedInternal: new HashSet<(string, Uri)>());
}
public void AddDatum(string key, TraceDatum traceDatum)
{
this.data.Value.Add(key, traceDatum);
this.UpdateRegionContacted(traceDatum);
}
public void AddDatum(string key, object value)
{
this.data.Value.Add(key, value);
}
public void AddOrUpdateDatum(string key, object value)
{
this.data.Value[key] = value;
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,79 +1,79 @@
// ----------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Performance.Tests.Benchmarks
{
using System;
using BenchmarkDotNet.Attributes;
using Microsoft.Azure.Cosmos.Diagnostics;
using Microsoft.Azure.Cosmos.Telemetry;
using Microsoft.Azure.Cosmos.Tracing;
using Microsoft.Azure.Cosmos.Tracing.TraceData;
[MemoryDiagnoser]
public class RegionContactedInDiagnosticsBenchmark
{
private readonly CosmosTraceDiagnostics noOpTracediagnostics;
private readonly CosmosTraceDiagnostics diagnosticsWithData;
public RegionContactedInDiagnosticsBenchmark()
{
ITrace trace = NoOpTrace.Singleton;
this.noOpTracediagnostics = new Diagnostics.CosmosTraceDiagnostics(trace);
this.diagnosticsWithData = new Diagnostics.CosmosTraceDiagnostics(this.CreateTestTraceTree());
}
private ITrace CreateTestTraceTree()
{
ITrace trace;
using (trace = Trace.GetRootTrace("Root Trace", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace firstLevel = trace.StartChild("First level Node", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace secondLevel = firstLevel.StartChild("Second level Node", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace thirdLevel = secondLevel.StartChild("Third level Node", TraceComponent.Unknown, TraceLevel.Info))
thirdLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.CentralUS));
using (ITrace secondLevel = trace.StartChild("Second level Node", TraceComponent.Unknown, TraceLevel.Info))
secondLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.CentralIndia, Regions.EastUS2));
using (ITrace firstLevel = trace.StartChild("First level Node", TraceComponent.Unknown, TraceLevel.Info))
firstLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.FranceCentral));
return trace;
}
private TraceDatum GetDatumObject(string regionName1, string regionName2 = null)
{
ClientSideRequestStatisticsTraceDatum datum = new ClientSideRequestStatisticsTraceDatum(DateTime.UtcNow);
Uri uri1 = new Uri("http://someUri1.com");
datum.RegionsContacted.Add((regionName1, uri1));
if (regionName2 != null)
{
Uri uri2 = new Uri("http://someUri2.com");
datum.RegionsContacted.Add((regionName2, uri2));
}
return datum;
}
[Benchmark]
public void CollectRegionContactedWithMultiLevelTraceTest()
{
this.CollectRegionContacted(cosmosDiagnostics: this.diagnosticsWithData);
}
[Benchmark]
public void CollectRegionContactedWithNoOpsTraceTest()
{
this.CollectRegionContacted(cosmosDiagnostics: this.noOpTracediagnostics);
}
internal void CollectRegionContacted(CosmosDiagnostics cosmosDiagnostics)
{
if (cosmosDiagnostics == null)
{
throw new ArgumentNullException(nameof(cosmosDiagnostics));
}
ClientTelemetryHelper.GetContactedRegions(cosmosDiagnostics);
}
}
}
// ----------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Performance.Tests.Benchmarks
{
using System;
using BenchmarkDotNet.Attributes;
using Microsoft.Azure.Cosmos.Diagnostics;
using Microsoft.Azure.Cosmos.Telemetry;
using Microsoft.Azure.Cosmos.Tracing;
using Microsoft.Azure.Cosmos.Tracing.TraceData;
/// <summary>
/// BenchmarkDotNet micro-benchmarks measuring the cost of extracting the
/// contacted-regions information from CosmosDiagnostics, comparing a no-op
/// trace against a small multi-level trace tree carrying request statistics.
/// </summary>
[MemoryDiagnoser]
public class RegionContactedInDiagnosticsBenchmark
{
// Diagnostics backed by the singleton no-op trace (empty tree; baseline).
private readonly CosmosTraceDiagnostics noOpTracediagnostics;
// Diagnostics backed by a three-level trace tree populated with region data.
private readonly CosmosTraceDiagnostics diagnosticsWithData;
public RegionContactedInDiagnosticsBenchmark()
{
ITrace trace = NoOpTrace.Singleton;
this.noOpTracediagnostics = new Diagnostics.CosmosTraceDiagnostics(trace);
this.diagnosticsWithData = new Diagnostics.CosmosTraceDiagnostics(this.CreateTestTraceTree());
}
// Builds a root trace with nested child nodes; several nodes receive a
// "Client Side Request Stats" datum naming the regions contacted.
// NOTE(review): the tree is returned after every using block has disposed
// its node - presumably disposal only completes/stops the node and the tree
// remains readable; confirm against ITrace semantics.
private ITrace CreateTestTraceTree()
{
ITrace trace;
using (trace = Trace.GetRootTrace("Root Trace", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace firstLevel = trace.StartChild("First level Node", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace secondLevel = firstLevel.StartChild("Second level Node", TraceComponent.Unknown, TraceLevel.Info))
using (ITrace thirdLevel = secondLevel.StartChild("Third level Node", TraceComponent.Unknown, TraceLevel.Info))
thirdLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.CentralUS))
using (ITrace secondLevel = trace.StartChild("Second level Node", TraceComponent.Unknown, TraceLevel.Info))
secondLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.CentralIndia, Regions.EastUS2));
using (ITrace firstLevel = trace.StartChild("First level Node", TraceComponent.Unknown, TraceLevel.Info))
firstLevel.AddDatum("Client Side Request Stats", this.GetDatumObject(Regions.FranceCentral));
return trace;
}
// Creates a request-statistics datum recording one or (optionally) two
// contacted regions, each paired with a placeholder endpoint URI.
private TraceDatum GetDatumObject(string regionName1, string regionName2 = null)
{
ClientSideRequestStatisticsTraceDatum datum = new ClientSideRequestStatisticsTraceDatum(DateTime.UtcNow);
Uri uri1 = new Uri("http://someUri1.com");
datum.RegionsContacted.Add((regionName1, uri1));
if (regionName2 != null)
{
Uri uri2 = new Uri("http://someUri2.com");
datum.RegionsContacted.Add((regionName2, uri2));
}
return datum;
}
// Benchmark: region extraction over the populated multi-level trace tree.
[Benchmark]
public void CollectRegionContactedWithMultiLevelTraceTest()
{
this.CollectRegionContacted(cosmosDiagnostics: this.diagnosticsWithData);
}
// Benchmark: region extraction over the empty no-op trace (baseline cost).
[Benchmark]
public void CollectRegionContactedWithNoOpsTraceTest()
{
this.CollectRegionContacted(cosmosDiagnostics: this.noOpTracediagnostics);
}
// Shared helper: validates the argument, then invokes the telemetry helper
// that walks the diagnostics for contacted regions.
internal void CollectRegionContacted(CosmosDiagnostics cosmosDiagnostics)
{
if (cosmosDiagnostics == null)
{
throw new ArgumentNullException(nameof(cosmosDiagnostics));
}
ClientTelemetryHelper.GetContactedRegions(cosmosDiagnostics);
}
}
}

Просмотреть файл

@ -96,33 +96,33 @@ namespace Microsoft.Azure.Cosmos.Tests
Assert.AreEqual(20, numberOfCacheRefreshes);
Assert.AreEqual(5, await cache.GetAsync(2, -1, () => refreshFunc(2, CancellationToken.None), CancellationToken.None));
}
/// <summary>
/// The scenario tested here is that when a generator throws an
/// exception it removes itself from the cache. This behavior
/// is to prevent indefinite caching of items which are never used.
/// </summary>
[TestMethod]
public async Task TestRemoveWhenGeneratorThrowsException()
{
}
/// <summary>
/// The scenario tested here is that when a generator throws an
/// exception it removes itself from the cache. This behavior
/// is to prevent indefinite caching of items which are never used.
/// </summary>
[TestMethod]
public async Task TestRemoveWhenGeneratorThrowsException()
{
AsyncCache<int, int> cache = new AsyncCache<int, int>();
using SemaphoreSlim resetEventSlim = new SemaphoreSlim(0, 1);
using SemaphoreSlim resetEventSlim = new SemaphoreSlim(0, 1);
Func<Task<int>> generatorFunc = () => Task.Run(async () =>
{
await resetEventSlim.WaitAsync();
throw new Exception(nameof(TestRemoveWhenGeneratorThrowsException));
#pragma warning disable CS0162 // Unreachable code detected
return 1;
#pragma warning restore CS0162 // Unreachable code detected
});
Task<int> getTask = cache.GetAsync(key: 1, obsoleteValue: -1, singleValueInitFunc: generatorFunc, cancellationToken: default);
resetEventSlim.Release();
Exception ex = await Assert.ThrowsExceptionAsync<Exception>(() => getTask);
Assert.AreEqual(nameof(TestRemoveWhenGeneratorThrowsException), ex.Message);
Assert.IsFalse(cache.Keys.Contains(1));
await resetEventSlim.WaitAsync();
throw new Exception(nameof(TestRemoveWhenGeneratorThrowsException));
#pragma warning disable CS0162 // Unreachable code detected
return 1;
#pragma warning restore CS0162 // Unreachable code detected
});
Task<int> getTask = cache.GetAsync(key: 1, obsoleteValue: -1, singleValueInitFunc: generatorFunc, cancellationToken: default);
resetEventSlim.Release();
Exception ex = await Assert.ThrowsExceptionAsync<Exception>(() => getTask);
Assert.AreEqual(nameof(TestRemoveWhenGeneratorThrowsException), ex.Message);
Assert.IsFalse(cache.Keys.Contains(1));
}
/// <summary>

Просмотреть файл

@ -304,79 +304,79 @@ namespace Microsoft.Azure.Cosmos.Tests
string urlEncoded = AuthorizationHelper.UrlEncodeBase64SpanInPlace(singleInvalidChar, 0);
Assert.AreEqual(result, urlEncoded);
}
}
}
[TestMethod]
// Verifies ParseAuthorizationToken's per-type length limits.
public void AuthorizationTokenLengthTest()
{
    // (token type, total token length, expected parse success).
    // Limits: master = 1KB, resource = 8KB, AAD = 16KB.
    (string TokenType, int Length, bool ShouldParse)[] cases = new[]
    {
        // Master Token (limit 1024)
        (Constants.Properties.MasterToken, 100, true),
        (Constants.Properties.MasterToken, 1024, true),
        (Constants.Properties.MasterToken, 1024 + 1, false),
        (Constants.Properties.MasterToken, 8 * 1024, false),
        (Constants.Properties.MasterToken, (8 * 1024) + 1, false),
        (Constants.Properties.MasterToken, 16 * 1024, false),
        (Constants.Properties.MasterToken, (16 * 1024) + 1, false),
        // Resource Token (limit 8*1024)
        (Constants.Properties.ResourceToken, 100, true),
        (Constants.Properties.ResourceToken, 1024, true),
        (Constants.Properties.ResourceToken, 1024 + 1, true),
        (Constants.Properties.ResourceToken, 8 * 1024, true),
        (Constants.Properties.ResourceToken, (8 * 1024) + 1, false),
        (Constants.Properties.ResourceToken, 16 * 1024, false),
        (Constants.Properties.ResourceToken, (16 * 1024) + 1, false),
        // AAD Token (limit 16*1024)
        (Constants.Properties.AadToken, 100, true),
        (Constants.Properties.AadToken, 1024, true),
        (Constants.Properties.AadToken, 1024 + 1, true),
        (Constants.Properties.AadToken, 8 * 1024, true),
        (Constants.Properties.AadToken, (8 * 1024) + 1, true),
        (Constants.Properties.AadToken, 16 * 1024, true),
        (Constants.Properties.AadToken, (16 * 1024) + 1, false),
    };

    // Same validations, in the same order as the original explicit call list.
    foreach ((string tokenType, int length, bool shouldParse) in cases)
    {
        this.ValidateTokenParsing(tokenType, length, shouldParse);
    }
}
// Generates a token of the requested type and total length, parses it with
// AuthorizationHelper.ParseAuthorizationToken, and asserts the expected
// success or failure.
// NOTE(review): the catch below is catch (Exception), so AssertFailedException
// thrown by Assert.Fail / the success-path asserts inside the try is also
// caught here and re-reported as a type-mismatch failure, obscuring the real
// message - consider narrowing to UnauthorizedException.
private void ValidateTokenParsing(string tokenType, int length, bool shouldParse)
{
string token = this.GenerateSampleToken(tokenType, length, out string expectedParsedToken);
try
{
AuthorizationHelper.ParseAuthorizationToken(
token,
out ReadOnlyMemory<char> type,
out ReadOnlyMemory<char> version,
out ReadOnlyMemory<char> parsedToken);
if (shouldParse)
{
// Parsed components must round-trip: type, fixed "1.0" version, signature.
Assert.AreEqual(tokenType, type.ToString());
Assert.AreEqual("1.0", version.ToString());
Assert.AreEqual(expectedParsedToken, parsedToken.ToString());
}
else
{
Assert.Fail($"Parsing token of type [{tokenType}] and length [{length}] should have failed.");
}
}
catch (Exception exception)
{
if (shouldParse)
{
Assert.Fail($"Parsing token of type [{tokenType}] and length [{length}] should have succeeded.\n{exception}");
}
// Over-length tokens must be rejected as unauthorized with the standard message.
Assert.AreEqual(typeof(UnauthorizedException), exception.GetType());
StringAssert.Contains(exception.Message, RMResources.InvalidAuthHeaderFormat);
}
}
private string GenerateSampleToken(string tokenType, int length, out string tokenValue)
{
string tokenPrefix = $"type%3d{tokenType}%26ver%3d1.0%26sig%3d";
tokenValue = new string('a', length - tokenPrefix.Length);
return tokenPrefix + tokenValue;
// Verifies ParseAuthorizationToken's per-type length limits:
// master tokens up to 1KB, resource tokens up to 8KB, AAD tokens up to 16KB;
// anything longer must be rejected.
public void AuthorizationTokenLengthTest()
{
// Master Token (limit 1024)
this.ValidateTokenParsing(Constants.Properties.MasterToken, 100, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.MasterToken, 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.MasterToken, 1024 + 1, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.MasterToken, 8*1024, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.MasterToken, (8*1024) + 1, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.MasterToken, 16*1024, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.MasterToken, (16*1024) + 1, shouldParse: false);
// Resource Token (limit 8*1024)
this.ValidateTokenParsing(Constants.Properties.ResourceToken, 100, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, 1024 + 1, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, 8 * 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, (8 * 1024) + 1, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, 16 * 1024, shouldParse: false);
this.ValidateTokenParsing(Constants.Properties.ResourceToken, (16 * 1024) + 1, shouldParse: false);
// AAD Token (limit 16*1024)
this.ValidateTokenParsing(Constants.Properties.AadToken, 100, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, 1024 + 1, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, 8 * 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, (8 * 1024) + 1, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, 16 * 1024, shouldParse: true);
this.ValidateTokenParsing(Constants.Properties.AadToken, (16 * 1024) + 1, shouldParse: false);
}
/// <summary>
/// Generates a token of the requested type and total length, parses it with
/// AuthorizationHelper.ParseAuthorizationToken, and asserts that parsing
/// succeeds or fails as expected.
/// </summary>
/// <param name="tokenType">Token type constant (master / resource / aad).</param>
/// <param name="length">Total length of the generated auth token string.</param>
/// <param name="shouldParse">Whether parsing is expected to succeed.</param>
private void ValidateTokenParsing(string tokenType, int length, bool shouldParse)
{
    string token = this.GenerateSampleToken(tokenType, length, out string expectedParsedToken);
    try
    {
        AuthorizationHelper.ParseAuthorizationToken(
            token,
            out ReadOnlyMemory<char> type,
            out ReadOnlyMemory<char> version,
            out ReadOnlyMemory<char> parsedToken);
        if (shouldParse)
        {
            // Parsed components must round-trip: type, fixed "1.0" version, signature.
            Assert.AreEqual(tokenType, type.ToString());
            Assert.AreEqual("1.0", version.ToString());
            Assert.AreEqual(expectedParsedToken, parsedToken.ToString());
        }
        else
        {
            Assert.Fail($"Parsing token of type [{tokenType}] and length [{length}] should have failed.");
        }
    }
    catch (UnauthorizedException exception)
    {
        // Catch only the expected rejection. A bare catch (Exception) here
        // also swallowed AssertFailedException thrown by the asserts above
        // and re-reported it as a type mismatch, obscuring the real failure;
        // any unexpected exception type now surfaces directly.
        if (shouldParse)
        {
            Assert.Fail($"Parsing token of type [{tokenType}] and length [{length}] should have succeeded.\n{exception}");
        }
        // Rejection must carry the standard invalid-auth-header message.
        StringAssert.Contains(exception.Message, RMResources.InvalidAuthHeaderFormat);
    }
}
// Builds a raw auth token "type%3d{tokenType}%26ver%3d1.0%26sig%3d" followed
// by enough 'a' padding that the whole token is exactly `length` characters;
// the padding (the signature portion) is returned through tokenValue.
private string GenerateSampleToken(string tokenType, int length, out string tokenValue)
{
    string prefix = string.Concat("type%3d", tokenType, "%26ver%3d1.0%26sig%3d");
    tokenValue = new string('a', length - prefix.Length);
    return string.Concat(prefix, tokenValue);
}
}
}

Просмотреть файл

@ -1,197 +1,197 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Unit tests for BatchExecUtils.StreamToMemoryAsync covering seekable and
/// non-seekable streams, including streams that return fewer bytes per Read
/// call than requested (socket-like behavior).
/// </summary>
[TestClass]
public class BatchExecUtilsUnitTests
{
// Source of random payload bytes for each test run.
private Random random = new Random();
// Seekable sources: plain MemoryStream, offset/length-restricted views
// (buffer publicly visible and not), an oversized MemoryStream, and a
// custom seekable stream with throttled reads. All must round-trip exactly.
[TestMethod]
[Owner("abpai")]
public async Task StreamToBytesAsyncSeekableAsync()
{
const int bytesLength = 10;
byte[] bytes = new byte[bytesLength];
this.random.NextBytes(bytes);
{
Stream stream = new MemoryStream(bytes);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
{
// Restricted view over the middle of the buffer; GetBuffer() permitted.
Stream stream = new MemoryStream(bytes, 2, 5, writable: false, publiclyVisible: true);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes.Skip(2).Take(5).ToArray()));
}
{
// Same restricted view, but GetBuffer() disallowed (publiclyVisible: false).
Stream stream = new MemoryStream(bytes, 2, 5, writable: false, publiclyVisible: false);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes.Skip(2).Take(5).ToArray()));
}
{
// Stream whose capacity exceeds its length.
Stream stream = new MemoryStream(bytes.Length * 2);
await stream.WriteAsync(bytes, 0, bytes.Length);
stream.Position = 0;
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
{
// Seekable stream yielding at most 3 bytes per Read call.
Stream stream = new TestSeekableStream(bytes, maxLengthToReturnPerRead: 3);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
}
// Non-seekable source with throttled reads must also round-trip exactly.
[TestMethod]
[Owner("abpai")]
public async Task StreamToBytesAsyncNonSeekableAsync()
{
byte[] bytes = new byte[10];
this.random.NextBytes(bytes);
TestNonSeekableStream stream = new TestNonSeekableStream(bytes, maxLengthToReturnPerRead: 3);
{
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, cancellationToken: CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
}
/// <summary>
/// Seekable stream that is not a derived class of MemoryStream for testing.
/// Caller controls max count actually set into the buffer during Read()
/// to simulate Socket like read.
/// </summary>
private class TestSeekableStream : Stream
{
// Upper bound on bytes returned by a single Read call.
private readonly int maxLengthToReturnPerRead;
// Backing store holding the test payload.
private readonly MemoryStream memoryStream;
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => true;
public override long Length => this.memoryStream.Length;
public override long Position
{
get => this.memoryStream.Position;
set => this.memoryStream.Position = value;
}
public TestSeekableStream(byte[] bytes, int maxLengthToReturnPerRead)
{
this.memoryStream = new MemoryStream(bytes);
this.maxLengthToReturnPerRead = maxLengthToReturnPerRead;
}
public override void Flush()
{
this.memoryStream.Flush();
}
// Clamp the requested count to simulate short (socket-like) reads.
public override int Read(byte[] buffer, int offset, int count)
{
count = Math.Min(count, this.maxLengthToReturnPerRead);
return this.memoryStream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
return this.memoryStream.Seek(offset, origin);
}
public override void SetLength(long value)
{
this.memoryStream.SetLength(value);
}
public override void Write(byte[] buffer, int offset, int count)
{
this.memoryStream.Write(buffer, offset, count);
}
}
/// <summary>
/// Non-seekable stream to test Read() where count actually set into the buffer can be controlled to simulate Socket like read.
/// </summary>
private class TestNonSeekableStream : Stream
{
// Test payload and the current read cursor into it.
private byte[] data;
private int currentIndex;
// Upper bound on bytes returned by a single Read call.
private readonly int maxLengthToReturnPerRead;
public TestNonSeekableStream(byte[] data, int maxLengthToReturnPerRead)
{
this.data = data;
this.maxLengthToReturnPerRead = maxLengthToReturnPerRead;
}
// Rewinds the stream so the payload can be read again.
public void Reset()
{
this.currentIndex = 0;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
// Copies at most maxLengthToReturnPerRead bytes; returns 0 at end of data.
public override int Read(byte[] buffer, int offset, int count)
{
int copyCount = Math.Min(count, Math.Min(this.data.Length - this.currentIndex, this.maxLengthToReturnPerRead));
for (int i = 0; i < copyCount; i++)
{
buffer[offset + i] = this.data[this.currentIndex + i];
}
this.currentIndex += copyCount;
return copyCount;
}
public override void Flush()
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Unit tests for BatchExecUtils.StreamToMemoryAsync covering seekable and
/// non-seekable streams, including streams that return fewer bytes per Read
/// call than requested (socket-like behavior).
/// </summary>
[TestClass]
public class BatchExecUtilsUnitTests
{
// Source of random payload bytes for each test run.
private Random random = new Random();
// Seekable sources: plain MemoryStream, offset/length-restricted views
// (buffer publicly visible and not), an oversized MemoryStream, and a
// custom seekable stream with throttled reads. All must round-trip exactly.
[TestMethod]
[Owner("abpai")]
public async Task StreamToBytesAsyncSeekableAsync()
{
const int bytesLength = 10;
byte[] bytes = new byte[bytesLength];
this.random.NextBytes(bytes);
{
Stream stream = new MemoryStream(bytes);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
{
// Restricted view over the middle of the buffer; GetBuffer() permitted.
Stream stream = new MemoryStream(bytes, 2, 5, writable: false, publiclyVisible: true);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes.Skip(2).Take(5).ToArray()));
}
{
// Same restricted view, but GetBuffer() disallowed (publiclyVisible: false).
Stream stream = new MemoryStream(bytes, 2, 5, writable: false, publiclyVisible: false);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes.Skip(2).Take(5).ToArray()));
}
{
// Stream whose capacity exceeds its length.
Stream stream = new MemoryStream(bytes.Length * 2);
await stream.WriteAsync(bytes, 0, bytes.Length);
stream.Position = 0;
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
{
// Seekable stream yielding at most 3 bytes per Read call.
Stream stream = new TestSeekableStream(bytes, maxLengthToReturnPerRead: 3);
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
}
// Non-seekable source with throttled reads must also round-trip exactly.
[TestMethod]
[Owner("abpai")]
public async Task StreamToBytesAsyncNonSeekableAsync()
{
byte[] bytes = new byte[10];
this.random.NextBytes(bytes);
TestNonSeekableStream stream = new TestNonSeekableStream(bytes, maxLengthToReturnPerRead: 3);
{
Memory<byte> actual = await BatchExecUtils.StreamToMemoryAsync(stream, cancellationToken: CancellationToken.None);
Assert.IsTrue(actual.Span.SequenceEqual(bytes));
}
}
/// <summary>
/// Seekable stream that is not a derived class of MemoryStream for testing.
/// Caller controls max count actually set into the buffer during Read()
/// to simulate Socket like read.
/// </summary>
private class TestSeekableStream : Stream
{
// Upper bound on bytes returned by a single Read call.
private readonly int maxLengthToReturnPerRead;
// Backing store holding the test payload.
private readonly MemoryStream memoryStream;
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => true;
public override long Length => this.memoryStream.Length;
public override long Position
{
get => this.memoryStream.Position;
set => this.memoryStream.Position = value;
}
public TestSeekableStream(byte[] bytes, int maxLengthToReturnPerRead)
{
this.memoryStream = new MemoryStream(bytes);
this.maxLengthToReturnPerRead = maxLengthToReturnPerRead;
}
public override void Flush()
{
this.memoryStream.Flush();
}
// Clamp the requested count to simulate short (socket-like) reads.
public override int Read(byte[] buffer, int offset, int count)
{
count = Math.Min(count, this.maxLengthToReturnPerRead);
return this.memoryStream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
return this.memoryStream.Seek(offset, origin);
}
public override void SetLength(long value)
{
this.memoryStream.SetLength(value);
}
public override void Write(byte[] buffer, int offset, int count)
{
this.memoryStream.Write(buffer, offset, count);
}
}
/// <summary>
/// Non-seekable stream to test Read() where count actually set into the buffer can be controlled to simulate Socket like read.
/// </summary>
private class TestNonSeekableStream : Stream
{
// Test payload and the current read cursor into it.
private byte[] data;
private int currentIndex;
// Upper bound on bytes returned by a single Read call.
private readonly int maxLengthToReturnPerRead;
public TestNonSeekableStream(byte[] data, int maxLengthToReturnPerRead)
{
this.data = data;
this.maxLengthToReturnPerRead = maxLengthToReturnPerRead;
}
// Rewinds the stream so the payload can be read again.
public void Reset()
{
this.currentIndex = 0;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
// Copies at most maxLengthToReturnPerRead bytes; returns 0 at end of data.
public override int Read(byte[] buffer, int offset, int count)
{
int copyCount = Math.Min(count, Math.Min(this.data.Length - this.currentIndex, this.maxLengthToReturnPerRead));
for (int i = 0; i < copyCount; i++)
{
buffer[offset + i] = this.data[this.currentIndex + i];
}
this.currentIndex += copyCount;
return copyCount;
}
public override void Flush()
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}
}

Просмотреть файл

@ -1,253 +1,253 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Test helper that decodes a RecordIO/HybridRow batch request payload back
/// into the list of ItemBatchOperation instances it encodes.
/// </summary>
internal class BatchRequestPayloadReader
{
// Accumulates one operation per RecordIO record, in payload order.
private List<ItemBatchOperation> operations = new List<ItemBatchOperation>();
// Reads every record in the payload stream and materializes it as an
// ItemBatchOperation; the first non-success Result aborts the read.
internal async Task<List<ItemBatchOperation>> ReadPayloadAsync(Stream payload)
{
#pragma warning disable CS0618 // Type or member is obsolete
await payload.ReadRecordIOAsync(
record =>
{
Result r = this.ReadOperation(record, out ItemBatchOperation operation);
if (r != Result.Success)
{
return r;
}
this.operations.Add(operation);
return r;
},
resizer: new MemorySpanResizer<byte>((int)payload.Length));
#pragma warning restore CS0618 // Type or member is obsolete
return this.operations;
}
// Parses a single HybridRow record into an operation; Failure when the
// buffer is not a valid V1 row under the batch layout.
private Result ReadOperation(Memory<byte> input, out ItemBatchOperation operation)
{
RowBuffer row = new RowBuffer(input.Length);
if (!row.ReadFrom(input.Span, HybridRowVersion.V1, BatchSchemaProvider.BatchLayoutResolver))
{
operation = null;
return Result.Failure;
}
RowReader reader = new RowReader(ref row);
return BatchRequestPayloadReader.ReadOperation(ref reader, this.operations.Count, out operation);
}
// Walks the row's fields by path name, collecting each known property,
// then reconstructs the ItemBatchOperation and its request options.
private static Result ReadOperation(ref RowReader reader, int operationIndex, out ItemBatchOperation operation)
{
operation = null;
// Collected field values; null / default means "not present in the row".
OperationType operationType = OperationType.Invalid;
string partitionKeyJson = null;
byte[] effectivePartitionKey = null;
string id = null;
byte[] binaryId = null;
byte[] resourceBody = null;
Cosmos.IndexingDirective? indexingDirective = null;
string ifMatch = null;
string ifNoneMatch = null;
int? ttlInSeconds = null;
while (reader.Read())
{
Result r;
switch (reader.Path)
{
case "operationType":
r = reader.ReadInt32(out int operationTypeInt);
if (r != Result.Success)
{
return r;
}
operationType = (OperationType)operationTypeInt;
break;
case "resourceType":
r = reader.ReadInt32(out int resourceType);
if (r != Result.Success)
{
return r;
}
// Batch operations always target documents.
Assert.AreEqual(ResourceType.Document, (ResourceType)resourceType);
break;
case "partitionKey":
r = reader.ReadString(out partitionKeyJson);
if (r != Result.Success)
{
return r;
}
break;
case "effectivePartitionKey":
r = reader.ReadBinary(out effectivePartitionKey);
if (r != Result.Success)
{
return r;
}
break;
case "id":
r = reader.ReadString(out id);
if (r != Result.Success)
{
return r;
}
break;
case "binaryId":
r = reader.ReadBinary(out binaryId);
if (r != Result.Success)
{
return r;
}
break;
case "resourceBody":
r = reader.ReadBinary(out resourceBody);
if (r != Result.Success)
{
return r;
}
break;
case "indexingDirective":
r = reader.ReadString(out string indexingDirectiveStr);
if (r != Result.Success)
{
return r;
}
// The directive is serialized by enum name; unknown names fail the read.
if (!Enum.TryParse<Cosmos.IndexingDirective>(indexingDirectiveStr, out Cosmos.IndexingDirective indexingDirectiveEnum))
{
return Result.Failure;
}
indexingDirective = indexingDirectiveEnum;
break;
case "ifMatch":
r = reader.ReadString(out ifMatch);
if (r != Result.Success)
{
return r;
}
break;
case "ifNoneMatch":
r = reader.ReadString(out ifNoneMatch);
if (r != Result.Success)
{
return r;
}
break;
case "timeToLiveInSeconds":
r = reader.ReadInt32(out int ttl);
if (r != Result.Success)
{
return r;
}
ttlInSeconds = ttl;
break;
}
}
// Ensure the mandatory fields were populated
if (operationType == OperationType.Invalid)
{
return Result.Failure;
}
// Only build request options when at least one optional knob was present.
TransactionalBatchItemRequestOptions requestOptions = null;
if (indexingDirective.HasValue || ifMatch != null || ifNoneMatch != null || binaryId != null || effectivePartitionKey != null || ttlInSeconds.HasValue)
{
requestOptions = new TransactionalBatchItemRequestOptions();
if (indexingDirective.HasValue)
{
requestOptions.IndexingDirective = indexingDirective;
}
// ifMatch wins over ifNoneMatch when both are present.
if (ifMatch != null)
{
requestOptions.IfMatchEtag = ifMatch;
}
else if (ifNoneMatch != null)
{
requestOptions.IfNoneMatchEtag = ifNoneMatch;
}
// Backend-only headers travel through the Properties dictionary.
if (binaryId != null || effectivePartitionKey != null || ttlInSeconds.HasValue)
{
Dictionary<string, object> properties = new Dictionary<string, object>();
if (binaryId != null)
{
properties.Add(WFConstants.BackendHeaders.BinaryId, binaryId);
}
if (effectivePartitionKey != null)
{
properties.Add(WFConstants.BackendHeaders.EffectivePartitionKey, effectivePartitionKey);
}
if (ttlInSeconds.HasValue)
{
properties.Add(WFConstants.BackendHeaders.TimeToLiveInSeconds, ttlInSeconds.ToString());
}
requestOptions.Properties = properties;
}
}
Documents.PartitionKey parsedPartitionKey = null;
if (partitionKeyJson != null)
{
parsedPartitionKey = Documents.PartitionKey.FromJsonString(partitionKeyJson);
}
operation = new ItemBatchOperation(
operationType: operationType,
operationIndex: operationIndex,
partitionKey: Cosmos.PartitionKey.Null, // ParsedPartitionKey is used for validation
id: id,
requestOptions: requestOptions)
{
ParsedPartitionKey = parsedPartitionKey,
ResourceBody = resourceBody
};
return Result.Success;
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Serialization.HybridRow;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Test helper that decodes a RecordIO/HybridRow batch request payload back
/// into the list of ItemBatchOperation instances it encodes.
/// </summary>
internal class BatchRequestPayloadReader
{
// Accumulates one operation per RecordIO record, in payload order.
private List<ItemBatchOperation> operations = new List<ItemBatchOperation>();
// Reads every record in the payload stream and materializes it as an
// ItemBatchOperation; the first non-success Result aborts the read.
internal async Task<List<ItemBatchOperation>> ReadPayloadAsync(Stream payload)
{
#pragma warning disable CS0618 // Type or member is obsolete
await payload.ReadRecordIOAsync(
record =>
{
Result r = this.ReadOperation(record, out ItemBatchOperation operation);
if (r != Result.Success)
{
return r;
}
this.operations.Add(operation);
return r;
},
resizer: new MemorySpanResizer<byte>((int)payload.Length));
#pragma warning restore CS0618 // Type or member is obsolete
return this.operations;
}
// Parses a single HybridRow record into an operation; Failure when the
// buffer is not a valid V1 row under the batch layout.
private Result ReadOperation(Memory<byte> input, out ItemBatchOperation operation)
{
RowBuffer row = new RowBuffer(input.Length);
if (!row.ReadFrom(input.Span, HybridRowVersion.V1, BatchSchemaProvider.BatchLayoutResolver))
{
operation = null;
return Result.Failure;
}
RowReader reader = new RowReader(ref row);
return BatchRequestPayloadReader.ReadOperation(ref reader, this.operations.Count, out operation);
}
// Walks the row's fields by path name, collecting each known property,
// then reconstructs the ItemBatchOperation and its request options.
private static Result ReadOperation(ref RowReader reader, int operationIndex, out ItemBatchOperation operation)
{
operation = null;
// Collected field values; null / default means "not present in the row".
OperationType operationType = OperationType.Invalid;
string partitionKeyJson = null;
byte[] effectivePartitionKey = null;
string id = null;
byte[] binaryId = null;
byte[] resourceBody = null;
Cosmos.IndexingDirective? indexingDirective = null;
string ifMatch = null;
string ifNoneMatch = null;
int? ttlInSeconds = null;
while (reader.Read())
{
Result r;
switch (reader.Path)
{
case "operationType":
r = reader.ReadInt32(out int operationTypeInt);
if (r != Result.Success)
{
return r;
}
operationType = (OperationType)operationTypeInt;
break;
case "resourceType":
r = reader.ReadInt32(out int resourceType);
if (r != Result.Success)
{
return r;
}
// Batch operations always target documents.
Assert.AreEqual(ResourceType.Document, (ResourceType)resourceType);
break;
case "partitionKey":
r = reader.ReadString(out partitionKeyJson);
if (r != Result.Success)
{
return r;
}
break;
case "effectivePartitionKey":
r = reader.ReadBinary(out effectivePartitionKey);
if (r != Result.Success)
{
return r;
}
break;
case "id":
r = reader.ReadString(out id);
if (r != Result.Success)
{
return r;
}
break;
case "binaryId":
r = reader.ReadBinary(out binaryId);
if (r != Result.Success)
{
return r;
}
break;
case "resourceBody":
r = reader.ReadBinary(out resourceBody);
if (r != Result.Success)
{
return r;
}
break;
case "indexingDirective":
r = reader.ReadString(out string indexingDirectiveStr);
if (r != Result.Success)
{
return r;
}
// The directive is serialized by enum name; unknown names fail the read.
if (!Enum.TryParse<Cosmos.IndexingDirective>(indexingDirectiveStr, out Cosmos.IndexingDirective indexingDirectiveEnum))
{
return Result.Failure;
}
indexingDirective = indexingDirectiveEnum;
break;
case "ifMatch":
r = reader.ReadString(out ifMatch);
if (r != Result.Success)
{
return r;
}
break;
case "ifNoneMatch":
r = reader.ReadString(out ifNoneMatch);
if (r != Result.Success)
{
return r;
}
break;
case "timeToLiveInSeconds":
r = reader.ReadInt32(out int ttl);
if (r != Result.Success)
{
return r;
}
ttlInSeconds = ttl;
break;
}
}
// Ensure the mandatory fields were populated
if (operationType == OperationType.Invalid)
{
return Result.Failure;
}
// Only build request options when at least one optional knob was present.
TransactionalBatchItemRequestOptions requestOptions = null;
if (indexingDirective.HasValue || ifMatch != null || ifNoneMatch != null || binaryId != null || effectivePartitionKey != null || ttlInSeconds.HasValue)
{
requestOptions = new TransactionalBatchItemRequestOptions();
if (indexingDirective.HasValue)
{
requestOptions.IndexingDirective = indexingDirective;
}
// ifMatch wins over ifNoneMatch when both are present.
if (ifMatch != null)
{
requestOptions.IfMatchEtag = ifMatch;
}
else if (ifNoneMatch != null)
{
requestOptions.IfNoneMatchEtag = ifNoneMatch;
}
// Backend-only headers travel through the Properties dictionary.
if (binaryId != null || effectivePartitionKey != null || ttlInSeconds.HasValue)
{
Dictionary<string, object> properties = new Dictionary<string, object>();
if (binaryId != null)
{
properties.Add(WFConstants.BackendHeaders.BinaryId, binaryId);
}
if (effectivePartitionKey != null)
{
properties.Add(WFConstants.BackendHeaders.EffectivePartitionKey, effectivePartitionKey);
}
if (ttlInSeconds.HasValue)
{
properties.Add(WFConstants.BackendHeaders.TimeToLiveInSeconds, ttlInSeconds.ToString());
}
requestOptions.Properties = properties;
}
}
Documents.PartitionKey parsedPartitionKey = null;
if (partitionKeyJson != null)
{
parsedPartitionKey = Documents.PartitionKey.FromJsonString(partitionKeyJson);
}
operation = new ItemBatchOperation(
operationType: operationType,
operationIndex: operationIndex,
partitionKey: Cosmos.PartitionKey.Null, // ParsedPartitionKey is used for validation
id: id,
requestOptions: requestOptions)
{
ParsedPartitionKey = parsedPartitionKey,
ResourceBody = resourceBody
};
return Result.Success;
}
}
}

Просмотреть файл

@ -1,115 +1,115 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
    using Microsoft.Azure.Documents;
    using Microsoft.VisualStudio.TestTools.UnitTesting;

    /// <summary>
    /// Serializes a list of <see cref="TransactionalBatchOperationResult"/> instances into the
    /// HybridRow RecordIO stream format used for transactional batch responses, so that tests
    /// can fabricate backend response payloads.
    /// </summary>
    internal class BatchResponsePayloadWriter
    {
        // Results to serialize; one RecordIO record is emitted per result, in order.
        private readonly List<TransactionalBatchOperationResult> results;

        public BatchResponsePayloadWriter(List<TransactionalBatchOperationResult> results)
        {
            this.results = results;
        }

        /// <summary>
        /// Produces a rewound <see cref="MemoryStream"/> containing every result as a RecordIO record.
        /// </summary>
        internal async Task<MemoryStream> GeneratePayloadAsync()
        {
            MemoryStream responseStream = new MemoryStream();
            Assert.AreEqual(Result.Success, await responseStream.WriteRecordIOAsync(default(Segment), this.WriteOperationResult));
            responseStream.Position = 0;
            return responseStream;
        }

        // RecordIO producer callback: emits the serialized row for this.results[index], or an
        // empty buffer once all results have been written (which signals end of stream).
        private Result WriteOperationResult(long index, out ReadOnlyMemory<byte> buffer)
        {
            if (index >= this.results.Count)
            {
                buffer = ReadOnlyMemory<byte>.Empty;
                return Result.Success;
            }

            RowBuffer row = new RowBuffer(2 * 1024);
            row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchResultLayout, BatchSchemaProvider.BatchLayoutResolver);
            Result r = RowWriter.WriteBuffer(ref row, this.results[(int)index], BatchResponsePayloadWriter.WriteResult);
            if (r != Result.Success)
            {
                buffer = null;
                return r;
            }

            MemoryStream output = new MemoryStream(row.Length);
            row.WriteTo(output);
            buffer = new Memory<byte>(output.GetBuffer(), 0, (int)output.Length);
            return r;
        }

        // Writes the fields of a single result. Optional fields (subStatusCode, eTag,
        // resourceBody) are emitted only when present.
        private static Result WriteResult(ref RowWriter writer, TypeArgument typeArg, TransactionalBatchOperationResult result)
        {
            Result r = writer.WriteInt32("statusCode", (int)result.StatusCode);
            if (r != Result.Success)
            {
                return r;
            }

            if (result.SubStatusCode != SubStatusCodes.Unknown)
            {
                r = writer.WriteInt32("subStatusCode", (int)result.SubStatusCode);
                if (r != Result.Success)
                {
                    return r;
                }
            }

            if (result.ETag != null)
            {
                r = writer.WriteString("eTag", result.ETag);
                if (r != Result.Success)
                {
                    return r;
                }
            }

            if (result.ResourceStream != null)
            {
                r = writer.WriteBinary("resourceBody", BatchResponsePayloadWriter.StreamToBytes(result.ResourceStream));
                if (r != Result.Success)
                {
                    return r;
                }
            }

            r = writer.WriteUInt32("retryAfterMilliseconds", (uint)result.RetryAfter.TotalMilliseconds);
            if (r != Result.Success)
            {
                return r;
            }

            r = writer.WriteFloat64("requestCharge", result.RequestCharge);
            if (r != Result.Success)
            {
                return r;
            }

            return Result.Success;
        }

        // Drains the stream (from its current position) into a byte array. Stream.Read is not
        // guaranteed to fill the buffer in a single call, so loop until all bytes are consumed;
        // the original single Read could silently truncate the resource body.
        private static byte[] StreamToBytes(Stream stream)
        {
            byte[] bytes = new byte[stream.Length];
            int offset = 0;
            while (offset < bytes.Length)
            {
                int read = stream.Read(bytes, offset, bytes.Length - offset);
                if (read == 0)
                {
                    throw new EndOfStreamException("Unexpected end of stream while reading the resource body.");
                }

                offset += read;
            }

            return bytes;
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.IO;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.Layouts;
    using Microsoft.Azure.Cosmos.Serialization.HybridRow.RecordIO;
    using Microsoft.Azure.Documents;
    using Microsoft.VisualStudio.TestTools.UnitTesting;

    /// <summary>
    /// Serializes a list of <see cref="TransactionalBatchOperationResult"/> instances into the
    /// HybridRow RecordIO stream format used for transactional batch responses, so that tests
    /// can fabricate backend response payloads.
    /// </summary>
    internal class BatchResponsePayloadWriter
    {
        // Results to serialize; one RecordIO record is emitted per result, in order.
        private readonly List<TransactionalBatchOperationResult> results;

        public BatchResponsePayloadWriter(List<TransactionalBatchOperationResult> results)
        {
            this.results = results;
        }

        /// <summary>
        /// Produces a rewound <see cref="MemoryStream"/> containing every result as a RecordIO record.
        /// </summary>
        internal async Task<MemoryStream> GeneratePayloadAsync()
        {
            MemoryStream responseStream = new MemoryStream();
            Assert.AreEqual(Result.Success, await responseStream.WriteRecordIOAsync(default(Segment), this.WriteOperationResult));
            responseStream.Position = 0;
            return responseStream;
        }

        // RecordIO producer callback: emits the serialized row for this.results[index], or an
        // empty buffer once all results have been written (which signals end of stream).
        private Result WriteOperationResult(long index, out ReadOnlyMemory<byte> buffer)
        {
            if (index >= this.results.Count)
            {
                buffer = ReadOnlyMemory<byte>.Empty;
                return Result.Success;
            }

            RowBuffer row = new RowBuffer(2 * 1024);
            row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchResultLayout, BatchSchemaProvider.BatchLayoutResolver);
            Result r = RowWriter.WriteBuffer(ref row, this.results[(int)index], BatchResponsePayloadWriter.WriteResult);
            if (r != Result.Success)
            {
                buffer = null;
                return r;
            }

            MemoryStream output = new MemoryStream(row.Length);
            row.WriteTo(output);
            buffer = new Memory<byte>(output.GetBuffer(), 0, (int)output.Length);
            return r;
        }

        // Writes the fields of a single result. Optional fields (subStatusCode, eTag,
        // resourceBody) are emitted only when present.
        private static Result WriteResult(ref RowWriter writer, TypeArgument typeArg, TransactionalBatchOperationResult result)
        {
            Result r = writer.WriteInt32("statusCode", (int)result.StatusCode);
            if (r != Result.Success)
            {
                return r;
            }

            if (result.SubStatusCode != SubStatusCodes.Unknown)
            {
                r = writer.WriteInt32("subStatusCode", (int)result.SubStatusCode);
                if (r != Result.Success)
                {
                    return r;
                }
            }

            if (result.ETag != null)
            {
                r = writer.WriteString("eTag", result.ETag);
                if (r != Result.Success)
                {
                    return r;
                }
            }

            if (result.ResourceStream != null)
            {
                r = writer.WriteBinary("resourceBody", BatchResponsePayloadWriter.StreamToBytes(result.ResourceStream));
                if (r != Result.Success)
                {
                    return r;
                }
            }

            r = writer.WriteUInt32("retryAfterMilliseconds", (uint)result.RetryAfter.TotalMilliseconds);
            if (r != Result.Success)
            {
                return r;
            }

            r = writer.WriteFloat64("requestCharge", result.RequestCharge);
            if (r != Result.Success)
            {
                return r;
            }

            return Result.Success;
        }

        // Drains the stream (from its current position) into a byte array. Stream.Read is not
        // guaranteed to fill the buffer in a single call, so loop until all bytes are consumed;
        // the original single Read could silently truncate the resource body.
        private static byte[] StreamToBytes(Stream stream)
        {
            byte[] bytes = new byte[stream.Length];
            int offset = 0;
            while (offset < bytes.Length)
            {
                int read = stream.Read(bytes, offset, bytes.Length - offset);
                if (read == 0)
                {
                    throw new EndOfStreamException("Unexpected end of stream while reading the resource body.");
                }

                offset += read;
            }

            return bytes;
        }
    }
}

Просмотреть файл

@ -1,221 +1,221 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;
    using Microsoft.Azure.Documents;
    using Microsoft.VisualStudio.TestTools.UnitTesting;

    /// <summary>
    /// Tests for the wire schema of transactional batch requests and responses
    /// (HybridRow serialization round-trips).
    /// </summary>
    [TestClass]
    public class BatchSchemaTests
    {
        [TestMethod]
        [Owner("abpai")]
        public async Task BatchRequestSerializationAsync()
        {
            const string partitionKey1 = "pk1";

            using CosmosClient cosmosClient = MockCosmosUtil.CreateMockCosmosClient();
            ContainerInternal containerCore = (ContainerInlineCore)cosmosClient.GetDatabase("db").GetContainer("cont");

            ItemBatchOperation[] operations = new ItemBatchOperation[]
            {
                new ItemBatchOperation(
                    operationType: OperationType.Create,
                    operationIndex: 0,
                    containerCore: containerCore)
                {
                    ResourceBody = new byte[] { 0x41, 0x42 }
                },
                new ItemBatchOperation(
                    id: "id2",
                    operationType: OperationType.Replace,
                    operationIndex: 1,
                    containerCore: containerCore,
                    requestOptions: new TransactionalBatchItemRequestOptions()
                    {
                        IfMatchEtag = "theCondition"
                    })
            };

            ServerBatchRequest batchRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                new Cosmos.PartitionKey(partitionKey1),
                new ArraySegment<ItemBatchOperation>(operations),
                serializerCore: MockCosmosUtil.Serializer,
                trace: NoOpTrace.Singleton,
                cancellationToken: CancellationToken.None);

            Assert.AreEqual(2, batchRequest.Operations.Count);

            using (MemoryStream payload = batchRequest.TransferBodyStream())
            {
                Assert.IsNotNull(payload);

                // Round-trip: read the serialized payload back and compare against the inputs.
                List<ItemBatchOperation> readOperations = await new BatchRequestPayloadReader().ReadPayloadAsync(payload);
                Assert.AreEqual(2, readOperations.Count);

                ItemBatchOperationEqualityComparer comparer = new ItemBatchOperationEqualityComparer();
                Assert.IsTrue(comparer.Equals(operations[0], readOperations[0]));
                Assert.IsTrue(comparer.Equals(operations[1], readOperations[1]));
            }
        }

        [TestMethod]
        [Owner("abpai")]
        public async Task BatchResponseDeserializationAsync()
        {
            using CosmosClient cosmosClient = MockCosmosUtil.CreateMockCosmosClient();
            ContainerInternal containerCore = (ContainerInlineCore)cosmosClient.GetDatabase("db").GetContainer("cont");

            List<TransactionalBatchOperationResult> results = new List<TransactionalBatchOperationResult>
            {
                new TransactionalBatchOperationResult(HttpStatusCode.Conflict),
                new TransactionalBatchOperationResult(HttpStatusCode.OK)
                {
                    ResourceStream = new MemoryStream(new byte[] { 0x41, 0x42 }, index: 0, count: 2, writable: false, publiclyVisible: true),
                    RequestCharge = 2.5,
                    ETag = "1234",
                    RetryAfter = TimeSpan.FromMilliseconds(360)
                }
            };

            // Fabricate a multi-status backend payload from the expected results.
            MemoryStream responseContent = await new BatchResponsePayloadWriter(results).GeneratePayloadAsync();

            SinglePartitionKeyServerBatchRequest batchRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                partitionKey: Cosmos.PartitionKey.None,
                operations: new ArraySegment<ItemBatchOperation>(
                    new ItemBatchOperation[]
                    {
                        new ItemBatchOperation(OperationType.Read, operationIndex: 0, id: "someId", containerCore: containerCore),
                        new ItemBatchOperation(OperationType.Read, operationIndex: 0, id: "someId", containerCore: containerCore)
                    }),
                serializerCore: MockCosmosUtil.Serializer,
                trace: NoOpTrace.Singleton,
                cancellationToken: CancellationToken.None);

            ResponseMessage response = new ResponseMessage((HttpStatusCode)StatusCodes.MultiStatus) { Content = responseContent };
            response.Headers.Session = Guid.NewGuid().ToString();
            response.Headers.ActivityId = Guid.NewGuid().ToString();

            TransactionalBatchResponse batchResponse = await TransactionalBatchResponse.FromResponseMessageAsync(
                response,
                batchRequest,
                MockCosmosUtil.Serializer,
                true,
                NoOpTrace.Singleton,
                CancellationToken.None);

            Assert.IsNotNull(batchRequest);
            Assert.AreEqual(HttpStatusCode.Conflict, batchResponse.StatusCode);
            Assert.AreEqual(2, batchResponse.Count);

            // Session token and activity id propagate from the response headers to every result.
            Assert.AreEqual(response.Headers.Session, batchResponse[0].SessionToken);
            Assert.AreEqual(response.Headers.Session, batchResponse[1].SessionToken);
            Assert.AreEqual(response.Headers.ActivityId, batchResponse[0].ActivityId);
            Assert.AreEqual(response.Headers.ActivityId, batchResponse[1].ActivityId);

            CosmosBatchOperationResultEqualityComparer comparer = new CosmosBatchOperationResultEqualityComparer();
            Assert.IsTrue(comparer.Equals(results[0], batchResponse[0]));
            Assert.IsTrue(comparer.Equals(results[1], batchResponse[1]));
        }

        // Deep equality for ItemBatchOperation, used to validate serialization round-trips.
        private class ItemBatchOperationEqualityComparer : IEqualityComparer<ItemBatchOperation>
        {
            public bool Equals(ItemBatchOperation x, ItemBatchOperation y)
            {
                return x.Id == y.Id
                    && x.OperationType == y.OperationType
                    && x.OperationIndex == y.OperationIndex
                    && this.Equals(x.RequestOptions, y.RequestOptions)
                    && x.ResourceBody.Span.SequenceEqual(y.ResourceBody.Span);
            }

            // Request options are compared via the headers they populate on a request message.
            private bool Equals(TransactionalBatchItemRequestOptions x, TransactionalBatchItemRequestOptions y)
            {
                if (x == null && y == null)
                {
                    return true;
                }
                else if (x != null && y != null)
                {
                    RequestMessage xMessage = new RequestMessage();
                    RequestMessage yMessage = new RequestMessage();
                    x.PopulateRequestOptions(xMessage);
                    y.PopulateRequestOptions(yMessage);

                    foreach (string headerName in xMessage.Headers)
                    {
                        if (xMessage.Headers[headerName] != yMessage.Headers[headerName])
                        {
                            return false;
                        }
                    }

                    return true;
                }

                return false;
            }

            public int GetHashCode(ItemBatchOperation obj)
            {
                int hashCode = 1660235553;
                hashCode = (hashCode * -1521134295) + EqualityComparer<string>.Default.GetHashCode(obj.Id);
                hashCode = (hashCode * -1521134295) + obj.OperationType.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<TransactionalBatchItemRequestOptions>.Default.GetHashCode(obj.RequestOptions);
                hashCode = (hashCode * -1521134295) + obj.OperationIndex.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<Memory<byte>>.Default.GetHashCode(obj.ResourceBody);
                return hashCode;
            }
        }

        // Deep equality for TransactionalBatchOperationResult, used to validate deserialization.
        private class CosmosBatchOperationResultEqualityComparer : IEqualityComparer<TransactionalBatchOperationResult>
        {
            public bool Equals(TransactionalBatchOperationResult x, TransactionalBatchOperationResult y)
            {
                return x.StatusCode == y.StatusCode
                    && x.SubStatusCode == y.SubStatusCode
                    && x.ETag == y.ETag
                    && x.RequestCharge == y.RequestCharge
                    && x.RetryAfter == y.RetryAfter
                    && this.Equals(x.ResourceStream, y.ResourceStream);
            }

            private bool Equals(Stream x, Stream y)
            {
                if (x == null && y == null)
                {
                    return true;
                }
                else if (x != null && y != null)
                {
                    if (x.Length != y.Length)
                    {
                        return false;
                    }

                    // Compare only the logical content. The original used GetBuffer(), which
                    // exposes the full underlying capacity: two streams with identical content
                    // but different capacities would compare unequal, and GetBuffer throws for
                    // streams whose buffer is not publicly visible. ToArray has neither problem.
                    return ((MemoryStream)x).ToArray().SequenceEqual(((MemoryStream)y).ToArray());
                }

                return false;
            }

            public int GetHashCode(TransactionalBatchOperationResult obj)
            {
                int hashCode = 1176625765;
                hashCode = (hashCode * -1521134295) + obj.StatusCode.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<string>.Default.GetHashCode(obj.ETag);
                hashCode = (hashCode * -1521134295) + EqualityComparer<double>.Default.GetHashCode(obj.RequestCharge);
                hashCode = (hashCode * -1521134295) + EqualityComparer<TimeSpan>.Default.GetHashCode(obj.RetryAfter);
                hashCode = (hashCode * -1521134295) + EqualityComparer<SubStatusCodes>.Default.GetHashCode(obj.SubStatusCode);
                return hashCode;
            }
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Cosmos.Tracing;
    using Microsoft.Azure.Documents;
    using Microsoft.VisualStudio.TestTools.UnitTesting;

    /// <summary>
    /// Tests for the wire schema of transactional batch requests and responses
    /// (HybridRow serialization round-trips).
    /// </summary>
    [TestClass]
    public class BatchSchemaTests
    {
        [TestMethod]
        [Owner("abpai")]
        public async Task BatchRequestSerializationAsync()
        {
            const string partitionKey1 = "pk1";

            using CosmosClient cosmosClient = MockCosmosUtil.CreateMockCosmosClient();
            ContainerInternal containerCore = (ContainerInlineCore)cosmosClient.GetDatabase("db").GetContainer("cont");

            ItemBatchOperation[] operations = new ItemBatchOperation[]
            {
                new ItemBatchOperation(
                    operationType: OperationType.Create,
                    operationIndex: 0,
                    containerCore: containerCore)
                {
                    ResourceBody = new byte[] { 0x41, 0x42 }
                },
                new ItemBatchOperation(
                    id: "id2",
                    operationType: OperationType.Replace,
                    operationIndex: 1,
                    containerCore: containerCore,
                    requestOptions: new TransactionalBatchItemRequestOptions()
                    {
                        IfMatchEtag = "theCondition"
                    })
            };

            ServerBatchRequest batchRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                new Cosmos.PartitionKey(partitionKey1),
                new ArraySegment<ItemBatchOperation>(operations),
                serializerCore: MockCosmosUtil.Serializer,
                trace: NoOpTrace.Singleton,
                cancellationToken: CancellationToken.None);

            Assert.AreEqual(2, batchRequest.Operations.Count);

            using (MemoryStream payload = batchRequest.TransferBodyStream())
            {
                Assert.IsNotNull(payload);

                // Round-trip: read the serialized payload back and compare against the inputs.
                List<ItemBatchOperation> readOperations = await new BatchRequestPayloadReader().ReadPayloadAsync(payload);
                Assert.AreEqual(2, readOperations.Count);

                ItemBatchOperationEqualityComparer comparer = new ItemBatchOperationEqualityComparer();
                Assert.IsTrue(comparer.Equals(operations[0], readOperations[0]));
                Assert.IsTrue(comparer.Equals(operations[1], readOperations[1]));
            }
        }

        [TestMethod]
        [Owner("abpai")]
        public async Task BatchResponseDeserializationAsync()
        {
            using CosmosClient cosmosClient = MockCosmosUtil.CreateMockCosmosClient();
            ContainerInternal containerCore = (ContainerInlineCore)cosmosClient.GetDatabase("db").GetContainer("cont");

            List<TransactionalBatchOperationResult> results = new List<TransactionalBatchOperationResult>
            {
                new TransactionalBatchOperationResult(HttpStatusCode.Conflict),
                new TransactionalBatchOperationResult(HttpStatusCode.OK)
                {
                    ResourceStream = new MemoryStream(new byte[] { 0x41, 0x42 }, index: 0, count: 2, writable: false, publiclyVisible: true),
                    RequestCharge = 2.5,
                    ETag = "1234",
                    RetryAfter = TimeSpan.FromMilliseconds(360)
                }
            };

            // Fabricate a multi-status backend payload from the expected results.
            MemoryStream responseContent = await new BatchResponsePayloadWriter(results).GeneratePayloadAsync();

            SinglePartitionKeyServerBatchRequest batchRequest = await SinglePartitionKeyServerBatchRequest.CreateAsync(
                partitionKey: Cosmos.PartitionKey.None,
                operations: new ArraySegment<ItemBatchOperation>(
                    new ItemBatchOperation[]
                    {
                        new ItemBatchOperation(OperationType.Read, operationIndex: 0, id: "someId", containerCore: containerCore),
                        new ItemBatchOperation(OperationType.Read, operationIndex: 0, id: "someId", containerCore: containerCore)
                    }),
                serializerCore: MockCosmosUtil.Serializer,
                trace: NoOpTrace.Singleton,
                cancellationToken: CancellationToken.None);

            ResponseMessage response = new ResponseMessage((HttpStatusCode)StatusCodes.MultiStatus) { Content = responseContent };
            response.Headers.Session = Guid.NewGuid().ToString();
            response.Headers.ActivityId = Guid.NewGuid().ToString();

            TransactionalBatchResponse batchResponse = await TransactionalBatchResponse.FromResponseMessageAsync(
                response,
                batchRequest,
                MockCosmosUtil.Serializer,
                true,
                NoOpTrace.Singleton,
                CancellationToken.None);

            Assert.IsNotNull(batchRequest);
            Assert.AreEqual(HttpStatusCode.Conflict, batchResponse.StatusCode);
            Assert.AreEqual(2, batchResponse.Count);

            // Session token and activity id propagate from the response headers to every result.
            Assert.AreEqual(response.Headers.Session, batchResponse[0].SessionToken);
            Assert.AreEqual(response.Headers.Session, batchResponse[1].SessionToken);
            Assert.AreEqual(response.Headers.ActivityId, batchResponse[0].ActivityId);
            Assert.AreEqual(response.Headers.ActivityId, batchResponse[1].ActivityId);

            CosmosBatchOperationResultEqualityComparer comparer = new CosmosBatchOperationResultEqualityComparer();
            Assert.IsTrue(comparer.Equals(results[0], batchResponse[0]));
            Assert.IsTrue(comparer.Equals(results[1], batchResponse[1]));
        }

        // Deep equality for ItemBatchOperation, used to validate serialization round-trips.
        private class ItemBatchOperationEqualityComparer : IEqualityComparer<ItemBatchOperation>
        {
            public bool Equals(ItemBatchOperation x, ItemBatchOperation y)
            {
                return x.Id == y.Id
                    && x.OperationType == y.OperationType
                    && x.OperationIndex == y.OperationIndex
                    && this.Equals(x.RequestOptions, y.RequestOptions)
                    && x.ResourceBody.Span.SequenceEqual(y.ResourceBody.Span);
            }

            // Request options are compared via the headers they populate on a request message.
            private bool Equals(TransactionalBatchItemRequestOptions x, TransactionalBatchItemRequestOptions y)
            {
                if (x == null && y == null)
                {
                    return true;
                }
                else if (x != null && y != null)
                {
                    RequestMessage xMessage = new RequestMessage();
                    RequestMessage yMessage = new RequestMessage();
                    x.PopulateRequestOptions(xMessage);
                    y.PopulateRequestOptions(yMessage);

                    foreach (string headerName in xMessage.Headers)
                    {
                        if (xMessage.Headers[headerName] != yMessage.Headers[headerName])
                        {
                            return false;
                        }
                    }

                    return true;
                }

                return false;
            }

            public int GetHashCode(ItemBatchOperation obj)
            {
                int hashCode = 1660235553;
                hashCode = (hashCode * -1521134295) + EqualityComparer<string>.Default.GetHashCode(obj.Id);
                hashCode = (hashCode * -1521134295) + obj.OperationType.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<TransactionalBatchItemRequestOptions>.Default.GetHashCode(obj.RequestOptions);
                hashCode = (hashCode * -1521134295) + obj.OperationIndex.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<Memory<byte>>.Default.GetHashCode(obj.ResourceBody);
                return hashCode;
            }
        }

        // Deep equality for TransactionalBatchOperationResult, used to validate deserialization.
        private class CosmosBatchOperationResultEqualityComparer : IEqualityComparer<TransactionalBatchOperationResult>
        {
            public bool Equals(TransactionalBatchOperationResult x, TransactionalBatchOperationResult y)
            {
                return x.StatusCode == y.StatusCode
                    && x.SubStatusCode == y.SubStatusCode
                    && x.ETag == y.ETag
                    && x.RequestCharge == y.RequestCharge
                    && x.RetryAfter == y.RetryAfter
                    && this.Equals(x.ResourceStream, y.ResourceStream);
            }

            private bool Equals(Stream x, Stream y)
            {
                if (x == null && y == null)
                {
                    return true;
                }
                else if (x != null && y != null)
                {
                    if (x.Length != y.Length)
                    {
                        return false;
                    }

                    // Compare only the logical content. The original used GetBuffer(), which
                    // exposes the full underlying capacity: two streams with identical content
                    // but different capacities would compare unequal, and GetBuffer throws for
                    // streams whose buffer is not publicly visible. ToArray has neither problem.
                    return ((MemoryStream)x).ToArray().SequenceEqual(((MemoryStream)y).ToArray());
                }

                return false;
            }

            public int GetHashCode(TransactionalBatchOperationResult obj)
            {
                int hashCode = 1176625765;
                hashCode = (hashCode * -1521134295) + obj.StatusCode.GetHashCode();
                hashCode = (hashCode * -1521134295) + EqualityComparer<string>.Default.GetHashCode(obj.ETag);
                hashCode = (hashCode * -1521134295) + EqualityComparer<double>.Default.GetHashCode(obj.RequestCharge);
                hashCode = (hashCode * -1521134295) + EqualityComparer<TimeSpan>.Default.GetHashCode(obj.RetryAfter);
                hashCode = (hashCode * -1521134295) + EqualityComparer<SubStatusCodes>.Default.GetHashCode(obj.SubStatusCode);
                return hashCode;
            }
        }
    }
}

Просмотреть файл

@ -1,470 +1,470 @@
//-----------------------------------------------------------------------
// <copyright file="CrossPartitionQueryTests.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Json;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// Tests for CrossPartitionQueryTests.
/// </summary>
[TestClass]
[TestCategory("Quarantine") /* Used to filter out quarantined tests in gated runs */]
public class BinaryEncodingOverTheWireTests
{
private static readonly string[] NoDocuments = new string[] { };
private static readonly CosmosClient GatewayClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Gateway });
private static readonly CosmosClient DirectHttpsClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Direct, ConnectionProtocol = Documents.Client.Protocol.Https });
private static readonly CosmosClient RntbdClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Direct, ConnectionProtocol = Documents.Client.Protocol.Tcp });
private static readonly CosmosClient[] Clients = new CosmosClient[] { GatewayClient, DirectHttpsClient, RntbdClient };
private static readonly CosmosClient Client = RntbdClient;
private static readonly AsyncLazy<Database> Database = new AsyncLazy<Database>(async () =>
{
return await Client.CreateDatabaseAsync(Guid.NewGuid().ToString());
});
private static async Task<Container> CreateContainerAsync()
{
return (await Database.Value).CreateContainerAsync(
Guid.NewGuid().ToString() + "collection",
"/id",
10000).Result;
}
private static async Task<Tuple<Container, List<JToken>>> CreateCollectionAndIngestDocuments(IEnumerable<string> documents)
{
Container container = await BinaryEncodingOverTheWireTests.CreateContainerAsync();
List<JToken> insertedDocuments = new List<JToken>();
Random rand = new Random(1234);
foreach (string serializedItem in documents.OrderBy(x => rand.Next()).Take(100))
{
JToken item = JToken.Parse(serializedItem);
item["id"] = Guid.NewGuid().ToString();
JToken createdItem = await container.CreateItemAsync<JToken>(item, new PartitionKey(item["id"].ToString()));
insertedDocuments.Add(createdItem);
}
return new Tuple<Container, List<JToken>>(container, insertedDocuments);
}
internal delegate Task Query(CosmosClient cosmosClient, Container container, List<JToken> items);
/// <summary>
/// Task that wraps boiler plate code for query tests (collection create -> ingest documents -> query documents -> delete collections).
/// Note that this function will take the cross product connectionModes and collectionTypes.
/// </summary>
/// <param name="connectionModes">The connection modes to use.</param>
/// <param name="collectionTypes">The type of collections to create.</param>
/// <param name="documents">The documents to ingest</param>
/// <param name="query">
/// The callback for the queries.
/// All the standard arguments will be passed in.
/// Please make sure that this function is idempotent, since a collection will be reused for each connection mode.
/// </param>
/// <param name="partitionKey">The partition key for the partition collection.</param>
/// <param name="testArgs">The optional args that you want passed in to the query.</param>
/// <returns>A task to await on.</returns>
private static async Task CreateIngestQueryDelete(
IEnumerable<string> documents,
Query query)
{
Tuple<Container, List<JToken>> collectionAndDocuments = await BinaryEncodingOverTheWireTests.CreateCollectionAndIngestDocuments(documents);
List<Task> queryTasks = new List<Task>();
foreach (CosmosClient cosmosClient in BinaryEncodingOverTheWireTests.Clients)
{
queryTasks.Add(query(cosmosClient, collectionAndDocuments.Item1, collectionAndDocuments.Item2));
}
await Task.WhenAll(queryTasks);
await collectionAndDocuments.Item1.DeleteContainerAsync();
}
private static async Task NoOp()
{
await Task.Delay(0);
}
[TestMethod]
public void CheckThatAllTestsAreRunning()
{
// In general I don't want any of these tests being ignored or quarentined.
// Please work with me if it needs to be.
// I do not want these tests turned off for being "flaky", since they have been
// very stable and if they fail it's because something lower level is probably going wrong.
Assert.AreEqual(0, typeof(BinaryEncodingOverTheWireTests)
.GetMethods()
.Where(method => method.GetCustomAttributes(typeof(TestMethodAttribute), true).Length != 0)
.Where(method => method.GetCustomAttributes(typeof(TestCategoryAttribute), true).Length != 0)
.Count(), $"One the {nameof(BinaryEncodingOverTheWireTests)} is not being run.");
}
[TestMethod]
public async Task CombinedScriptsDataTest()
{
await this.TestCurratedDocs("CombinedScriptsData.json");
}
// For now we are skipping this test since the documents are too large to ingest and we get a rate size too large (HTTP 413).
#if TEST_COUNTRY
[TestMethod]
public async Task CountriesTest()
{
await this.TestCurratedDocs("countries");
}
#endif
[TestMethod]
public async Task DevTestCollTest()
{
await this.TestCurratedDocs("devtestcoll.json");
}
[TestMethod]
public async Task LastFMTest()
{
await this.TestCurratedDocs("lastfm");
}
[TestMethod]
public async Task LogDataTest()
{
await this.TestCurratedDocs("LogData.json");
}
[TestMethod]
public async Task MillionSong1KDocumentsTest()
{
await this.TestCurratedDocs("MillionSong1KDocuments.json");
}
[TestMethod]
public async Task MsnCollectionTest()
{
await this.TestCurratedDocs("MsnCollection.json");
}
[TestMethod]
public async Task NutritionDataTest()
{
await this.TestCurratedDocs("NutritionData");
}
[TestMethod]
public async Task RunsCollectionTest()
{
await this.TestCurratedDocs("runsCollection");
}
[TestMethod]
public async Task StatesCommitteesTest()
{
await this.TestCurratedDocs("states_committees.json");
}
[TestMethod]
public async Task StatesLegislatorsTest()
{
await this.TestCurratedDocs("states_legislators");
}
[TestMethod]
public async Task Store01Test()
{
await this.TestCurratedDocs("store01C.json");
}
[TestMethod]
public async Task TicinoErrorBucketsTest()
{
await this.TestCurratedDocs("TicinoErrorBuckets");
}
[TestMethod]
public async Task TwitterDataTest()
{
await this.TestCurratedDocs("twitter_data");
}
[TestMethod]
public async Task Ups1Test()
{
await this.TestCurratedDocs("ups1");
}
[TestMethod]
public async Task XpertEventsTest()
{
await this.TestCurratedDocs("XpertEvents");
}
private async Task TestCurratedDocs(string path)
{
IEnumerable<object> documents = BinaryEncodingOverTheWireTests.GetDocumentsFromCurratedDoc(path);
await BinaryEncodingOverTheWireTests.CreateIngestQueryDelete(
documents.Select(x => x.ToString()),
this.TestCurratedDocs);
}
private async Task TestCurratedDocs(CosmosClient cosmosClient, Container container, List<JToken> items)
{
HashSet<JToken> inputItems = new HashSet<JToken>(items, JsonTokenEqualityComparer.Value);
async Task AssertQueryDrainsCorrectlyAsync(FeedIterator<JToken> feedIterator)
{
while (feedIterator.HasMoreResults)
{
FeedResponse<JToken> feedResponse = await feedIterator.ReadNextAsync();
foreach (JToken item in feedResponse)
{
Assert.IsTrue(inputItems.Contains(item), "Documents differ from input documents");
}
}
}
FeedIterator<JToken> textFeedIterator = container.GetItemQueryIterator<JToken>(
queryDefinition: new QueryDefinition("SELECT * FROM c ORDER BY c._ts"),
requestOptions: new QueryRequestOptions()
{
CosmosSerializationFormatOptions = new CosmosSerializationFormatOptions(
"JsonText",
(content) => JsonNavigator.Create(content),
() => Cosmos.Json.JsonWriter.Create(JsonSerializationFormat.Text)),
});
await AssertQueryDrainsCorrectlyAsync(textFeedIterator);
FeedIterator<JToken> binaryFeedIterator = container.GetItemQueryIterator<JToken>(
queryDefinition: new QueryDefinition("SELECT * FROM c ORDER BY c._ts"),
requestOptions: new QueryRequestOptions()
{
CosmosSerializationFormatOptions = new CosmosSerializationFormatOptions(
"CosmosBinary",
(content) => JsonNavigator.Create(content),
() => Cosmos.Json.JsonWriter.Create(JsonSerializationFormat.Text)),
});
await AssertQueryDrainsCorrectlyAsync(binaryFeedIterator);
}
private static IEnumerable<object> GetDocumentsFromCurratedDoc(string path)
{
path = string.Format("TestJsons/{0}", path);
string json = TextFileConcatenation.ReadMultipartFile(path);
List<object> documents;
try
{
documents = JsonConvert.DeserializeObject<List<object>>(json);
}
catch (JsonSerializationException)
{
documents = new List<object>
{
JsonConvert.DeserializeObject<object>(json)
};
}
return documents;
}
public sealed class AsyncLazy<T> : Lazy<Task<T>>
{
public AsyncLazy(Func<T> valueFactory) :
base(() => Task.Factory.StartNew(valueFactory))
{ }
public AsyncLazy(Func<Task<T>> taskFactory) :
base(() => Task.Factory.StartNew(() => taskFactory()).Unwrap())
{ }
}
public sealed class JsonTokenEqualityComparer : IEqualityComparer<JToken>
{
public static JsonTokenEqualityComparer Value = new JsonTokenEqualityComparer();
public bool Equals(double double1, double double2)
{
return double1 == double2;
}
public bool Equals(string string1, string string2)
{
return string1.Equals(string2);
}
public bool Equals(bool bool1, bool bool2)
{
return bool1 == bool2;
}
public bool Equals(JArray jArray1, JArray jArray2)
{
if (jArray1.Count != jArray2.Count)
{
return false;
}
IEnumerable<Tuple<JToken, JToken>> pairwiseElements = jArray1
.Zip(jArray2, (first, second) => new Tuple<JToken, JToken>(first, second));
bool deepEquals = true;
foreach (Tuple<JToken, JToken> pairwiseElement in pairwiseElements)
{
deepEquals &= this.Equals(pairwiseElement.Item1, pairwiseElement.Item2);
}
return deepEquals;
}
public bool Equals(JObject jObject1, JObject jObject2)
{
if (jObject1.Count != jObject2.Count)
{
return false;
}
bool deepEquals = true;
foreach (KeyValuePair<string, JToken> kvp in jObject1)
{
string name = kvp.Key;
JToken value1 = kvp.Value;
JToken value2;
if (jObject2.TryGetValue(name, out value2))
{
deepEquals &= this.Equals(value1, value2);
}
else
{
return false;
}
}
return deepEquals;
}
// Deep equality for two arbitrary JTokens: identical references and two nulls
// short-circuit, then both tokens are mapped to a coarse JsonType and compared
// by the overload appropriate for that type.
public bool Equals(JToken jToken1, JToken jToken2)
{
if (object.ReferenceEquals(jToken1, jToken2))
{
return true;
}
// Reference check above already handled (null, null); one-sided null is unequal.
if (jToken1 == null || jToken2 == null)
{
return false;
}
JsonType type1 = JTokenTypeToJsonType(jToken1.Type);
JsonType type2 = JTokenTypeToJsonType(jToken2.Type);
// If the types don't match
if (type1 != type2)
{
return false;
}
switch (type1)
{
case JsonType.Object:
return this.Equals((JObject)jToken1, (JObject)jToken2);
case JsonType.Array:
return this.Equals((JArray)jToken1, (JArray)jToken2);
case JsonType.Number:
// NOTE: Some double values in the test document cannot be represented exactly as double. These values get some
// additional decimals at the end. So instead of comparing for equality, we need to find the diff and check
// if it is within the acceptable limit. One example of such an value is 00324008.
return Math.Abs((double)jToken1 - (double)jToken2) <= 1E-9;
case JsonType.String:
// TODO: Newtonsoft reader treats string representing datetime as type Date and doing a ToString returns
// a string that is not in the original format. In case of our binary reader we treat datetime as string
// and return the original string, so this comparison doesn't work for datetime. For now, we are skipping
// date type comparison. Will enable it after fixing this discrepancy
if (jToken1.Type == JTokenType.Date || jToken2.Type == JTokenType.Date)
{
return true;
}
return this.Equals(jToken1.ToString(), jToken2.ToString());
case JsonType.Boolean:
return this.Equals((bool)jToken1, (bool)jToken2);
case JsonType.Null:
return true;
default:
throw new ArgumentException();
}
}
// Constant hash code makes every token collide, so hash-based containers fall
// back to Equals on each probe (linear scans). Presumably intentional because a
// cheap deep-equality-consistent hash for JToken is not available here — fine
// for the small collections used in these tests.
public int GetHashCode(JToken obj)
{
return 0;
}
// Coarse JSON wire-type taxonomy used to align Newtonsoft's finer-grained
// JTokenType values before comparison (see JTokenTypeToJsonType).
private enum JsonType
{
Number,
String,
Null,
Array,
Object,
Boolean
}
/// <summary>
/// Maps a Newtonsoft <see cref="JTokenType"/> onto the coarser internal <see cref="JsonType"/> taxonomy.
/// </summary>
/// <param name="type">The Newtonsoft token type to map.</param>
/// <returns>The equivalent <see cref="JsonType"/>.</returns>
/// <exception cref="ArgumentException">Thrown for token types with no JSON wire equivalent.</exception>
private static JsonType JTokenTypeToJsonType(JTokenType type)
{
    switch (type)
    {
        case JTokenType.Object:
            return JsonType.Object;
        case JTokenType.Array:
            return JsonType.Array;
        case JTokenType.Integer:
        case JTokenType.Float:
            return JsonType.Number;
        // Guid/Uri/TimeSpan/Date are serialized as JSON strings on the wire.
        case JTokenType.Guid:
        case JTokenType.Uri:
        case JTokenType.TimeSpan:
        case JTokenType.Date:
        case JTokenType.String:
            return JsonType.String;
        case JTokenType.Boolean:
            return JsonType.Boolean;
        case JTokenType.Null:
            return JsonType.Null;
        case JTokenType.None:
        case JTokenType.Undefined:
        case JTokenType.Constructor:
        case JTokenType.Property:
        case JTokenType.Comment:
        case JTokenType.Raw:
        case JTokenType.Bytes:
        default:
            // Was a parameterless ArgumentException; include the offending value
            // so a failure is diagnosable from the message alone.
            throw new ArgumentException($"Unsupported JTokenType: {type}", nameof(type));
    }
}
}
}
//-----------------------------------------------------------------------
// <copyright file="BinaryEncodingOverTheWireTests.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Json;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// Tests that curated documents round-trip correctly over the wire in both text and binary JSON serialization formats.
/// </summary>
[TestClass]
[TestCategory("Quarantine") /* Used to filter out quarantined tests in gated runs */]
public class BinaryEncodingOverTheWireTests
{
// Sentinel for tests that ingest nothing.
private static readonly string[] NoDocuments = new string[] { };
// One client per connection mode so every query callback is exercised over
// Gateway, Direct/HTTPS, and Direct/TCP (RNTBD) transports.
private static readonly CosmosClient GatewayClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Gateway });
private static readonly CosmosClient DirectHttpsClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Direct, ConnectionProtocol = Documents.Client.Protocol.Https });
private static readonly CosmosClient RntbdClient = new CosmosClient(
ConfigurationManager.AppSettings["GatewayEndpoint"],
ConfigurationManager.AppSettings["MasterKey"],
new CosmosClientOptions() { ConnectionMode = ConnectionMode.Direct, ConnectionProtocol = Documents.Client.Protocol.Tcp });
private static readonly CosmosClient[] Clients = new CosmosClient[] { GatewayClient, DirectHttpsClient, RntbdClient };
// Default client used for resource management (database/container lifecycle).
private static readonly CosmosClient Client = RntbdClient;
// Lazily-created shared database; materialized once on first await.
private static readonly AsyncLazy<Database> Database = new AsyncLazy<Database>(async () =>
{
return await Client.CreateDatabaseAsync(Guid.NewGuid().ToString());
});
/// <summary>
/// Creates a fresh container (partitioned on /id, 10k RU) in the shared database.
/// </summary>
/// <returns>The newly created container.</returns>
private static async Task<Container> CreateContainerAsync()
{
    Database database = await Database.Value;
    // Await directly instead of blocking on .Result inside an async method:
    // blocking risks thread-pool starvation/deadlock and wraps failures in
    // AggregateException instead of the original exception.
    return await database.CreateContainerAsync(
        Guid.NewGuid().ToString() + "collection",
        "/id",
        10000);
}
// Creates a container and ingests up to 100 documents sampled from the input in a
// deterministic shuffled order (fixed seed 1234). Each document's "id" is
// overwritten with a fresh GUID, which also serves as its partition key value.
// Returns the container together with the documents as the service echoed them back.
private static async Task<Tuple<Container, List<JToken>>> CreateCollectionAndIngestDocuments(IEnumerable<string> documents)
{
Container container = await BinaryEncodingOverTheWireTests.CreateContainerAsync();
List<JToken> insertedDocuments = new List<JToken>();
Random rand = new Random(1234);
foreach (string serializedItem in documents.OrderBy(x => rand.Next()).Take(100))
{
JToken item = JToken.Parse(serializedItem);
item["id"] = Guid.NewGuid().ToString();
JToken createdItem = await container.CreateItemAsync<JToken>(item, new PartitionKey(item["id"].ToString()));
insertedDocuments.Add(createdItem);
}
return new Tuple<Container, List<JToken>>(container, insertedDocuments);
}
internal delegate Task Query(CosmosClient cosmosClient, Container container, List<JToken> items);
/// <summary>
/// Task that wraps boiler plate code for query tests (collection create -> ingest documents -> query documents -> delete collections).
/// The query callback is invoked once per client (one per connection mode), all against the same collection.
/// </summary>
/// <param name="documents">The documents to ingest.</param>
/// <param name="query">
/// The callback for the queries.
/// All the standard arguments will be passed in.
/// Please make sure that this function is idempotent, since a collection will be reused for each connection mode.
/// </param>
/// <returns>A task to await on.</returns>
private static async Task CreateIngestQueryDelete(
IEnumerable<string> documents,
Query query)
{
Tuple<Container, List<JToken>> collectionAndDocuments = await BinaryEncodingOverTheWireTests.CreateCollectionAndIngestDocuments(documents);
List<Task> queryTasks = new List<Task>();
// Run the callback concurrently for every connection mode.
foreach (CosmosClient cosmosClient in BinaryEncodingOverTheWireTests.Clients)
{
queryTasks.Add(query(cosmosClient, collectionAndDocuments.Item1, collectionAndDocuments.Item2));
}
await Task.WhenAll(queryTasks);
// Clean up the collection regardless of which client created it.
await collectionAndDocuments.Item1.DeleteContainerAsync();
}
// Truly-async no-op helper (Delay(0) completes immediately).
// NOTE(review): appears unused in this chunk — confirm before removing.
private static async Task NoOp()
{
await Task.Delay(0);
}
[TestMethod]
public void CheckThatAllTestsAreRunning()
{
    // In general I don't want any of these tests being ignored or quarantined.
    // Please work with me if it needs to be.
    // I do not want these tests turned off for being "flaky", since they have been
    // very stable and if they fail it's because something lower level is probably going wrong.
    // Guard: no [TestMethod] in this class may also carry a [TestCategory]
    // (categories are how tests get filtered out of gated runs).
    Assert.AreEqual(0, typeof(BinaryEncodingOverTheWireTests)
        .GetMethods()
        .Where(method => method.GetCustomAttributes(typeof(TestMethodAttribute), true).Length != 0)
        .Where(method => method.GetCustomAttributes(typeof(TestCategoryAttribute), true).Length != 0)
        .Count(), $"One of the {nameof(BinaryEncodingOverTheWireTests)} tests is not being run.");
}
// One smoke test per curated JSON dataset; each simply round-trips the dataset
// through TestCurratedDocs (ingest -> query in text and binary formats -> compare).
[TestMethod]
public async Task CombinedScriptsDataTest()
{
await this.TestCurratedDocs("CombinedScriptsData.json");
}
// For now we are skipping this test since the documents are too large to ingest and we get a rate size too large (HTTP 413).
#if TEST_COUNTRY
[TestMethod]
public async Task CountriesTest()
{
await this.TestCurratedDocs("countries");
}
#endif
[TestMethod]
public async Task DevTestCollTest()
{
await this.TestCurratedDocs("devtestcoll.json");
}
[TestMethod]
public async Task LastFMTest()
{
await this.TestCurratedDocs("lastfm");
}
[TestMethod]
public async Task LogDataTest()
{
await this.TestCurratedDocs("LogData.json");
}
[TestMethod]
public async Task MillionSong1KDocumentsTest()
{
await this.TestCurratedDocs("MillionSong1KDocuments.json");
}
[TestMethod]
public async Task MsnCollectionTest()
{
await this.TestCurratedDocs("MsnCollection.json");
}
[TestMethod]
public async Task NutritionDataTest()
{
await this.TestCurratedDocs("NutritionData");
}
[TestMethod]
public async Task RunsCollectionTest()
{
await this.TestCurratedDocs("runsCollection");
}
[TestMethod]
public async Task StatesCommitteesTest()
{
await this.TestCurratedDocs("states_committees.json");
}
[TestMethod]
public async Task StatesLegislatorsTest()
{
await this.TestCurratedDocs("states_legislators");
}
[TestMethod]
public async Task Store01Test()
{
await this.TestCurratedDocs("store01C.json");
}
[TestMethod]
public async Task TicinoErrorBucketsTest()
{
await this.TestCurratedDocs("TicinoErrorBuckets");
}
[TestMethod]
public async Task TwitterDataTest()
{
await this.TestCurratedDocs("twitter_data");
}
[TestMethod]
public async Task Ups1Test()
{
await this.TestCurratedDocs("ups1");
}
[TestMethod]
public async Task XpertEventsTest()
{
await this.TestCurratedDocs("XpertEvents");
}
// Loads the named curated dataset and runs the full create/ingest/query/delete
// cycle against it. ("Currated" is a historical misspelling kept so existing
// callers in this class keep compiling.)
private async Task TestCurratedDocs(string path)
{
IEnumerable<object> documents = BinaryEncodingOverTheWireTests.GetDocumentsFromCurratedDoc(path);
await BinaryEncodingOverTheWireTests.CreateIngestQueryDelete(
documents.Select(x => x.ToString()),
this.TestCurratedDocs);
}
// Query callback: drains the container twice — once requesting "JsonText" and once
// requesting "CosmosBinary" wire format — and asserts every returned document is one
// of the ingested documents (deep JSON equality via JsonTokenEqualityComparer).
private async Task TestCurratedDocs(CosmosClient cosmosClient, Container container, List<JToken> items)
{
HashSet<JToken> inputItems = new HashSet<JToken>(items, JsonTokenEqualityComparer.Value);
// Local helper: every page of the iterator must contain only known input items.
async Task AssertQueryDrainsCorrectlyAsync(FeedIterator<JToken> feedIterator)
{
while (feedIterator.HasMoreResults)
{
FeedResponse<JToken> feedResponse = await feedIterator.ReadNextAsync();
foreach (JToken item in feedResponse)
{
Assert.IsTrue(inputItems.Contains(item), "Documents differ from input documents");
}
}
}
FeedIterator<JToken> textFeedIterator = container.GetItemQueryIterator<JToken>(
queryDefinition: new QueryDefinition("SELECT * FROM c ORDER BY c._ts"),
requestOptions: new QueryRequestOptions()
{
CosmosSerializationFormatOptions = new CosmosSerializationFormatOptions(
"JsonText",
(content) => JsonNavigator.Create(content),
() => Cosmos.Json.JsonWriter.Create(JsonSerializationFormat.Text)),
});
await AssertQueryDrainsCorrectlyAsync(textFeedIterator);
// Requests binary content from the service; the writer factory still emits Text,
// presumably so the binary payload is re-surfaced as text JSON for JToken parsing.
// NOTE(review): confirm Text (vs. Binary) is intentional here.
FeedIterator<JToken> binaryFeedIterator = container.GetItemQueryIterator<JToken>(
queryDefinition: new QueryDefinition("SELECT * FROM c ORDER BY c._ts"),
requestOptions: new QueryRequestOptions()
{
CosmosSerializationFormatOptions = new CosmosSerializationFormatOptions(
"CosmosBinary",
(content) => JsonNavigator.Create(content),
() => Cosmos.Json.JsonWriter.Create(JsonSerializationFormat.Text)),
});
await AssertQueryDrainsCorrectlyAsync(binaryFeedIterator);
}
/// <summary>
/// Loads a curated test-JSON resource and returns its documents. A payload that is a
/// single JSON value rather than an array is wrapped in a one-element list.
/// </summary>
/// <param name="path">Resource name under the TestJsons folder.</param>
private static IEnumerable<object> GetDocumentsFromCurratedDoc(string path)
{
    string resourcePath = string.Format("TestJsons/{0}", path);
    string json = TextFileConcatenation.ReadMultipartFile(resourcePath);
    try
    {
        return JsonConvert.DeserializeObject<List<object>>(json);
    }
    catch (JsonSerializationException)
    {
        // Payload was not a JSON array: treat it as a single document.
        return new List<object> { JsonConvert.DeserializeObject<object>(json) };
    }
}
/// <summary>
/// Lazy wrapper whose value is produced asynchronously; the factory runs at most once,
/// on first access to <see cref="Lazy{T}.Value"/>.
/// </summary>
/// <typeparam name="T">Type of the lazily produced value.</typeparam>
public sealed class AsyncLazy<T> : Lazy<Task<T>>
{
    /// <summary>Wraps a synchronous factory; it is executed on the thread pool.</summary>
    public AsyncLazy(Func<T> valueFactory) :
        // Task.Run instead of Task.Factory.StartNew: always uses the default
        // thread-pool scheduler rather than capturing TaskScheduler.Current.
        base(() => Task.Run(valueFactory))
    { }

    /// <summary>Wraps an asynchronous factory; the resulting task is unwrapped automatically.</summary>
    public AsyncLazy(Func<Task<T>> taskFactory) :
        // Task.Run's Func<Task<T>> overload unwraps the inner task, replacing
        // the explicit StartNew(...).Unwrap() dance.
        base(() => Task.Run(taskFactory))
    { }
}
/// <summary>
/// Deep-equality comparer for Newtonsoft <see cref="JToken"/> trees, used to verify
/// that documents read back from the service match the ingested documents.
/// </summary>
public sealed class JsonTokenEqualityComparer : IEqualityComparer<JToken>
{
    // Shared singleton; readonly so the shared instance cannot be reassigned.
    public static readonly JsonTokenEqualityComparer Value = new JsonTokenEqualityComparer();

    public bool Equals(double double1, double double2)
    {
        return double1 == double2;
    }

    public bool Equals(string string1, string string2)
    {
        // static string.Equals tolerates nulls; the original instance call would
        // throw NullReferenceException when string1 is null.
        return string.Equals(string1, string2);
    }

    public bool Equals(bool bool1, bool bool2)
    {
        return bool1 == bool2;
    }

    /// <summary>Arrays are equal when same length and pairwise deep-equal.</summary>
    public bool Equals(JArray jArray1, JArray jArray2)
    {
        if (jArray1.Count != jArray2.Count)
        {
            return false;
        }
        IEnumerable<Tuple<JToken, JToken>> pairwiseElements = jArray1
            .Zip(jArray2, (first, second) => new Tuple<JToken, JToken>(first, second));
        bool deepEquals = true;
        foreach (Tuple<JToken, JToken> pairwiseElement in pairwiseElements)
        {
            deepEquals &= this.Equals(pairwiseElement.Item1, pairwiseElement.Item2);
        }
        return deepEquals;
    }

    /// <summary>Objects are equal when property counts match and every property matches deeply.</summary>
    public bool Equals(JObject jObject1, JObject jObject2)
    {
        if (jObject1.Count != jObject2.Count)
        {
            return false;
        }
        bool deepEquals = true;
        foreach (KeyValuePair<string, JToken> kvp in jObject1)
        {
            string name = kvp.Key;
            JToken value1 = kvp.Value;
            JToken value2;
            if (jObject2.TryGetValue(name, out value2))
            {
                deepEquals &= this.Equals(value1, value2);
            }
            else
            {
                return false;
            }
        }
        return deepEquals;
    }

    /// <summary>Dispatches on the coarse JSON type of both tokens and compares accordingly.</summary>
    public bool Equals(JToken jToken1, JToken jToken2)
    {
        if (object.ReferenceEquals(jToken1, jToken2))
        {
            return true;
        }
        if (jToken1 == null || jToken2 == null)
        {
            return false;
        }
        JsonType type1 = JTokenTypeToJsonType(jToken1.Type);
        JsonType type2 = JTokenTypeToJsonType(jToken2.Type);
        // If the types don't match
        if (type1 != type2)
        {
            return false;
        }
        switch (type1)
        {
            case JsonType.Object:
                return this.Equals((JObject)jToken1, (JObject)jToken2);
            case JsonType.Array:
                return this.Equals((JArray)jToken1, (JArray)jToken2);
            case JsonType.Number:
                // NOTE: Some double values in the test document cannot be represented exactly as double. These values get some
                // additional decimals at the end. So instead of comparing for equality, we need to find the diff and check
                // if it is within the acceptable limit. One example of such an value is 00324008.
                return Math.Abs((double)jToken1 - (double)jToken2) <= 1E-9;
            case JsonType.String:
                // TODO: Newtonsoft reader treats string representing datetime as type Date and doing a ToString returns
                // a string that is not in the original format. In case of our binary reader we treat datetime as string
                // and return the original string, so this comparison doesn't work for datetime. For now, we are skipping
                // date type comparison. Will enable it after fixing this discrepancy
                if (jToken1.Type == JTokenType.Date || jToken2.Type == JTokenType.Date)
                {
                    return true;
                }
                return this.Equals(jToken1.ToString(), jToken2.ToString());
            case JsonType.Boolean:
                return this.Equals((bool)jToken1, (bool)jToken2);
            case JsonType.Null:
                return true;
            default:
                // Include the unexpected type for diagnosability (was parameterless).
                throw new ArgumentException($"Unexpected JsonType: {type1}");
        }
    }

    /// <summary>
    /// Constant hash forces hash containers to rely solely on Equals (linear probes);
    /// acceptable for the small test collections used here.
    /// </summary>
    public int GetHashCode(JToken obj)
    {
        return 0;
    }

    /// <summary>Coarse JSON wire-type taxonomy for aligning Newtonsoft token types.</summary>
    private enum JsonType
    {
        Number,
        String,
        Null,
        Array,
        Object,
        Boolean
    }

    /// <summary>Maps a Newtonsoft <see cref="JTokenType"/> to the coarser <see cref="JsonType"/>.</summary>
    /// <exception cref="ArgumentException">Thrown for token types with no JSON wire equivalent.</exception>
    private static JsonType JTokenTypeToJsonType(JTokenType type)
    {
        switch (type)
        {
            case JTokenType.Object:
                return JsonType.Object;
            case JTokenType.Array:
                return JsonType.Array;
            case JTokenType.Integer:
            case JTokenType.Float:
                return JsonType.Number;
            // Guid/Uri/TimeSpan/Date are serialized as JSON strings on the wire.
            case JTokenType.Guid:
            case JTokenType.Uri:
            case JTokenType.TimeSpan:
            case JTokenType.Date:
            case JTokenType.String:
                return JsonType.String;
            case JTokenType.Boolean:
                return JsonType.Boolean;
            case JTokenType.Null:
                return JsonType.Null;
            case JTokenType.None:
            case JTokenType.Undefined:
            case JTokenType.Constructor:
            case JTokenType.Property:
            case JTokenType.Comment:
            case JTokenType.Raw:
            case JTokenType.Bytes:
            default:
                throw new ArgumentException($"Unsupported JTokenType: {type}", nameof(type));
        }
    }
}
}
}

Просмотреть файл

@ -20,12 +20,12 @@ namespace Microsoft.Azure.Cosmos.Tests
/// </summary>
[TestClass]
public class ClientTelemetryTests
{
{
[TestCleanup]
public void Cleanup()
{
{
Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, null);
}
}
[TestMethod]
public async Task ParseAzureVMMetadataTest()
@ -121,13 +121,13 @@ namespace Microsoft.Azure.Cosmos.Tests
};
string json = JsonConvert.SerializeObject(new ClientTelemetryProperties("clientId", "", null, ConnectionMode.Direct, preferredRegion, 1), ClientTelemetryOptions.JsonSerializerSettings);
Assert.AreEqual("{\"clientId\":\"clientId\",\"processId\":\"\",\"connectionMode\":\"DIRECT\",\"preferredRegions\":[\"region1\"],\"aggregationIntervalInSec\":1,\"systemInfo\":[]}", json);
}
[TestMethod]
}
[TestMethod]
[ExpectedException(typeof(System.FormatException))]
public void CheckMisconfiguredTelemetry_should_fail()
{
Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, "non-boolean");
{
Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, "non-boolean");
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
}

Просмотреть файл

@ -1,464 +1,464 @@
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Handlers;
using Microsoft.Azure.Cosmos.Scripts;
using Microsoft.Azure.Cosmos.Telemetry;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
[TestClass]
public class HandlerTests
{
// Verifies the default request pipeline is composed of exactly these handlers,
// in this order (outermost first), with nothing after RouterHandler.
[TestMethod]
public void HandlerOrder()
{
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
Type[] types = new Type[]
{
typeof(RequestInvokerHandler),
typeof(DiagnosticsHandler),
typeof(RetryHandler),
typeof(RouterHandler)
};
RequestHandler handler = client.RequestHandler;
foreach (Type type in types)
{
Assert.IsTrue(type.Equals(handler.GetType()));
handler = handler.InnerHandler;
}
Assert.IsNull(handler);
}
// Verifies that enabling client telemetry via the environment inserts
// TelemetryHandler between DiagnosticsHandler and RetryHandler.
[TestMethod]
public void HandlerOrderIfTelemetryIsEnabled()
{
    Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, "true");
    try
    {
        using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
        Type[] types = new Type[]
        {
            typeof(RequestInvokerHandler),
            typeof(DiagnosticsHandler),
            typeof(TelemetryHandler),
            typeof(RetryHandler),
            typeof(RouterHandler)
        };
        RequestHandler handler = client.RequestHandler;
        foreach (Type type in types)
        {
            Assert.IsTrue(type.Equals(handler.GetType()));
            handler = handler.InnerHandler;
        }
        Assert.IsNull(handler);
    }
    finally
    {
        // Reset in finally so a failed assertion cannot leak the telemetry
        // setting into subsequent tests.
        Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, null);
    }
}
// Verifies that a custom handler added via AddCustomHandlers is placed directly
// after RequestInvokerHandler and can short-circuit both data-plane (item read)
// and metadata (container delete) operations with a chosen status code.
[TestMethod]
public async Task TestPreProcessingHandler()
{
RequestHandler preProcessHandler = new PreProcessingTestHandler();
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient((builder) => builder.AddCustomHandlers(preProcessHandler));
Assert.IsTrue(typeof(RequestInvokerHandler).Equals(client.RequestHandler.GetType()));
Assert.IsTrue(typeof(PreProcessingTestHandler).Equals(client.RequestHandler.InnerHandler.GetType()));
Container container = client.GetDatabase("testdb")
.GetContainer("testcontainer");
HttpStatusCode[] testHttpStatusCodes = new HttpStatusCode[]
{
HttpStatusCode.OK
};
// User operations
foreach (HttpStatusCode code in testHttpStatusCodes)
{
ItemRequestOptions options = new ItemRequestOptions
{
Properties = new Dictionary<string, object>()
{
{ PreProcessingTestHandler.StatusCodeName, code },
}
};
ItemResponse<object> response = await container.ReadItemAsync<object>("id1", new Cosmos.PartitionKey("pk1"), options);
Console.WriteLine($"Got status code {response.StatusCode}");
Assert.AreEqual(code, response.StatusCode);
}
// Meta-data operations
foreach (HttpStatusCode code in testHttpStatusCodes)
{
ContainerRequestOptions options = new ContainerRequestOptions
{
Properties = new Dictionary<string, object>()
{
{ PreProcessingTestHandler.StatusCodeName, code }
}
};
ContainerResponse response = await container.DeleteContainerAsync(options);
Console.WriteLine($"Got status code {response.StatusCode}");
Assert.AreEqual(code, response.StatusCode);
}
}
// Verifies that RequestOptions.Properties and IfMatchEtag flow through the
// invoker into the outgoing request's Properties and If-Match header.
[TestMethod]
public async Task RequestOptionsHandlerCanHandleRequestOptions()
{
const string PropertyKey = "propkey";
const string Condition = "*";
object propertyValue = Encoding.UTF8.GetBytes("test");
RequestOptions options = new ItemRequestOptions
{
Properties = new Dictionary<string, object>(new List<KeyValuePair<string, object>> {
new KeyValuePair<string, object>(PropertyKey, propertyValue)
}),
IfMatchEtag = Condition,
};
// Terminal handler performs the assertions on the materialized request.
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(propertyValue, request.Properties[PropertyKey]);
Assert.AreEqual(Condition, request.Headers.GetValues(HttpConstants.HttpHeaders.IfMatch).First());
return TestHandler.ReturnSuccess();
});
CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
// For every consistency level and every request-options type that exposes one,
// verifies the level is written to the x-ms-consistency-level request header.
// Also asserts the Cosmos and Documents consistency enums stay in lockstep.
[TestMethod]
public async Task RequestOptionsConsistencyLevel()
{
List<Cosmos.ConsistencyLevel> cosmosLevels = Enum.GetValues(typeof(Cosmos.ConsistencyLevel)).Cast<Cosmos.ConsistencyLevel>().ToList();
List<Documents.ConsistencyLevel> documentLevels = Enum.GetValues(typeof(Documents.ConsistencyLevel)).Cast<Documents.ConsistencyLevel>().ToList();
CollectionAssert.AreEqual(cosmosLevels, documentLevels, new EnumComparer(), "Document consistency level is different from cosmos consistency level");
// Account at Strong so every weaker request-level override is permitted.
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong);
foreach (Cosmos.ConsistencyLevel level in cosmosLevels)
{
List<RequestOptions> requestOptions = new List<RequestOptions>
{
new ItemRequestOptions
{
ConsistencyLevel = level
},
new QueryRequestOptions
{
ConsistencyLevel = level
},
new StoredProcedureRequestOptions
{
ConsistencyLevel = level
}
};
foreach (RequestOptions option in requestOptions)
{
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(level.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = option;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
}
// Verifies MaxIntegratedCacheStaleness set via DedicatedGatewayRequestOptions is
// emitted (in milliseconds, invariant culture) on the cache-staleness header for
// both item and query request options.
[TestMethod]
public async Task QueryRequestOptionsDedicatedGatewayRequestOptions()
{
TimeSpan maxStaleness = TimeSpan.FromMinutes(5);
DedicatedGatewayRequestOptions dedicatedGatewayRequestOptions = new DedicatedGatewayRequestOptions
{
MaxIntegratedCacheStaleness = maxStaleness
};
List<RequestOptions> requestOptions = new List<RequestOptions>
{
new ItemRequestOptions
{
DedicatedGatewayRequestOptions = dedicatedGatewayRequestOptions
},
new QueryRequestOptions
{
DedicatedGatewayRequestOptions = dedicatedGatewayRequestOptions
},
};
foreach (RequestOptions option in requestOptions)
{
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(maxStaleness.TotalMilliseconds.ToString(CultureInfo.InvariantCulture), request.Headers[HttpConstants.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness]);
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = option;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
// Verifies a session token supplied through ItemRequestOptions is propagated to
// the x-ms-session-token request header.
[TestMethod]
public async Task QueryRequestOptionsSessionToken()
{
const string SessionToken = "SessionToken";
ItemRequestOptions options = new ItemRequestOptions
{
SessionToken = SessionToken
};
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(SessionToken, request.Headers.GetValues(HttpConstants.HttpHeaders.SessionToken).First());
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
// Verifies that a client-level consistency override (set via the client builder)
// is stamped on requests that carry no per-request consistency option.
[TestMethod]
public async Task ConsistencyLevelClient()
{
List<Cosmos.ConsistencyLevel> cosmosLevels = Enum.GetValues(typeof(Cosmos.ConsistencyLevel)).Cast<Cosmos.ConsistencyLevel>().ToList();
foreach (Cosmos.ConsistencyLevel clientLevel in cosmosLevels)
{
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(
accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong,
customizeClientBuilder: builder => builder.WithConsistencyLevel(clientLevel));
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(clientLevel.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: client.ClientOptions.ConsistencyLevel)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
// Verifies precedence: a per-request consistency level (BoundedStaleness) wins
// over the client-level override (Eventual).
[TestMethod]
public async Task ConsistencyLevelClientAndRequestOption()
{
Cosmos.ConsistencyLevel requestOptionLevel = Cosmos.ConsistencyLevel.BoundedStaleness;
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(
accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong,
customizeClientBuilder: builder => builder.WithConsistencyLevel(Cosmos.ConsistencyLevel.Eventual));
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(requestOptionLevel.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = new ItemRequestOptions() { ConsistencyLevel = requestOptionLevel };
await invoker.SendAsync(requestMessage, new CancellationToken());
}
// Verifies the three data-plane options (IfNoneMatchEtag, ConsistencyLevel,
// SessionToken) are all propagated to their respective request headers.
[TestMethod]
public async Task RequestOptionsHandlerCanHandleDataPlaneRequestOptions()
{
const string Condition = "*";
const string SessionToken = "test";
ItemRequestOptions options = new ItemRequestOptions
{
IfNoneMatchEtag = Condition,
ConsistencyLevel = (Cosmos.ConsistencyLevel)ConsistencyLevel.Eventual,
SessionToken = SessionToken
};
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(Condition, request.Headers.GetValues(HttpConstants.HttpHeaders.IfNoneMatch).First());
Assert.AreEqual(ConsistencyLevel.Eventual.ToString(), request.Headers.GetValues(HttpConstants.HttpHeaders.ConsistencyLevel).First());
Assert.AreEqual(SessionToken, request.Headers.GetValues(HttpConstants.HttpHeaders.SessionToken).First());
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
// Verifies that StreamContent wrapping a MemoryStream can be copied repeatedly
// without throwing (e.g. for retries re-reading the request body).
[TestMethod]
public async Task Test()
{
    SomePayload t = new SomePayload()
    {
        V1 = "Value1",
        V2 = "Value2",
    };
    JsonSerializer js = new JsonSerializer();
    using (MemoryStream ms = new MemoryStream())
    {
        StreamWriter sw = new StreamWriter(ms);
        JsonTextWriter tw = new JsonTextWriter(sw);
        js.Serialize(tw, t);
        // Flush before seeking; otherwise the serialized payload can still be
        // sitting in the StreamWriter buffer and the stream reads back empty.
        tw.Flush();
        sw.Flush();
        ms.Seek(0, SeekOrigin.Begin);
        HttpMethod method = HttpMethod.Get;
        string ep = "https://httpbin.org/put";
        // Dispose the request message so the StreamContent wrapper is released.
        using (HttpRequestMessage hrm = new HttpRequestMessage(method, ep)
        {
            Content = new StreamContent(ms)
        })
        {
            for (int i = 0; i < 5; i++)
            {
                using (MemoryStream msCopy = new MemoryStream())
                {
                    await hrm.Content.CopyToAsync(msCopy);
                }
            }
        }
    }
}
// Verifies TransportHandler unwraps a DocumentClientException nested inside an
// AggregateException into a ResponseMessage carrying the original status code
// and error message.
[TestMethod]
public void TestAggregateExceptionConverter()
{
string errorMessage = "BadRequest message";
IEnumerable<Exception> exceptions = new List<Exception>()
{
new DocumentClientException(errorMessage, innerException: null, statusCode: HttpStatusCode.BadRequest)
};
AggregateException ae = new AggregateException(message: "Test AE message", innerExceptions: exceptions);
ResponseMessage response = TransportHandler.AggregateExceptionConverter(ae, null);
Assert.IsNotNull(response);
Assert.AreEqual(HttpStatusCode.BadRequest, response.StatusCode);
Assert.IsTrue(response.ErrorMessage.Contains(errorMessage));
}
// Minimal serializable DTO used only by the stream-copy test above.
private class SomePayload
{
public string V1 { get; set; }
public string V2 { get; set; }
}
// Equality-only comparer for enum values: 0 when numeric value AND name match,
// otherwise 1. Not a valid ordering (never returns -1) — suitable only for
// CollectionAssert.AreEqual-style equality checks.
private class EnumComparer : IComparer
{
public int Compare(object x, object y)
{
if ((int)x == (int)y &&
string.Equals(x.ToString(), y.ToString()))
{
return 0;
}
return 1;
}
}
}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace Microsoft.Azure.Cosmos.Tests
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Handlers;
using Microsoft.Azure.Cosmos.Scripts;
using Microsoft.Azure.Cosmos.Telemetry;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
[TestClass]
public class HandlerTests
{
[TestMethod]
public void HandlerOrder()
{
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
Type[] types = new Type[]
{
typeof(RequestInvokerHandler),
typeof(DiagnosticsHandler),
typeof(RetryHandler),
typeof(RouterHandler)
};
RequestHandler handler = client.RequestHandler;
foreach (Type type in types)
{
Assert.IsTrue(type.Equals(handler.GetType()));
handler = handler.InnerHandler;
}
Assert.IsNull(handler);
}
[TestMethod]
public void HandlerOrderIfTelemetryIsEnabled()
{
Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, "true");
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
Type[] types = new Type[]
{
typeof(RequestInvokerHandler),
typeof(DiagnosticsHandler),
typeof(TelemetryHandler),
typeof(RetryHandler),
typeof(RouterHandler)
};
RequestHandler handler = client.RequestHandler;
foreach (Type type in types)
{
Assert.IsTrue(type.Equals(handler.GetType()));
handler = handler.InnerHandler;
}
Assert.IsNull(handler);
Environment.SetEnvironmentVariable(ClientTelemetryOptions.EnvPropsClientTelemetryEnabled, null);
}
[TestMethod]
public async Task TestPreProcessingHandler()
{
RequestHandler preProcessHandler = new PreProcessingTestHandler();
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient((builder) => builder.AddCustomHandlers(preProcessHandler));
Assert.IsTrue(typeof(RequestInvokerHandler).Equals(client.RequestHandler.GetType()));
Assert.IsTrue(typeof(PreProcessingTestHandler).Equals(client.RequestHandler.InnerHandler.GetType()));
Container container = client.GetDatabase("testdb")
.GetContainer("testcontainer");
HttpStatusCode[] testHttpStatusCodes = new HttpStatusCode[]
{
HttpStatusCode.OK
};
// User operations
foreach (HttpStatusCode code in testHttpStatusCodes)
{
ItemRequestOptions options = new ItemRequestOptions
{
Properties = new Dictionary<string, object>()
{
{ PreProcessingTestHandler.StatusCodeName, code },
}
};
ItemResponse<object> response = await container.ReadItemAsync<object>("id1", new Cosmos.PartitionKey("pk1"), options);
Console.WriteLine($"Got status code {response.StatusCode}");
Assert.AreEqual(code, response.StatusCode);
}
// Meta-data operations
foreach (HttpStatusCode code in testHttpStatusCodes)
{
ContainerRequestOptions options = new ContainerRequestOptions
{
Properties = new Dictionary<string, object>()
{
{ PreProcessingTestHandler.StatusCodeName, code }
}
};
ContainerResponse response = await container.DeleteContainerAsync(options);
Console.WriteLine($"Got status code {response.StatusCode}");
Assert.AreEqual(code, response.StatusCode);
}
}
[TestMethod]
public async Task RequestOptionsHandlerCanHandleRequestOptions()
{
const string PropertyKey = "propkey";
const string Condition = "*";
object propertyValue = Encoding.UTF8.GetBytes("test");
RequestOptions options = new ItemRequestOptions
{
Properties = new Dictionary<string, object>(new List<KeyValuePair<string, object>> {
new KeyValuePair<string, object>(PropertyKey, propertyValue)
}),
IfMatchEtag = Condition,
};
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(propertyValue, request.Properties[PropertyKey]);
Assert.AreEqual(Condition, request.Headers.GetValues(HttpConstants.HttpHeaders.IfMatch).First());
return TestHandler.ReturnSuccess();
});
CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
[TestMethod]
public async Task RequestOptionsConsistencyLevel()
{
List<Cosmos.ConsistencyLevel> cosmosLevels = Enum.GetValues(typeof(Cosmos.ConsistencyLevel)).Cast<Cosmos.ConsistencyLevel>().ToList();
List<Documents.ConsistencyLevel> documentLevels = Enum.GetValues(typeof(Documents.ConsistencyLevel)).Cast<Documents.ConsistencyLevel>().ToList();
CollectionAssert.AreEqual(cosmosLevels, documentLevels, new EnumComparer(), "Document consistency level is different from cosmos consistency level");
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong);
foreach (Cosmos.ConsistencyLevel level in cosmosLevels)
{
List<RequestOptions> requestOptions = new List<RequestOptions>
{
new ItemRequestOptions
{
ConsistencyLevel = level
},
new QueryRequestOptions
{
ConsistencyLevel = level
},
new StoredProcedureRequestOptions
{
ConsistencyLevel = level
}
};
foreach (RequestOptions option in requestOptions)
{
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(level.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = option;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
}
[TestMethod]
public async Task QueryRequestOptionsDedicatedGatewayRequestOptions()
{
TimeSpan maxStaleness = TimeSpan.FromMinutes(5);
DedicatedGatewayRequestOptions dedicatedGatewayRequestOptions = new DedicatedGatewayRequestOptions
{
MaxIntegratedCacheStaleness = maxStaleness
};
List<RequestOptions> requestOptions = new List<RequestOptions>
{
new ItemRequestOptions
{
DedicatedGatewayRequestOptions = dedicatedGatewayRequestOptions
},
new QueryRequestOptions
{
DedicatedGatewayRequestOptions = dedicatedGatewayRequestOptions
},
};
foreach (RequestOptions option in requestOptions)
{
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(maxStaleness.TotalMilliseconds.ToString(CultureInfo.InvariantCulture), request.Headers[HttpConstants.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness]);
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = option;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
[TestMethod]
public async Task QueryRequestOptionsSessionToken()
{
const string SessionToken = "SessionToken";
ItemRequestOptions options = new ItemRequestOptions
{
SessionToken = SessionToken
};
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(SessionToken, request.Headers.GetValues(HttpConstants.HttpHeaders.SessionToken).First());
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
[TestMethod]
public async Task ConsistencyLevelClient()
{
List<Cosmos.ConsistencyLevel> cosmosLevels = Enum.GetValues(typeof(Cosmos.ConsistencyLevel)).Cast<Cosmos.ConsistencyLevel>().ToList();
foreach (Cosmos.ConsistencyLevel clientLevel in cosmosLevels)
{
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(
accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong,
customizeClientBuilder: builder => builder.WithConsistencyLevel(clientLevel));
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(clientLevel.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: client.ClientOptions.ConsistencyLevel)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
}
[TestMethod]
public async Task ConsistencyLevelClientAndRequestOption()
{
Cosmos.ConsistencyLevel requestOptionLevel = Cosmos.ConsistencyLevel.BoundedStaleness;
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient(
accountConsistencyLevel: Cosmos.ConsistencyLevel.Strong,
customizeClientBuilder: builder => builder.WithConsistencyLevel(Cosmos.ConsistencyLevel.Eventual));
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(requestOptionLevel.ToString(), request.Headers[HttpConstants.HttpHeaders.ConsistencyLevel]);
return TestHandler.ReturnSuccess();
});
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"))
{
ResourceType = ResourceType.Document
};
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = new ItemRequestOptions() { ConsistencyLevel = requestOptionLevel };
await invoker.SendAsync(requestMessage, new CancellationToken());
}
[TestMethod]
public async Task RequestOptionsHandlerCanHandleDataPlaneRequestOptions()
{
const string Condition = "*";
const string SessionToken = "test";
ItemRequestOptions options = new ItemRequestOptions
{
IfNoneMatchEtag = Condition,
ConsistencyLevel = (Cosmos.ConsistencyLevel)ConsistencyLevel.Eventual,
SessionToken = SessionToken
};
TestHandler testHandler = new TestHandler((request, cancellationToken) =>
{
Assert.AreEqual(Condition, request.Headers.GetValues(HttpConstants.HttpHeaders.IfNoneMatch).First());
Assert.AreEqual(ConsistencyLevel.Eventual.ToString(), request.Headers.GetValues(HttpConstants.HttpHeaders.ConsistencyLevel).First());
Assert.AreEqual(SessionToken, request.Headers.GetValues(HttpConstants.HttpHeaders.SessionToken).First());
return TestHandler.ReturnSuccess();
});
using CosmosClient client = MockCosmosUtil.CreateMockCosmosClient();
RequestInvokerHandler invoker = new RequestInvokerHandler(client, requestedClientConsistencyLevel: null)
{
InnerHandler = testHandler
};
RequestMessage requestMessage = new RequestMessage(HttpMethod.Get, new System.Uri("https://dummy.documents.azure.com:443/dbs"));
requestMessage.Headers.Add(HttpConstants.HttpHeaders.PartitionKey, "[]");
requestMessage.ResourceType = ResourceType.Document;
requestMessage.OperationType = OperationType.Read;
requestMessage.RequestOptions = options;
await invoker.SendAsync(requestMessage, new CancellationToken());
}
[TestMethod]
public async Task Test()
{
SomePayload t = new SomePayload()
{
V1 = "Value1",
V2 = "Value2",
};
JsonSerializer js = new JsonSerializer();
using (MemoryStream ms = new MemoryStream())
{
StreamWriter sw = new StreamWriter(ms);
JsonTextWriter tw = new JsonTextWriter(sw);
js.Serialize(tw, t);
ms.Seek(0, SeekOrigin.Begin);
HttpMethod method = HttpMethod.Get;
string ep = "https://httpbin.org/put";
HttpRequestMessage hrm = new HttpRequestMessage(method, ep)
{
Content = new StreamContent(ms)
};
for (int i = 0; i < 5; i++)
{
using (MemoryStream msCopy = new MemoryStream())
{
await hrm.Content.CopyToAsync(msCopy);
}
}
}
}
[TestMethod]
public void TestAggregateExceptionConverter()
{
string errorMessage = "BadRequest message";
IEnumerable<Exception> exceptions = new List<Exception>()
{
new DocumentClientException(errorMessage, innerException: null, statusCode: HttpStatusCode.BadRequest)
};
AggregateException ae = new AggregateException(message: "Test AE message", innerExceptions: exceptions);
ResponseMessage response = TransportHandler.AggregateExceptionConverter(ae, null);
Assert.IsNotNull(response);
Assert.AreEqual(HttpStatusCode.BadRequest, response.StatusCode);
Assert.IsTrue(response.ErrorMessage.Contains(errorMessage));
}
private class SomePayload
{
public string V1 { get; set; }
public string V2 { get; set; }
}
private class EnumComparer : IComparer
{
public int Compare(object x, object y)
{
if ((int)x == (int)y &&
string.Equals(x.ToString(), y.ToString()))
{
return 0;
}
return 1;
}
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу