Merge branch 'master' into dev
This commit is contained in:
Commit 3b5fc7939d
@@ -37,3 +37,7 @@ coverage
# Editor configuration
.vscode/*
+
+# Tests artefacts
+fileservice_test_*
+blobservice_test.tmp
@@ -1,12 +1,23 @@
+Tracking Breaking Changes in 1.4.0
+
+BLOB
+* Changed `/S` of SpeedSummary to `/s`.
+
+FILE
+* Changed `/S` of SpeedSummary to `/s`.
+
Tracking Breaking Changes in 1.3.0

QUEUE
* Updated the `QueueMessageResult.dequeueCount` from `string` to `number`.

Tracking Breaking Changes in 1.2.0

TABLE
* Beginning with version 2015-12-11, the Atom feed is no longer supported as a payload format for Table service operations. Version 2015-12-11 and later versions support only JSON for the payload format.

Tracking Breaking Changes in 1.0.0

BLOB
* The `blob` property of BlobResult has been renamed to `name` to keep consistent with other services API and the `listBlob` API.
* Decoded the block name of LockListResult from base64 string to utf-8 string.
@@ -15,6 +26,7 @@ QUEUE
* The `encodeMessage` flag of QueueService has been replaced by `messageEncoder` which support `TextBase64QueueMessageEncoder`, `BinaryBase64QueueMessageEncoder`, `TextXmlQueueMessageEncoder` and custom implementation of QueueMessageEncoder.

Tracking Breaking Changes in 0.10.0

ALL
* The `signedIdentifiers` parameter and result properties have been changed from array to hash map to avoid non unique signed identifier id.
@@ -43,6 +55,7 @@ TABLE
* Renamed TableUtilities.entityGenerator.Entity to EntityProperty.

Tracking Breaking Changes in 0.7.0

ALL
* The generateDevelopmentStorageCredendentials function in the azure-storage.js is renamed to generateDevelopmentStorageCredentials.

@@ -60,6 +73,7 @@ FILE
* The property names returned from getting share stats are changed to camelCase.

Tracking Breaking Changes in 0.5.0

ALL
* The suffix "_HEADER" is removed from all the http header constants.
* The generateSharedAccessSignatureWithVersion function in each service is deprecated.
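The `encodeMessage` → `messageEncoder` replacement listed above changes how queue message encoding is configured. A minimal sketch, assuming the `azure-storage` package, a connection string in the environment, and a pre-existing queue named `taskqueue` (a placeholder name):

```javascript
var azure = require('azure-storage');

// Reads AZURE_STORAGE_CONNECTION_STRING from the environment.
var queueService = azure.createQueueService();

// Instead of the old boolean `encodeMessage` flag, assign one of the built-in encoders
// (TextBase64, BinaryBase64, TextXml) or any custom QueueMessageEncoder implementation.
queueService.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();

queueService.createMessage('taskqueue', 'Hello, World', function (error) {
  if (!error) {
    console.log('Message enqueued with Base64 text encoding');
  }
});
```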
ChangeLog.md (24 changed lines)
@@ -1,6 +1,30 @@
Note: This is an Azure Storage only package. The all up Azure node sdk still has the old storage bits in there. In a future release, those storage bits will be removed and an npm dependency to this storage node sdk will
be taken. This is a GA release and the changes described below indicate the changes from the Azure node SDK 0.9.8 available here - https://github.com/Azure/azure-sdk-for-node.

+2016.11 Version 1.4.0
+
+ALL
+* Added `ENOTFOUND` for secondary endpoint and `ECONNREFUSED` to `RetryPolicyFilter`.
+* Added support for `text/html` error response body.
+
+BLOB
+* Fixed the issue that the ChunkAllocator maxCount is aligned with parallelOperationThreadCount.
+* Changed `/S` of SpeedSummary to `/s`.
+* Fixed the issue that `BlobService.createBlockBlobFromText` will hang when passed `null` or `undefined` `text` argument.
+* Fixed the issue that `BlobService.createBlockBlobFromText` will always set `content-type` to `text/plain`.
+
+QUEUE
+* Allowed `QueueService.peekMessages` against secondary endpoint.
+
+FILE
+* Fixed the issue that the ChunkAllocator maxCount is aligned with parallelOperationThreadCount.
+* Changed `/S` of SpeedSummary to `/s`.
+
+2016.10 Version 1.3.2
+
+BLOB
+* Prevent a blockId from being generated with a decimal point.
+
2016.09 Version 1.3.1

ALL
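For the `RetryPolicyFilter` entry above, the sketch below shows the filter usage this change affects; it assumes the `azure-storage` package, a connection string in the environment, and a placeholder container name. With 1.4.0 the same filter chain also treats `ECONNREFUSED` (and `ENOTFOUND` against the secondary endpoint) as retryable.

```javascript
var azure = require('azure-storage');

// Exponential back-off: up to 3 retries, starting around 4 seconds apart.
var retryPolicy = new azure.ExponentialRetryPolicyFilter(3, 4000);
var blobService = azure.createBlobService().withFilter(retryPolicy);

blobService.createContainerIfNotExists('uploads', function (error) {
  if (!error) {
    console.log('Container is ready (transient errors were retried transparently)');
  }
});
```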
@@ -431,16 +431,11 @@ By default the unit tests are ran with Nock recording data. To run tests against
set NOCK_OFF=true
```

-and set up the following environment variables for storage account credentials by
+and set up the following environment variable for storage account credentials by

```Batchfile
set AZURE_STORAGE_CONNECTION_STRING="valid storage connection string"
```
-or
-```Batchfile
-set AZURE_STORAGE_ACCOUNT="valid storage account name"
-set AZURE_STORAGE_ACCESS_KEY="valid storage account key"
-```

To record the data in a test pass against real storage account for future Nock usage:
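The same connection string the tests read is what the client factories pick up when called with no arguments, so a quick smoke test of the configured account can look like this hedged sketch (assumes `AZURE_STORAGE_CONNECTION_STRING` is set as shown above):

```javascript
var azure = require('azure-storage');

// createBlobService() without arguments falls back to the
// AZURE_STORAGE_CONNECTION_STRING (or account/key) environment variables.
var blobService = azure.createBlobService();

blobService.listContainersSegmented(null, function (error, result) {
  if (error) {
    console.error('Connection string is not usable:', error.message);
  } else {
    result.entries.forEach(function (container) {
      console.log(container.name);
    });
  }
});
```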
@@ -14,71 +14,50 @@
// limitations under the License.
//

-var fs = require('fs');
-if (!fs.existsSync) {
-  fs.existsSync = require('path').existsSync;
-}
-
-var azure;
-if (fs.existsSync('absolute path to azure-storage.js')) {
-  azure = require('absolute path to azure-storage');
-} else {
-  azure = require('azure-storage');
-}

var express = require('express');
+var expressLayouts = require('express-ejs-layouts');
+var path = require('path');
+var azure = require('azure-storage');
var formidable = require('formidable');
var helpers = require('./helpers.js');

-var app = module.exports = express.createServer();
+var app = express();

// Global request options, set the retryPolicy
var blobClient = azure.createBlobService('UseDevelopmentStorage=true').withFilter(new azure.ExponentialRetryPolicyFilter());
var containerName = 'webpi';

//Configuration
-app.configure(function () {
-  app.set('views', __dirname + '/views');
-  app.set('view engine', 'ejs');
-  app.use(express.methodOverride());
-  // app.use(express.logger());
-  app.use(app.router);
-  app.use(express.static(__dirname + '/public'));
-});
+app.set('views', path.join(__dirname + '/views'));
+app.set('view engine', 'ejs');
+app.set('layout', 'layout');
+app.use(express.static(path.join(__dirname + '/public')));
+app.use(expressLayouts);

-app.configure('development', function () {
+app.set('development', function(){
  app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
});

-app.configure('production', function () {
+app.set('production', function(){
  app.use(express.errorHandler());
});

-app.param(':id', function (req, res, next) {
+app.param('id', function (req, res, next) {
  next();
});

//Routes
app.get('/', function (req, res) {
-  res.render('index.ejs', { locals: {
-    title: 'Welcome'
-  }
-  });
+  res.render('index.ejs', { title: 'Welcome' });
});

app.get('/Upload', function (req, res) {
-  res.render('upload.ejs', { locals: {
-    title: 'Upload File'
-  }
-  });
+  res.render('upload.ejs', { title: 'Upload File' });
});

app.get('/Display', function (req, res) {
-  blobClient.listBlobs(containerName, function (error, blobs) {
-    res.render('display.ejs', { locals: {
-      title: 'List of Blobs',
-      serverBlobs: blobs
-    }
-    });
+  blobClient.listBlobsSegmented(containerName, null, function (error, blobs, result) {
+    res.render('display.ejs', { title: 'List of Blobs', serverBlobs: blobs.entries });
  });
});
@@ -117,7 +96,6 @@ app.post('/uploadhandler', function (req, res) {
    if (error != null) {
      helpers.renderError(res);
    } else {
-      setSAS(containerName, fields.itemName);
      res.redirect('/Display');
    }
  });
@@ -145,25 +123,13 @@ blobClient.createContainerIfNotExists(containerName, function (error) {
  }
});

-function setSAS(containerName, blobName) {
-  var sharedAccessPolicy = {
-    AccessPolicy: {
-      Expiry: azure.date.minutesFromNow(3)
-    }
-  };
-
-  var blobUrl = blobClient.getBlobUrl(containerName, blobName, sharedAccessPolicy);
-  console.log("access the blob at ", blobUrl);
-}
-
function setPermissions() {
  var options = { publicAccessLevel: azure.BlobUtilities.BlobContainerPublicAccessType.BLOB };
  blobClient.setContainerAcl(containerName, null, options, function (error) {
    if (error) {
      console.log(error);
-    } else {
-      app.listen(process.env.port || 1337);
-      console.log("Express server listening on port %d in %s mode", app.address().port, app.settings.env);
-    }
+    }
  });
}

+module.exports = app;
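The `/Display` route above switches from the removed `listBlobs` API to `listBlobsSegmented` and renders `blobs.entries`. A sketch of the full segmented-listing pattern, assuming the same `blobClient` and `containerName` as in the sample (the helper name is hypothetical):

```javascript
// Collect every blob in a container by following the continuation token
// returned by listBlobsSegmented until it comes back null.
function listAllBlobs(blobClient, containerName, done) {
  var entries = [];

  function nextSegment(token) {
    blobClient.listBlobsSegmented(containerName, token, function (error, result) {
      if (error) { return done(error); }
      entries = entries.concat(result.entries);
      if (result.continuationToken) {
        nextSegment(result.continuationToken);
      } else {
        done(null, entries);
      }
    });
  }

  nextSegment(null);
}

// Usage: listAllBlobs(blobClient, containerName, function (err, blobs) { /* render */ });
```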
@@ -0,0 +1,82 @@
+/**
+ * Module dependencies
+ */
+
+var app = require('../app');
+var debug = require('debug')('blobuploader:server');
+var http = require('http');
+
+/**
+ * Get port from environment and store in express app
+ */
+
+var port = normalizePort(process.env.PORT || '3000');
+app.set('port', port);
+
+/**
+ * Create HTTP server
+ */
+
+var server = http.createServer(app);
+
+/**
+ * Listen on provided port, on all network inferfaces
+ */
+
+server.listen(port);
+server.on('error', onError);
+server.on('listening', onListening);
+
+/**
+ * Normalize a port into a number, string, or false.
+ */
+
+function normalizePort(val) {
+  var port = parseInt(val, 10);
+
+  if (isNaN(port)){
+    return val;
+  }
+
+  if (port >= 0) {
+    return port;
+  }
+
+  return false;
+}
+
+/**
+ * Event listener for HTTP server "error" event
+ */
+
+function onError(error) {
+  if (error.syscall !== 'listen') {
+    throw error;
+  }
+
+  var bind = typeof port === 'string' ? 'Pipe ' + port : 'Port ' + port;
+
+  // handle specific listen errors with friendly messages
+  switch (error.code) {
+    case 'EACCES':
+      console.error(bind + ' requires elevated privileges');
+      process.exit(1);
+      break;
+    case 'EADDRINUSE':
+      console.error(bind + ' is already in use');
+      process.exit(1);
+      break;
+    default:
+      throw error;
+  }
+}
+
+/**
+ * Event listener for HTTP server "listening" event.
+ */
+
+function onListening() {
+  var addr = server.address();
+  var bind = typeof addr === 'string' ? 'pipe ' + addr : 'port ' + addr.port;
+  debug('Listening on ' + bind);
+}
@@ -1,11 +1,16 @@
{
-  "name": "productmanager"
-  , "version": "0.0.1"
-  , "private": true
-  , "dependencies": {
-    "express": "2.x"
-    , "ejs": ">= 0.4.3"
-    , "formidable": ">= 1.0.6"
-    , "azure": ">= 0.5.1"
+  "name": "azurestorageblobuploadersample",
+  "version": "0.0.2",
+  "private": true,
+  "scripts": {
+    "start": "node ./bin/server.js"
+  },
+  "dependencies": {
+    "azure-storage": "",
+    "debug": "^2.2.0",
+    "ejs": "~2.3.3",
+    "express": "~4.13.1",
+    "express-ejs-layouts": "^1.1.0",
+    "formidable": "^1.0.17"
  }
}
gruntfile.js (10 changed lines)
@@ -18,7 +18,9 @@ module.exports = function(grunt) {
  //init stuff
  grunt.initConfig({

-    pkg: grunt.file.readJSON('package.json'),
+    nsp: {
+      package: grunt.file.readJSON('package.json')
+    },

    mochaTest: {
      test: {

@@ -66,7 +68,7 @@ module.exports = function(grunt) {
      ],
      options: {
        destination: 'docs',
-        template: 'node_modules/grunt-jsdoc/node_modules/ink-docstrap/template',
+        template: 'node_modules/ink-docstrap/template',
        configure: 'jsdoc/jsdoc.json'
      }
    }

@@ -91,10 +93,10 @@ module.exports = function(grunt) {
  grunt.loadNpmTasks('grunt-jsdoc');
  grunt.loadNpmTasks('grunt-mocha-test');
  grunt.loadNpmTasks('grunt-devserver');
-  grunt.loadNpmTasks('grunt-nsp-package');
+  grunt.loadNpmTasks('grunt-nsp');
  grunt.loadNpmTasks('grunt-contrib-jshint');

  grunt.registerTask('doc', ['jsdoc', 'devserver']);
-  grunt.registerTask('validate', ['jshint', 'validate-package']);
+  grunt.registerTask('validate', ['jshint', 'nsp']);
  grunt.registerTask('default', ['validate', 'mochaTest']);
};
@@ -18,8 +18,14 @@
var util = require('util');
var _ = require('underscore');

+function captureStackTrace(targetObject, constructorOpt) {
+  if (Error.captureStackTrace) {
+    Error.captureStackTrace(targetObject, constructorOpt);
+  }
+}
+
function ArgumentError(argumentName, message) {
-  Error.captureStackTrace(this, this.constructor);
+  captureStackTrace(this, this.constructor);
  this.name = this.constructor.name;
  this.argumentName = argumentName;
  this.message = message || util.format('Invalid or missing argument supplied: %s', argumentName);

@@ -27,7 +33,7 @@ function ArgumentError(argumentName, message) {
util.inherits(ArgumentError, Error);

function ArgumentNullError(argumentName, message) {
-  Error.captureStackTrace(this, this.constructor);
+  captureStackTrace(this, this.constructor);
  this.name = this.constructor.name;
  this.argumentName = argumentName;
  this.message = message || util.format('Missing argument: %s', argumentName);

@@ -36,7 +42,7 @@ function ArgumentNullError(argumentName, message) {
util.inherits(ArgumentNullError, Error);

function StorageError(message, properties) {
-  Error.captureStackTrace(this, this.constructor);
+  captureStackTrace(this, this.constructor);
  this.name = this.constructor.name;
  this.message = message;

@@ -48,7 +54,7 @@ function StorageError(message, properties) {
util.inherits(StorageError, Error);

function TimeoutError(message) {
-  Error.captureStackTrace(this, this.constructor);
+  captureStackTrace(this, this.constructor);
  this.name = this.constructor.name;
  this.message = message;
}

@@ -58,4 +64,5 @@ util.inherits(TimeoutError, Error);
module.exports.ArgumentError = ArgumentError;
module.exports.ArgumentNullError = ArgumentNullError;
module.exports.StorageError = StorageError;
module.exports.TimeoutError = TimeoutError;
+module.exports.captureStackTrace = captureStackTrace;
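`Error.captureStackTrace` is a V8-specific extension, which is why the newly exported helper guards the call. A sketch of reusing the same guarded pattern for a custom error type (the error name below is hypothetical):

```javascript
var util = require('util');

// Mirror of the guarded helper above: Error.captureStackTrace is a V8 extension,
// so skip it on engines that do not provide it.
function captureStackTrace(targetObject, constructorOpt) {
  if (Error.captureStackTrace) {
    Error.captureStackTrace(targetObject, constructorOpt);
  }
}

// Hypothetical custom error built the same way as ArgumentError / StorageError.
function UploadAbortedError(message) {
  captureStackTrace(this, this.constructor);
  this.name = this.constructor.name;
  this.message = message || 'Upload aborted';
}
util.inherits(UploadAbortedError, Error);

// Usage: throw new UploadAbortedError('user cancelled');
```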
@@ -105,8 +105,8 @@ RetryPolicyFilter._handle = function (self, requestOptions, next) {
  // If a request sent to the secondary location fails with 404 (Not Found), it is possible
  // that the resource replication is not finished yet. So, in case of 404 only in the secondary
  // location, the failure should still be retryable.
-  var secondaryNotFound = (retryRequestOptions.currentLocation === Constants.StorageLocation.SECONDARY) && (returnObject.response && returnObject.response.statusCode === 404);
+  var secondaryNotFound = (retryRequestOptions.currentLocation === Constants.StorageLocation.SECONDARY) && ((returnObject.response && returnObject.response.statusCode === 404) || (returnObject.error && returnObject.error.code === 'ENOTFOUND'));

  var retryInfo = self.shouldRetry(secondaryNotFound ? 500 : (azureutil.objectIsNull(returnObject.response) ? 306 : returnObject.response.statusCode), retryRequestOptions);
  retryRequestOptions.retryContext.retryCount++;

@@ -130,7 +130,7 @@ RetryPolicyFilter._handle = function (self, requestOptions, next) {
  // we should NOT retry within the SDK as the stream data is not valid anymore if we retry directly.
  if (!returnObject.outputStreamSent && returnObject.error && azureutil.objectIsNull(returnObject.retryable) &&
      ((!azureutil.objectIsNull(returnObject.response) && retryInfo.retryable) ||
-      (returnObject.error.code === 'ETIMEDOUT' || returnObject.error.code === 'ESOCKETTIMEDOUT' || returnObject.error.code === 'ECONNRESET' || returnObject.error.code === 'EAI_AGAIN'))) {
+      (returnObject.error.code === 'ECONNREFUSED' || returnObject.error.code === 'ETIMEDOUT' || returnObject.error.code === 'ESOCKETTIMEDOUT' || returnObject.error.code === 'ECONNRESET' || returnObject.error.code === 'EAI_AGAIN'))) {

    if (retryRequestOptions.currentLocation === Constants.StorageLocation.PRIMARY) {
      lastPrimaryAttempt = returnObject.operationEndTime;
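A hand-rolled retry policy plugs into the same `_handle` path shown above: `shouldRetry` receives the computed status code (including the 500 substituted for a secondary 404/ENOTFOUND) and returns the retry decision. A sketch, with the caveat that the exact shape of `retryData` (here assumed to carry `retryContext.retryCount` as in the code above) is an assumption:

```javascript
var azure = require('azure-storage');

var retryPolicy = new azure.RetryPolicyFilter();
retryPolicy.retryCount = 3;
retryPolicy.retryInterval = 3000; // milliseconds

retryPolicy.shouldRetry = function (statusCode, retryData) {
  var attempts = (retryData && retryData.retryContext) ? retryData.retryContext.retryCount : 0;
  return {
    retryInterval: this.retryInterval,
    // Retry server errors and request timeouts until the attempt budget is spent.
    retryable: attempts < this.retryCount && (statusCode >= 500 || statusCode === 408)
  };
};

var blobService = azure.createBlobService().withFilter(retryPolicy);
```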
@@ -17,6 +17,7 @@
var util = require('util');

var azureUtil = require('../util/util');
+var errors = require('../errors/errors');
var SR = require('../util/sr');
var Constants = require('../util/constants');

@@ -28,7 +29,7 @@ exports = module.exports;
exports.DEFAULT_PROTOCOL = Constants.HTTPS;

var NoMatchError = function (msg, constr) {
-  Error.captureStackTrace(this, constr || this);
+  errors.captureStackTrace(this, constr || this);
  this.message = msg || 'Error';
};
@@ -834,6 +834,8 @@ StorageServiceClient._parseResponse = function (response, xml2jsSettings, option
    }
  } else if (contentType.indexOf('application/xml') !== -1 || contentType.indexOf('application/atom+xml') !== -1) {
    response.body = parseXml(response.body);
+  } else if (contentType.indexOf('text/html') !== -1) {
+    response.body = response.body;
  } else {
    throw new SyntaxError(SR.CONTENT_TYPE_MISSING, null);
  }
@@ -108,7 +108,7 @@ SpeedSummary.prototype._getInternalSpeed = function(totalSize, elapsedTime, huma
  }
  var speed = totalSize / elapsedTime;
  if(humanReadable !== false) {
-    speed = toHumanReadableSize(speed) + '/S';
+    speed = toHumanReadableSize(speed) + '/s';
  }
  return speed;
};
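`_getInternalSpeed` feeds the human-readable values returned by `SpeedSummary.getSpeed` and `getAverageSpeed`, so strings that previously ended in `/S` now end in `/s`. A sketch of where those values surface, assuming placeholder container, blob, and file names:

```javascript
var azure = require('azure-storage');

var blobService = azure.createBlobService();

// Upload APIs hand back the SpeedSummary they update while the transfer runs.
var speedSummary = blobService.createBlockBlobFromLocalFile('uploads', 'bigfile.bin', './bigfile.bin', function (error) {
  if (!error) {
    // Human-readable values now use the lowercase unit, e.g. '1.50MB/s'.
    console.log('average speed: ' + speedSummary.getAverageSpeed(true));
    console.log('complete: ' + speedSummary.getCompletePercent() + '%');
  }
});
```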
@@ -31,7 +31,7 @@ var Constants = {
  /*
  * Specifies the value to use for UserAgent header.
  */
-  USER_AGENT_PRODUCT_VERSION: '1.3.1',
+  USER_AGENT_PRODUCT_VERSION: '1.4.0',

  /**
  * The number of default concurrent requests for parallel operation.
@@ -3463,7 +3463,7 @@ BlobService.prototype.listBlocks = function (container, blob, blocklisttype, opt
* Generate a random block id prefix
*/
BlobService.prototype.generateBlockIdPrefix = function () {
-  var prefix = Math.random().toString(16);
+  var prefix = Math.floor(Math.random() * 0x100000000).toString(16);
  return azureutil.zeroPaddingString(prefix, 8);
};

@@ -4259,7 +4259,7 @@ BlobService.prototype._createBlobFromText = function (container, blob, blobType,
    }
  };

-  var contentLength = (Buffer.isBuffer(content) ? content.length : Buffer.byteLength(content));
+  var contentLength = azureutil.objectIsNull(content) ? 0 : ((Buffer.isBuffer(content) ? content.length : Buffer.byteLength(content)));
  this._createBlob(container, blob, blobType, contentLength, options, creationCallback);

  return options.speedSummary;

@@ -4515,12 +4515,12 @@ BlobService.prototype._uploadBlobFromText = function (isNewBlob, container, blob
  var options;
  azureutil.normalizeArgs(optionsOrCallback, callback, function (o, c) { options = o; callback = c; });
  options.speedSummary = options.speedSummary || new SpeedSummary(blob);
-  options[HeaderConstants.CONTENT_TYPE] = 'text/plain;charset="utf-8"';
+  options[HeaderConstants.CONTENT_TYPE] = (options.contentSettings && options.contentSettings.contentType) || 'text/plain;charset="utf-8"';

  var self = this;
  var startUpload = function () {
    var operationFunc;
-    var length = Buffer.isBuffer(content) ? content.length : Buffer.byteLength(content);
+    var length = azureutil.objectIsNull(content) ? 0 : (Buffer.isBuffer(content) ? content.length : Buffer.byteLength(content));

    if (blobType === BlobConstants.BlobTypes.BLOCK) {
      // default to true, unless explicitly set to false

@@ -4622,7 +4622,7 @@ BlobService.prototype._putBlockBlob = function (container, blob, text, stream, l
  if (!options.speedSummary) {
    options.speedSummary = new SpeedSummary(blob);
  }

  var speedSummary = options.speedSummary;
  speedSummary.totalSize = length;

@@ -4999,7 +4999,7 @@ BlobService.prototype._uploadContentFromChunkStream = function (container, blob,
  speedSummary.totalSize = streamLength;

  // initialize chunk allocator
-  var allocator = new ChunkAllocator(sizeLimitation, options.parallelOperationThreadCount, { logger: this.logger });
+  var allocator = new ChunkAllocator(sizeLimitation, parallelOperationThreadCount, { logger: this.logger });
  chunkStream.setOutputLength(streamLength);

  // if this is a FileReadStream, set the allocator on that stream
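The `generateBlockIdPrefix` change corresponds to the 1.3.2 note "Prevent a blockId from being generated with a decimal point": `Math.random()` is a fraction, so its base-16 string keeps a `0.` prefix, while scaling to a 32-bit integer first does not. A quick illustration:

```javascript
// Old approach: the fractional value leaks '0.' into the prefix.
console.log(Math.random().toString(16));                            // e.g. '0.8f4e2a9c1b'

// New approach: an integer renders as plain hex digits,
// which zeroPaddingString then pads to 8 characters.
console.log(Math.floor(Math.random() * 0x100000000).toString(16));  // e.g. '8f4e2a9c'
```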
@@ -3211,7 +3211,7 @@ FileService.prototype._createFileFromChunkStream = function(share, directory, fi
  var parallelOperationThreadCount = options.parallelOperationThreadCount || this.parallelOperationThreadCount;

  // initialize chunk allocator
-  var allocator = new ChunkAllocator(sizeLimitation, options.parallelOperationThreadCount, { logger: this.logger });
+  var allocator = new ChunkAllocator(sizeLimitation, parallelOperationThreadCount, { logger: this.logger });

  // if this is a FileReadStream, set the allocator on that stream
  if (chunkStream._stream && chunkStream._stream.setMemoryAllocator) {
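Both the blob and file fixes above make the `ChunkAllocator` use the resolved `parallelOperationThreadCount` (the per-call option or the service default) instead of reading the raw option, which may be undefined. A sketch of setting it per call, with placeholder share and file names:

```javascript
var azure = require('azure-storage');

var fileService = azure.createFileService();

// Cap the upload at 5 parallel range requests; the chunk allocator now
// sizes its buffer pool from this same resolved value.
fileService.createFileFromLocalFile('myshare', '', 'bigfile.bin', './bigfile.bin',
  { parallelOperationThreadCount: 5 },
  function (error) {
    if (!error) {
      console.log('upload finished');
    }
  });
```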
@@ -1355,6 +1355,12 @@ QueueService.prototype._getOrPeekMessages = function (queue, optionsOrCallback,
    .withQueryOption(QueryStringConstants.VISIBILITY_TIMEOUT, options.visibilityTimeout)
    .withQueryOption(QueryStringConstants.PEEK_ONLY, options.peekOnly);

+  if (options.peekOnly) {
+    // For peek message, it's a read-only action and can be performed against secondary endpoint.
+    options.requestLocationMode = RequestLocationMode.PRIMARY_OR_SECONDARY;
+  }
+
  var messageEncoder = this.messageEncoder;
  var processResponseCallback = function (responseObject, next) {
    responseObject.queueMessageResults = null;
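Because peeking is read-only, `_getOrPeekMessages` now allows `PRIMARY_OR_SECONDARY` when `peekOnly` is set, which is what the 1.4.0 note "Allowed `QueueService.peekMessages` against secondary endpoint" refers to. A sketch for an RA-GRS account; the queue name is a placeholder and `locationMode` is the general request option assumed here for routing the read:

```javascript
var azure = require('azure-storage');

var queueService = azure.createQueueService();

queueService.peekMessages('taskqueue', {
  numOfMessages: 5,
  // Route this read-only call to the secondary endpoint (RA-GRS accounts).
  locationMode: azure.StorageUtilities.LocationMode.SECONDARY_ONLY
}, function (error, messages) {
  if (!error) {
    messages.forEach(function (message) {
      console.log(message.messageText);
    });
  }
});
```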
@@ -1,7 +1,7 @@
{
  "name": "azure-storage",
  "author": "Microsoft Corporation",
-  "version": "1.3.1",
+  "version": "1.4.0",
  "description": "Microsoft Azure Storage Client Library for Node.js",
  "typings": "typings/azure-storage/azure-storage.d.ts",
  "tags": [

@@ -38,10 +38,11 @@
    "grunt": "~0.4.2",
    "grunt-contrib-jshint": "~0.11.0",
    "grunt-devserver": "^0.6.0",
-    "grunt-jsdoc": "~0.5.1",
+    "grunt-jsdoc": "~2.1.0",
    "grunt-mocha": "^0.4.12",
    "grunt-mocha-test": "^0.12.7",
-    "grunt-nsp-package": "0.0.5",
+    "grunt-nsp": "^2.3.1",
+    "ink-docstrap": "^1.3.0",
    "istanbul": "^0.3.22",
    "jshint": ">= 2.1.4",
    "mocha": ">= 1.18.0",
@@ -174,7 +174,7 @@ describe('exponentialretrypolicyfilter-tests', function () {
      var fileSize = 100;

      // Real stream length is smaller than the expected data length to mock the client timeout error to trigger the retry
-      var fileBuffer = new Buffer( fileSize % 2 );
+      var fileBuffer = new Buffer( fileSize / 2 );
      fileBuffer.fill(1);
      fs.writeFileSync(localTempFileName, fileBuffer);

@@ -185,7 +185,7 @@ describe('exponentialretrypolicyfilter-tests', function () {

      fileService.createFile(shareName, '', fileName, fileSize, function(err) {
        assert.equal(err, null);

        // Expect 100 bytes to sent but the stream only have 50 bytes.
        // It'll result in ECONNRESET error and should NOT retry. If retry, it'll hang to wait for data from the stream but the stream is already closed as the data already sent out in the 1st failed request.
        fileService.createRangesFromStream(shareName, '', fileName, rfs.createReadStream(localTempFileName), 0, fileSize - 1, function(err, result, response){
@@ -133,7 +133,7 @@ describe('speed-summary-tests', function() {
      summary.totalSize = 100;
      summary.completeSize = 10;
      console.log(summary.getAverageSpeed(false));
-      assert.equal(summary.getAverageSpeed(true), '10.00B/S');
+      assert.equal(summary.getAverageSpeed(true), '10.00B/s');
      assert.equal(summary.getAverageSpeed(false).toString(), '10');
      done();
    });

@@ -142,7 +142,7 @@ describe('speed-summary-tests', function() {
      var summary = new speedSummary('test');
      summary.totalSize = 100;
      summary.completeSize = 15;
-      assert.equal(summary.getSpeed(true), '0B/S');
+      assert.equal(summary.getSpeed(true), '0B/s');
      assert.equal(summary.getSpeed(false).toString(), '0');
      done();
    });
Some file diffs are not shown because one or more lines are too long.
@@ -199,7 +199,6 @@ describe('blob-uploaddownload-tests', function () {
    var blobName = 'blobs/' + testutil.generateId(blobNamesPrefix, blobNames, suite.isMocked);
    var blobText = 'Hello World!';

-    // Create the empty page blob
    blobService.createBlockBlobFromText(containerName, blobName, blobText, function (err) {
      assert.equal(err, null);

@@ -212,6 +211,47 @@ describe('blob-uploaddownload-tests', function () {
      });
    });
  });

+  it('CreateEmptyBlob', function (done) {
+    var blobName = 'blobs/' + testutil.generateId(blobNamesPrefix, blobNames, suite.isMocked);
+
+    // Create the empty block blob
+    blobService.createBlockBlobFromText(containerName, blobName, null, function (err) {
+      assert.equal(err, null);
+
+      blobService.getBlobProperties(containerName, blobName, function (error, properties) {
+        assert.equal(error, null);
+        assert.equal(properties.container, containerName);
+        assert.equal(properties.name, blobName);
+
+        done();
+      });
+    });
+  });
+
+  it('createBlockBlobFromText with specified content type', function (done) {
+    var blobName = 'blobs/' + testutil.generateId(blobNamesPrefix, blobNames, suite.isMocked);
+    var blobText = '<html><h1>THIS IS HTML</h1></html>';
+    var contentType = 'text/html';
+    var options = {
+      contentSettings: {
+        contentType: contentType
+      }
+    };
+
+    blobService.createBlockBlobFromText(containerName, blobName, blobText, options, function (err) {
+      assert.equal(err, null);
+
+      blobService.getBlobProperties(containerName, blobName, function (error, properties) {
+        assert.equal(error, null);
+        assert.equal(properties.container, containerName);
+        assert.equal(properties.name, blobName);
+        assert.equal(properties.contentSettings.contentType, contentType);
+
+        done();
+      });
+    });
+  });
+
  // This test ensures that blocks can be created from files correctly
  // and was created to ensure that the request module does not magically add

@@ -233,6 +273,13 @@ describe('blob-uploaddownload-tests', function () {
    });
  });

+  it('returns correct error when specifying invalid content-length', function (done) {
+    blobService.createBlockFromStream('test', containerName, blockBlobName, rfs.createReadStream(blockFileName), 'invalidlength', function (error) {
+      assert.ok(error.message.indexOf('invalid content length') !== -1);
+      done();
+    });
+  });
+
  describe('blob-piping-tests', function() {
    runOrSkip('should be able to upload block blob from piped stream', function (done) {
      var blobName = testutil.generateId(blobNamesPrefix, blobNames, suite.isMocked);
@@ -585,21 +585,21 @@ describe('FileShare', function () {
    var readWriteSharePolicy = {
      AccessPolicy: {
        Permissions: 'rw',
-        Expiry: new Date('2016-10-01')
+        Expiry: new Date('2017-10-01')
      }
    };

    var readCreateSharePolicy = {
      AccessPolicy: {
        Permissions: 'rc',
-        Expiry: new Date('2016-10-01')
+        Expiry: new Date('2017-10-01')
      }
    };

    var filePolicy = {
      AccessPolicy: {
        Permissions: 'd',
-        Expiry: new Date('2016-10-10')
+        Expiry: new Date('2017-10-10')
      }
    };
@@ -5163,7 +5163,7 @@ declare module azurestorage {
      function Binary(value: Buffer|string): EntityProperty<Buffer>;
      function Boolean(value: boolean|string): EntityProperty<boolean>;
      function String(value: string): EntityProperty<string>;
-      function Guid(value: UUID|string|Buffer): EntityProperty<UUID>;
+      function Guid(value: string|Buffer|any): EntityProperty<any>;
      function Double(value: number|string): EntityProperty<number>;
      function DateTime(value: Date|string): EntityProperty<Date>;
    }

@@ -5340,7 +5340,7 @@ declare module azurestorage {
      * @example
      * var query = TableQuery.guidFilter('GuidField', QueryComparisons.EQUAL, guid.v1());
      */
-      guidFilter(propertyName: string, operation: string, value: UUID | string): string;
+      guidFilter(propertyName: string, operation: string, value: string | any): string;

      /**
      * Generates a property filter condition string for a 'binary' value.

@@ -7156,7 +7156,7 @@ declare module azurestorage {
      * };
      * var blobService = azure.createBlobService().withFilter(retryPolicy);
      */
-      constructor(retryCount?: number, retryInterval?: number);
+      constructor(retryCount?: number, retryInterval?: number): RetryPolicyFilter;

      shouldRetry(statusCode: number, retryData: RetryPolicyFilter.IRetryRequestOptions): {
        retryInterval: number;

@@ -8776,7 +8776,7 @@ declare module azurestorage {
  */
  export function createQueueServiceWithSas(hostUri: string | StorageHost, sasToken: string): QueueService;

-  export function generateAccountSharedAccessSignature(storageAccountOrConnectionString: string, storageAccessKey: string, sharedAccessAccountPolicy: common.SharedAccessPolicy);
+  export function generateAccountSharedAccessSignature(storageAccountOrConnectionString: string, storageAccessKey: string, sharedAccessAccountPolicy: common.SharedAccessPolicy): string;

  interface StorageError extends Error {
    statusCode?: number;

@@ -8868,4 +8868,4 @@ declare module azurestorage {
  export import RetryPolicyFilter = common.filters.retrypolicyfilter.RetryPolicyFilter;
}

export = azurestorage;
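The `Guid`/`guidFilter` typings above are relaxed so a plain GUID string is accepted, matching the `@example` comment in the declaration file. A sketch, assuming the `uuid` package as the GUID source (any valid GUID string works) and a placeholder table name:

```javascript
var azure = require('azure-storage');
var uuid = require('uuid'); // assumed GUID source; any valid GUID string works

var tableService = azure.createTableService();

// Build a filter from a plain GUID string, as the relaxed typings now allow.
var query = new azure.TableQuery()
  .where(azure.TableQuery.guidFilter('GuidField', azure.TableUtilities.QueryComparisons.EQUAL, uuid.v4()));

tableService.queryEntities('mytable', query, null, function (error, result) {
  if (!error) {
    console.log(result.entries.length + ' matching entities');
  }
});
```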
@@ -1,24 +0,0 @@
-// Generated by typings
-// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/56295f5058cac7ae458540423c50ac2dcf9fc711/node-uuid/node-uuid.d.ts
-declare module __NodeUUID {
-  /**
-   * Overloads for node environment
-   * We need to duplicate some declarations because
-   * interface merging doesn't work with overloads
-   */
-  interface UUID {
-    v1(options?: UUIDOptions): string;
-    v1(options?: UUIDOptions, buffer?: number[], offset?: number): number[];
-    v1(options?: UUIDOptions, buffer?: Buffer, offset?: number): Buffer;
-
-    v4(options?: UUIDOptions): string;
-    v4(options?: UUIDOptions, buffer?: number[], offset?: number): number[];
-    v4(options?: UUIDOptions, buffer?: Buffer, offset?: number): Buffer;
-
-    parse(id: string, buffer?: number[], offset?: number): number[];
-    parse(id: string, buffer?: Buffer, offset?: number): Buffer;
-
-    unparse(buffer: number[], offset?: number): string;
-    unparse(buffer: Buffer, offset?: number): string;
-  }
-}
@@ -1,8 +0,0 @@
-{
-  "resolution": "main",
-  "tree": {
-    "src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/56295f5058cac7ae458540423c50ac2dcf9fc711/node-uuid/node-uuid.d.ts",
-    "raw": "registry:dt/node-uuid#0.0.0+20160316155526",
-    "typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/56295f5058cac7ae458540423c50ac2dcf9fc711/node-uuid/node-uuid.d.ts"
-  }
-}
@@ -1,2 +1 @@
-/// <reference path="globals/node-uuid/index.d.ts" />
/// <reference path="globals/node/index.d.ts" />