From 81b0dac3f57068d1fda03eeadfcff1378b29af0a Mon Sep 17 00:00:00 2001 From: XiaoningLiu Date: Thu, 22 Dec 2016 17:36:07 +0800 Subject: [PATCH] Added samples and Node.js version bundle scripts; Added npm run genjs command for quick JavaScript SDK generating; --- .gitignore | 3 + browser/azure-storage.blob.export.js | 73 +++ browser/azure-storage.file.export.js | 69 +++ browser/azure-storage.queue.export.js | 70 +++ browser/azure-storage.table.export.js | 70 +++ browser/bundle.js | 49 ++ browser/samples/cors.PNG | Bin 0 -> 13072 bytes browser/samples/sample-blob.html | 546 ++++++++++++++++++++++ browser/samples/sample-file.html | 646 ++++++++++++++++++++++++++ browser/samples/sample-queue.html | 465 ++++++++++++++++++ browser/samples/sample-table.html | 440 ++++++++++++++++++ package.json | 5 +- 12 files changed, 2435 insertions(+), 1 deletion(-) create mode 100644 browser/azure-storage.blob.export.js create mode 100644 browser/azure-storage.file.export.js create mode 100644 browser/azure-storage.queue.export.js create mode 100644 browser/azure-storage.table.export.js create mode 100644 browser/bundle.js create mode 100644 browser/samples/cors.PNG create mode 100644 browser/samples/sample-blob.html create mode 100644 browser/samples/sample-file.html create mode 100644 browser/samples/sample-queue.html create mode 100644 browser/samples/sample-table.html diff --git a/.gitignore b/.gitignore index dbed5b5..3d4ddc7 100644 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,6 @@ coverage # Tests artefacts fileservice_test_* blobservice_test.tmp + +# Browserify bundle scripts +browser/bundle diff --git a/browser/azure-storage.blob.export.js b/browser/azure-storage.blob.export.js new file mode 100644 index 0000000..f78130b --- /dev/null +++ b/browser/azure-storage.blob.export.js @@ -0,0 +1,73 @@ +// +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +var AzureStorage = window.AzureStorage || {}; + +AzureStorage.generateDevelopmentStorageCredentials = function (proxyUri) { + var devStore = 'UseDevelopmentStorage=true;'; + if(proxyUri){ + devStore += 'DevelopmentStorageProxyUri=' + proxyUri; + } + + return devStore; +}; + +var BlobService = require('../lib/services/blob/blobservice'); + +AzureStorage.BlobService = BlobService; +AzureStorage.BlobUtilities = require('../lib/services/blob/blobutilities'); + +AzureStorage.createBlobService = function (storageAccountOrConnectionString, storageAccessKey, host) { + return new BlobService(storageAccountOrConnectionString, storageAccessKey, host, null); +}; + +AzureStorage.createBlobServiceWithSas = function (host, sasToken) { + return new BlobService(null, null, host, sasToken); +}; + +AzureStorage.createBlobServiceAnonymous = function (host) { + return new BlobService(null, null, host, null); +}; + +var azureCommon = require('../lib/common/common'); +var StorageServiceClient = azureCommon.StorageServiceClient; +var SharedKey = azureCommon.SharedKey; + +AzureStorage.generateAccountSharedAccessSignature = function(storageAccountOrConnectionString, storageAccessKey, sharedAccessAccountPolicy) +{ + var storageSettings = StorageServiceClient.getStorageSettings(storageAccountOrConnectionString, storageAccessKey); + var sharedKey = new SharedKey(storageSettings._name, storageSettings._key); + + return sharedKey.generateAccountSignedQueryString(sharedAccessAccountPolicy); +}; + +AzureStorage.Constants = azureCommon.Constants; +AzureStorage.StorageUtilities = azureCommon.StorageUtilities; +AzureStorage.AccessCondition = azureCommon.AccessCondition; + +AzureStorage.SR = azureCommon.SR; +AzureStorage.StorageServiceClient = StorageServiceClient; +AzureStorage.Logger = azureCommon.Logger; +AzureStorage.WebResource = azureCommon.WebResource; +AzureStorage.Validate = azureCommon.validate; +AzureStorage.date = azureCommon.date; + +// Other filters +AzureStorage.LinearRetryPolicyFilter = azureCommon.LinearRetryPolicyFilter; +AzureStorage.ExponentialRetryPolicyFilter = azureCommon.ExponentialRetryPolicyFilter; +AzureStorage.RetryPolicyFilter = azureCommon.RetryPolicyFilter; + +window.AzureStorage = AzureStorage; \ No newline at end of file diff --git a/browser/azure-storage.file.export.js b/browser/azure-storage.file.export.js new file mode 100644 index 0000000..aa4f963 --- /dev/null +++ b/browser/azure-storage.file.export.js @@ -0,0 +1,69 @@ +// +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +var AzureStorage = window.AzureStorage || {}; + +AzureStorage.generateDevelopmentStorageCredentials = function (proxyUri) { + var devStore = 'UseDevelopmentStorage=true;'; + if(proxyUri){ + devStore += 'DevelopmentStorageProxyUri=' + proxyUri; + } + + return devStore; +}; + +var FileService = require('../lib/services/file/fileservice'); + +AzureStorage.FileService = FileService; +AzureStorage.FileUtilities = require('../lib/services/file/fileutilities'); + +AzureStorage.createFileService = function (storageAccountOrConnectionString, storageAccessKey, host) { + return new FileService(storageAccountOrConnectionString, storageAccessKey, host); +}; + +AzureStorage.createFileServiceWithSas = function (hostUri, sasToken) { + return new FileService(null, null, hostUri, sasToken); +}; + +var azureCommon = require('../lib/common/common'); +var StorageServiceClient = azureCommon.StorageServiceClient; +var SharedKey = azureCommon.SharedKey; + +AzureStorage.generateAccountSharedAccessSignature = function(storageAccountOrConnectionString, storageAccessKey, sharedAccessAccountPolicy) +{ + var storageSettings = StorageServiceClient.getStorageSettings(storageAccountOrConnectionString, storageAccessKey); + var sharedKey = new SharedKey(storageSettings._name, storageSettings._key); + + return sharedKey.generateAccountSignedQueryString(sharedAccessAccountPolicy); +}; + +AzureStorage.Constants = azureCommon.Constants; +AzureStorage.StorageUtilities = azureCommon.StorageUtilities; +AzureStorage.AccessCondition = azureCommon.AccessCondition; + +AzureStorage.SR = azureCommon.SR; +AzureStorage.StorageServiceClient = StorageServiceClient; +AzureStorage.Logger = azureCommon.Logger; +AzureStorage.WebResource = azureCommon.WebResource; +AzureStorage.Validate = azureCommon.validate; +AzureStorage.date = azureCommon.date; + +// Other filters +AzureStorage.LinearRetryPolicyFilter = azureCommon.LinearRetryPolicyFilter; +AzureStorage.ExponentialRetryPolicyFilter = azureCommon.ExponentialRetryPolicyFilter; +AzureStorage.RetryPolicyFilter = azureCommon.RetryPolicyFilter; + +window.AzureStorage = AzureStorage; \ No newline at end of file diff --git a/browser/azure-storage.queue.export.js b/browser/azure-storage.queue.export.js new file mode 100644 index 0000000..4d43949 --- /dev/null +++ b/browser/azure-storage.queue.export.js @@ -0,0 +1,70 @@ +// +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +var AzureStorage = window.AzureStorage || {}; + +AzureStorage.generateDevelopmentStorageCredentials = function (proxyUri) { + var devStore = 'UseDevelopmentStorage=true;'; + if(proxyUri){ + devStore += 'DevelopmentStorageProxyUri=' + proxyUri; + } + + return devStore; +}; + +var QueueService = require('../lib/services/queue/queueservice'); + +AzureStorage.QueueService = QueueService; +AzureStorage.QueueUtilities = require('../lib/services/queue/queueutilities'); +AzureStorage.QueueMessageEncoder = require('../lib/services/queue/queuemessageencoder'); + +AzureStorage.createQueueService = function (storageAccountOrConnectionString, storageAccessKey, host) { + return new QueueService(storageAccountOrConnectionString, storageAccessKey, host); +}; + +AzureStorage.createQueueServiceWithSas = function(hostUri, sasToken) { + return new QueueService(null, null, hostUri, sasToken); +}; + +var azureCommon = require('../lib/common/common'); +var StorageServiceClient = azureCommon.StorageServiceClient; +var SharedKey = azureCommon.SharedKey; + +AzureStorage.generateAccountSharedAccessSignature = function(storageAccountOrConnectionString, storageAccessKey, sharedAccessAccountPolicy) +{ + var storageSettings = StorageServiceClient.getStorageSettings(storageAccountOrConnectionString, storageAccessKey); + var sharedKey = new SharedKey(storageSettings._name, storageSettings._key); + + return sharedKey.generateAccountSignedQueryString(sharedAccessAccountPolicy); +}; + +AzureStorage.Constants = azureCommon.Constants; +AzureStorage.StorageUtilities = azureCommon.StorageUtilities; +AzureStorage.AccessCondition = azureCommon.AccessCondition; + +AzureStorage.SR = azureCommon.SR; +AzureStorage.StorageServiceClient = StorageServiceClient; +AzureStorage.Logger = azureCommon.Logger; +AzureStorage.WebResource = azureCommon.WebResource; +AzureStorage.Validate = azureCommon.validate; +AzureStorage.date = azureCommon.date; + +// Other filters +AzureStorage.LinearRetryPolicyFilter = azureCommon.LinearRetryPolicyFilter; +AzureStorage.ExponentialRetryPolicyFilter = azureCommon.ExponentialRetryPolicyFilter; +AzureStorage.RetryPolicyFilter = azureCommon.RetryPolicyFilter; + +window.AzureStorage = AzureStorage; \ No newline at end of file diff --git a/browser/azure-storage.table.export.js b/browser/azure-storage.table.export.js new file mode 100644 index 0000000..6be0a40 --- /dev/null +++ b/browser/azure-storage.table.export.js @@ -0,0 +1,70 @@ +// +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +var AzureStorage = window.AzureStorage || {}; + +AzureStorage.generateDevelopmentStorageCredentials = function (proxyUri) { + var devStore = 'UseDevelopmentStorage=true;'; + if(proxyUri){ + devStore += 'DevelopmentStorageProxyUri=' + proxyUri; + } + + return devStore; +}; + +var TableService = require('../lib/services/table/tableservice'); +AzureStorage.TableService = TableService; +AzureStorage.TableQuery = require('../lib/services/table/tablequery'); +AzureStorage.TableBatch = require('../lib/services/table/tablebatch'); +AzureStorage.TableUtilities = require('../lib/services/table/tableutilities'); + +AzureStorage.createTableService = function (storageAccountOrConnectionString, storageAccessKey, host) { + return new TableService(storageAccountOrConnectionString, storageAccessKey, host); +}; + +AzureStorage.createTableServiceWithSas = function (hostUri, sasToken) { + return new TableService(null, null, hostUri, sasToken); +}; + +var azureCommon = require('../lib/common/common'); +var StorageServiceClient = azureCommon.StorageServiceClient; +var SharedKey = azureCommon.SharedKey; + +AzureStorage.generateAccountSharedAccessSignature = function(storageAccountOrConnectionString, storageAccessKey, sharedAccessAccountPolicy) +{ + var storageSettings = StorageServiceClient.getStorageSettings(storageAccountOrConnectionString, storageAccessKey); + var sharedKey = new SharedKey(storageSettings._name, storageSettings._key); + + return sharedKey.generateAccountSignedQueryString(sharedAccessAccountPolicy); +}; + +AzureStorage.Constants = azureCommon.Constants; +AzureStorage.StorageUtilities = azureCommon.StorageUtilities; +AzureStorage.AccessCondition = azureCommon.AccessCondition; + +AzureStorage.SR = azureCommon.SR; +AzureStorage.StorageServiceClient = StorageServiceClient; +AzureStorage.Logger = azureCommon.Logger; +AzureStorage.WebResource = azureCommon.WebResource; +AzureStorage.Validate = azureCommon.validate; +AzureStorage.date = azureCommon.date; + +// Other filters +AzureStorage.LinearRetryPolicyFilter = azureCommon.LinearRetryPolicyFilter; +AzureStorage.ExponentialRetryPolicyFilter = azureCommon.ExponentialRetryPolicyFilter; +AzureStorage.RetryPolicyFilter = azureCommon.RetryPolicyFilter; + +window.AzureStorage = AzureStorage; \ No newline at end of file diff --git a/browser/bundle.js b/browser/bundle.js new file mode 100644 index 0000000..eacf66c --- /dev/null +++ b/browser/bundle.js @@ -0,0 +1,49 @@ +// +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +var browserify = require('browserify'); +var factor = require('factor-bundle'); +var fs = require('fs'); +var path = require('path'); + +var outputFolder = 'bundle'; +var outputFolderPath = path.resolve(__dirname, outputFolder); + +console.log('Generating Azure Storage JavaScript Client Library to ' + outputFolderPath + ' ...\n'); + +if (!fs.existsSync(outputFolderPath)) { + fs.mkdirSync(outputFolderPath); +} + +var b = browserify([ + path.resolve(__dirname, 'azure-storage.blob.export.js'), + path.resolve(__dirname, 'azure-storage.file.export.js'), + path.resolve(__dirname, 'azure-storage.queue.export.js'), + path.resolve(__dirname, 'azure-storage.table.export.js') +], {require: ['stream', 'util', 'buffer']}); + +b.plugin(factor, { + outputs: [ + path.resolve(outputFolderPath, 'azure-storage.blob.js'), + path.resolve(outputFolderPath, 'azure-storage.file.js'), + path.resolve(outputFolderPath, 'azure-storage.queue.js'), + path.resolve(outputFolderPath, 'azure-storage.table.js') + ] +}); + +b.bundle().pipe( + fs.createWriteStream(path.resolve(outputFolderPath, 'azure-storage.common.js')) +); diff --git a/browser/samples/cors.PNG b/browser/samples/cors.PNG new file mode 100644 index 0000000000000000000000000000000000000000..4f6a97f5a12605924e7d4de002117b2f73d6ad30 GIT binary patch literal 13072 zcmeI2XIPWnzUHG~K}3|QfPf&qNtgbjAkuqp(tCi=gisVkrAZIHNGCL@AxIIG-lPi& zO$ZPWLP-b&0u$eT&b(*#wfCNV=9&-lVLs$ao;+=3J!{?TfB)`8>*=UbU1Pci005}e zpQ;!D02ic5uNPM@lfLQO<~k?6UGOtddjzN%VL_8N$X%4Rl>vad1jm1`2ye9j@-U<&+gdKj9F|vwZgbgs9gJLf4K*%ox=tS+|_AmSMj|# zb3fxXf&+Qu9_twEBTI_;%qjU3`7)i^vVqXe&PevYmfpE{>{rEh{$poV{MAVI71n>e3oq*} z`Hn%@V6ORwc{sPoctDP>V^jCs!?SDM-EC7hG~~O zEH}>D*~Vs%y#th9p5$fjY)Vi50eqSeik_3x!w`>Sv62Go1k^$5;)$Harem!rL^eSh?Ph0K1Se}@?L$aPPT{g8`NBa)8`h*|wnOd59(A8CWxZs0pegVJBeIhoqg0|f8 zkk01`0?mJPe!AI5g}!BIHb`o^-8b$Q1Woog(j&gkD-FLA(A7d6NZbr241<@wbSEyk zfA`q8(B6K8MJ9-3D8Du+oiK3AZ82UX`ad#$b_EBbhI=JBoI&_ga+|{#Kb1kry3;3Y z@|dhvTD6TnlsCFmng#FuJ^D0nB*f9!5ngWrJV&UT`M|iO zqMLkg-}wWW3)Uk%A-*A|I@j?9Uuu?IEfv>8H7l29S~*3r-Zm>Dj7&reK>|MfO1hR% zR=NN(1A=*zm9$;f+gB*Hyqzotw;LborWIY;Hng=&&0+Gl%S-SUp>+MKJlBG|vAU)^ zrKF&mQ2CY>yy2^ekwy`j@shXj4OZqtu=DL6x*W$@`xGXx!|mGzP@~>yYvY9) zM3CY!ds2E5)P z*|N(xUuluy_O6ec^20_tUZo+3pMtZ74b{vSVPX>#kA4o|uw%fkD}}4mVV*D^*sBF4 zeh4_iV_}8fREeprP^Cw4Fl`ESvZzBEBHhk9`srK`f8R#{j;l<4!v#~*88@$Zewh%S z)3F{n*kLF0x~xPn`tg3d{K1FEwVq}19I4$RZnjDlZ}>%&U_8>AC2r%Gpf?ZGQ3LL9 zVtV$+ACBfA%g6@O6oe%sY`~I*ViNFRL!n!rEw%{HCMU*Y%Lt~zn{Iq;L7#s9f*`^Y zSoEmxM%vy%(+7KZU7@x{--G3)nb^Yb*0G4xTl}r#LH4xSaNz8JTs6BURk*E5X|g6t zf)4K6Jm7x-Zrxn~NbeFmym1-a&7I;dIx=1$hdmY|=p!@MT{blKEvv$6>B!zst~pv) zSWf5nLo#)iJ8q%s*O?HvzrORbef3RhI=APX#@oy@T&t`4NMsMnjEI$%)xDAbP~}*B z-$j=oK)c;B4>B~YuaunYOIyt1ImEW>ajJ<_Fz@g4=!3DgcaivS7v^{)?)vO$%G#Yq zJ^F~bfTP2oRbN3%ds`QZnDt1*uU%hw2OAuYcm_I{s8?)W!H%FV8!u#)##^GI%T5En z@Ii+nYI}ZJnja=-pB3)n#GNn-{S#~Wb@bNmF{O8B+9(hHm=Jf~m^!))dEB^8T(b3G zFN)=hQMPAxxLvpHk@%yhb)qldD7ukg>{PEqK%OXd*4t~i%P9!i5|=0^S1buXhty;I zqw6ezXZc2aYoikrYqFZ25h5C~ZJoLvsW%iRUGY=%ud^vRN*5w5-Lu!KW%jtd(Wc%f zZlhP?mlm7%&qP9f3@BvVyx7}Ud(6cv&IPsh6s*@{JROd}k&x|~Uk_EwhC6*x4#$L_ zYFKRb`%B6_T+>g;l@omvnrb91yI#Zyr;boZ zOGE}~F(m7qd-yuT>a11$eEnDF;jnZy#^0M7k+G6SB@$^@7615Mv{$myaCZfh8B9bEYV?9R|xLW$8eES_Jdvo2u9jsP99lh!0K2@#Cb0arl@ zq$S}I>55*Wb=95T_+Ua-RG=eFYqoh}?(sVMo4#fzeTGe>W%G~w9I(MM+&*i}s!|~( zrHZsjUa{nIM-W#cbNxk!-`6<%`eu7t(Rnpest%{mh?`2-8M_p^UPm$F$X=&@5i~Jt zQ6%D;r{gI)w`0LB?Rcr@cPI8yfr7975V5t&ooQF_bXb5zi4=BY&|Bg>ZYyuTw-%*u zv@SrNYnWs@eqe1?kzOC2SP9RkswbwWrhCEiiYE>hx6T&FXKAlQuEg`UlS_a6abF7V 
z%LQKLAB%#mHr>l6t?j=e&lQ8AtutF}G6U;5l?n-$Q6;t8EFdsp2l~VN{6`oS(7sD!QN~2BE{e=%8;?JvOISipJWn zFHBef(<^?fR$NuWx|^JP)kD_zVWDGaey3%Q5;$>>8u%S2-U}ZMm+dl;&t2b!_0b6r z!K^C$F)kTsE~)&Nmt4}a!z13V2B-n^-T}YQ;s>!uv^I;99Ku||ekTj-dK3n^k82n6 zbm11@hzJ)16^CXYmAT!YBllIPi{{Q?<`05@Nmvrws)m+ui`!hw3SeF-IH~N*LKIrz zb&E>pY?l&|51NcfZyaUu_*NIKU0S%zxA{k8L%;TdxGo%Du{n_e0Zn_#=+x$BuJiGj z)(P&$3!Y}@LtY{9GH>nR+n=5)!z&Ib;P(Usr+e;w`_{a`K#L%pEVQaHE@YR!Gh!FJ zOGvG}Zu~N*xIt`qpg~2Nr8o?^V|v$Mj0H{*vTg}|)$x3^likTeBeVb0;isP}~z>ix!oW$wniB zy+usb=l?S01^&Gt=zSKoiYy46RVk65Z8{1YHC-jo=bYEsPmDr@ z8Sq)1T^jHrzGDtfNoM{N#(Ogp{OYmXk)o@GiETa(>Uh|xZ!K^3I z<7k?Sb2@IxuZdz$x(-hjA;*m zPkPdT*I5z{yDmxny@^c;%28WVE#jxD;ar~@zkW<3?ehp2lMQ&Np%9RlSYRTm0J_cP@vpAx0qZ6fttQk zWSm=&5sryB$TC<4r(Cwn#JQ7wR()av7rJswW2E6$!#BeTSC?6Sm`=$|Phw$QS?D*h zLCB_2e@NK6*^2?MaZ_mCA1V(ZzUKT=aP_F!imM81*C;g7Rf=q;jB#0a9$Ro!EVX6F zRM>zHY$+`I2Q_!mc?a~}k;)6$~u6|Kf z(8Mg1;9!k%Nh9P{-!O*xa}_~e#s1YmwZ4{qGl!(Dw`ddp?DxQZ8wj7YU4?SfIC+Wv z9sWu$xura2v*9Wh_!PGshX-eJfAaooW8lx)9XGc^>sxhKlkW^TVu_Lq!r6mkBZuLu z9z6OQ4abouIB1W*-E=l&APkPu(xeR?+I&HHr=EAM*UxC|k-HH$K}++bPn&byu1r>6 zIYdcFb?P$GIjT=c;*wURdZH<(A$G082=!;xsU2CHv3VX=kYEe#&cmDy43Sls@5_34fYcX;ks!;ALdk z^$L|K-^wd|YbQ5;hK&xVUp65V*vvz0zA2ly5Pbo#L*D15`n{NtdY;M%cyOH^ z2EMEdrVWwx3|FECJfQn`lz=qxv%Nc_+57lM+wE6LGErsoFHU=MAkhv}F~!mVK2w=A zA%QuenRzm1rH^Y_3eaCg#wjr)TathmzAjUyK81aq**)Tw>~vGok!%4T%6A4P7}YS!LW|4FV!#fZ&iuk-TI{JmCTVM z+5SMwUE)sW>Un!uc*H5W7dTwb>l8)1xn8HFMe3U>nK%?js*$rcYeWB?}OocR#pTv_dSwo^S-o5dKoeKJT~JFCB& z&wGvvwDpKz%Fs0uF1U5UtUAsLIE?Eo*vif;jam;Jvn|Z__YhetPwX2TqS5r_R`Zd)oSvPjdG~byJbC`?HO6PjD;5`)Ryc zMzfh8BJ1x>*rU7K!w4%-NlibafsfAw(=7GA_!((AS8y=zDH?cvP*)u|5$R9v6V%8| z7MUBVmo-v+)BilV&GLP{a4&x|EyTQ53Sh#?S&M|cy2`aa`z{H}J7?ZJP;H2A5-sLk z;bW_FtuK)^uD4l~VJO&7!lDKQY2FzbLVPQm2ehC0y5G}i%9sS@3EY3Q0-8LuK~3L+ zn8GtTrDOOQKDe5#G8av%M7vWRJd^)V?K2S&cnUOdi*o$D2YukI2k%c6FJ*hOam}kH z(Kt=7A;)P}vOINK>=yhuj4Z=A|E-a0`wslrpk_T~n-AJ2RH@POz%DajP^itzulbQ- zCOOx>B*x0sXO!y2!e8PyHX&b$l&>Qpg6Knur0%34J?5*0EYq7S|%tDb@`gt5KsW?G4<*}R+aQX42bU_2?e;!ve0Jc5yF zvu@CuJ)#@lRLx(|)zoi#IW>GO$XysDCimig)yPh~^e55pRT{+Sy zBxxx;%Xpy7VYBPy97zeMp%baVgwoP-8#KPrXs1baj+#c^tg(cJYwFbvR?fV87^!S+ z;Qotyl?$)rC;MRWyrA^A6gJ>>JGGk_Ffea*m4~17(0(Y8Q1ESr|6b)onuF4dp|r}0 z#(CiGKGbD^S^GaNn374t6`ZwjYDgH<^=F-bs=Fg>u$^Til!KTt7n!x3V3vPKg}A+eF?u?@Mx4HWZ~k8U?%Vz* z8ik1+JViU*?G^K^8!Sr~+uLzvjp!f7?Z0S|J6!=31YFmIzmtCLjU^lr_lYuOc9s9G z_J$u9_9UzkF*8;=&(%sdH`C<)QY;#WEx-o5DtLv|JY_V>|Gxg>U|gmt`sH!>^!TV$ zrw&`z`f}w2c6>S>`>`s$F8G{r2-fgCvouIaRSDc@nICLAVtRxm%(|S;B!@AS2mVr> zYXnwra(14ZR<2>Hd-rfemOiPjFzd`&wa)j|hGDE$c)IP76~E2Xh@RkezIwdFwLA;f zvYGWm){Ag2DdFkv;MUz9Z*%s}5+wqg*0GP-{prei{#dDbw$i+nY{C7&y)su@ocqb+ zfE9yG8`&z@mfVvKg*UggfX%Vm2gXTKUSUaMbu;Kl;Qm!F*z+{SP!wOtLz}BO+7?X2 z3B5ul?>PPZ_*%mZI`cUPFU;~xkzwO&J2Tmr_AeN}uYn9Bl3|yRpS!=cErU%3qa)tS z58)f0n(B0DB~?3NzMRL5&J>M~kBy#EQ`P1NXR^lLEy!A!k9x&?tY9_Wsl+hVpFk$I zaSk^xX6nrE4GcQ-HETPRxjWzXJ}-D?v2=8g&l4khB$w=m-F0RNKk>3EG5i$VSsPlI zVA=EINvCskCn3}Q`|iU;%NJ=p(!(VVx?^t~(%Yk63YXsanvUQIYXRTg&Kk>iXAbMV zU5%{Sh0jTT6&luLZy!*d^ zP~1m@l-S;FW^{==9;2qHw%|KDfyj1zxh-EKu)Y@Fe#tj1!iC6N4#zqt60Tea7&7nS!uHv3&g+ zP1aRWrQA6hZ!uHzUCXR$lxw5BUaNNH03L-rIt@6tkr!q;g`Zui5bue4Wu={MymAv2 zh^nmIM*ErB*Z-uvGGbMQcUf?1bcB5*`H`LvHLj)Fnf~gYkSz5$v)-mF?nOR-lfoQd&k0TqJ zaAcqc>!b9TM&2$L% z`_VoY-tM^w^j$-Zq;_hTVd8^eXs}cML?s1A(PruPm7}oo6OZi^mHJZw50oKdD?j~g zz3=gR{Xt!Y2fLT1R`8sC>#msIkgm7g2R+_yWP_jStU5=vYgf|!!eh#|7yMx}d&k)H z?DoRL7JtF631Bci%j59$go?M6zlRTZh8UvGFX#lvZcob&tFdhfI*{@+NT(*V(&GM_ zo-H4LDs#StJeD@|9Yyu*o7akPb5b{#r8RG_ntj1fkr&k(y&z5Y#~nU-lhou80cnil zm|;k3PapO>Xivx%*nIGP1^=~ALHdMtX@`g-BC!v{14gTf>}8}@{a7ffi$`8G8?F7c 
z@FqvIGwVjeJv4{@k!XLm$8e~LBIq{gu$n)Z=^I82VPsmy)l+0S|?i&&5Hv%ts zf}sJ}hT}lV`sX#DjbAR*U~UsdkO&Af=D_aYEaTB@7cJws?N#&w_AvfKMY8TkOw=n~ zeaA4Dkk(Lm|M9F^f&!k0*r*2`z=d|i)fNhlOt8UD)=SQ1_BVW4$uskm_&eEb9c=;* z%`D~HYQOfR%tcMemx$-}i4&EiLEc{0OGk{Igp5P3>439aJ3*l_mVNO_8mG(EJrm=X zh;kl_noV){bYqU$lC?RcgL!X>Jf~(FnjyT@dCsrM8$QLYluowljPwZhnKw+8VF^{K z6=rH*h&_9Nbxo&sD^S=K$wHC+aLA#?6QpAl7*LgLXZZ5)`uD7e)`W$_&c3Ow+w>6X z`?|`f70b^+;=K^CJO?GhP#3e-U|LNMGh7@5&z$pRNPuG@W@te#r6!o^lPjn;^@1i-teg6WZ06BuM$iI4jh@a6`?#Mt)NXgx?R|*B8{H58PdO(RFugD`Kk5(XX$*%K(+c* zXqNrhRaFjsjWrDk#LqSOMXq}KH5RgEkLKH_ZqnB<41uboP}ubL@;W#gDJdR$W{&D5+WhG@K3^i$d!1o^ zuR7G7CUv8)>VzK_=aoGNzWL#0FFl|dA5;Doxq1Aso2Qa+5%_mz!<+%&T}0RmPu9rM zhW;ATQ=Uz9tHb_WP$c?JWtGVAJIVX{bAn`&BeiIcDodsQ}kVJ*X@$J@uk23tF zw8d?RDm|$z?(;y023d6q8O`x!l|p&}sFiboT^1PQeBnFF>65 zUq;0IXN6m1gJ+s$*|>+pQ~e&p86{0_OQ<2&7B1J}0zXXM)>t9)R;F2xQzjOIAfJg; z%yrwex_65ydxf$>R&~}WlBV@bvU7!r+*HZ;0{elK?5Zs$&3x)4OX!az=dhUGVJ&?p zDXWIy*8`5#yOAQXn>aVOOfmNXCV7J{ZfR=8h`ro}W6H4;H$JG;(q#b^ z;0M0)m-1KpQfF@{toBfD_H>36+>E~?y?x0WsUvwmP49k7zH{0>IwQ!>wi%CUXxfx0 zK@?T+K}|1*S-1-~cxuXElwZZsjwZHB!i>ruX_OX)dddkH<$MP@FwxH|WfkP6@zcj& zxi;)>qvW|%XRn@x1=dHc_Iw!$30qx$G9q1a>3d>Y}tr%nIFD<>) zvX5h{aZSSIfmNe&J&JQHgtYzgz8|_?C1#=tG(lfv%7>JoJ7wx%D zm0=(1YiOsrLGzK<#Eph}p8w+Qt}i*fJ(-0H^v)kM%PQR@m^o91f*HOF{-Gel+?;2<~?|{)^EJCy?VP{v@Kk8U?xCXyB0c34V8YdL+X%-leNoX zDFfyCG`Dh28#zH+NblHkN~5`KqyEzJ2Wm#6Wv&K8-0kMvW8dx@H|4CwwB=;)-8UOG zlwUBPv5zh;lF3?5tG9;sy~^}CPrWB`N7LAD_?lCZ+x{o%%9PY4lnuzRx9EoV`v)nuUjVOx}3dLrhyVFn|JuS8f1vZxtrs`gG5^5#aDfI@mN0ilE>)Fnqs{r)} zQn6hcP3e^(wL-6*rYosl_&TkoA(pV25GjpR^%CB%j4dXwP5Ca#Hb@laNsw5!5JNs! zR=;Zd6*{YEE}A=8(4qQ3`QivQsH}kyXe#4oWwV=}=#(X}!rn#oj{P)g7D_+iZ>W?N)V0}Ka!u*$gM>hPh3gj`x_i?;}(QDRK!cIMeO zouSWp##raDRZ{XE%W=c!Cr*W!#z}r@#WOKofeCpS)u!tGHQnIjBTD&dW6%L~h=kHB z4N&3M&n%b>4X1ZM%%RrC1YP(PB`ecqvEddmP^vg30WDw`8q%kHKWVpw3;&rW^g`vM zwcY@Cfv(oG5UOc+3{ z)s;LG@i#|PTk>chfoSb2@n13d=eKtA?OyM7tgor-gcsVA)qIRxZI=Hcb^!pO{Y#5` zOh`KOo%*jY{!#hoSPTLm3`^f4H%GxuNmv%}Vv6!#FlyJlQaofM#^xfRr3(r{#HWZ% zh+i4>-$)~q2s=^@S)LDWzYKV=Q$-lq?tx!yPdpO3D+?>I1OS9T4xj?l78b0FHN_;2 zl`a5kK5hz-(!cC~3bX!8Ir?`E|JjJVNp3z*YOfLL7y8t`_}AIt-<|!>L&m>55M;V~ zIKRBYPr3!{b_Mb*WT%ZVJJ(>Vx{GLa)8o7Q!K|YuT^r)c%~;ass4XIr;4Jf4f2-w2 zol#N$B)PF=NO06|eYSSrD7hKWQPM!~zNyOmk1ApUi7*c<)#shHtzBq6Tv0P#ZEBpo z1ux-E8yOo)2JX8-_)?zg{ggsAGi3;H^BL}xMS~yP!uOOcn)>hK2th800}=j;D9&WE zC6l;Ni&p6=}^rZXFo^i29| z79|ILHlM_w6&2Ru9|V?O&p`rHa1Bj{V-T*wP%*PCBw#!PGGcJ=PLb>VM@g=6^@N{p z!>ha_VUtmXrEV!kD{fiAFQ>RUCCn@KGUSKjOI@K0j=|1+4Dosrq`2}dz)T$9dwbBm z%|1=TT=9)-WZ?dj;YL7f~Zd;J!>I8JcXb!AInh#KmkYJYv6|{eaLc7OQ887WRo(JJL*xKHIq6@{}iRR{;2`_3Z&B9zEUN^+;a+7CUTK%$m(l2H^OA+JK_zaX7E^5Y(Z)^3%DBtG-yzC> zjXrhdEPzz-P6lsXV~Ckq1xKZ@Aag#0EsJQUyPE;mAZU397aGIe?Wtu3+>Z4PjEVXN zO5K~{usuC{?|lzu*1Ss5V*J4>Rb;mx%GA)!=p$i?w)$nVR-U0#zJh%(GfwbvE+61Oa44m`oKZN3( zRZV#k5L3^m=xogu{56`TMa)=1o=Kj{bVX!XPlvzkk2w4$t?V>`4#O#~nr7ZPo$TiE zoW=XRzSi@HVEv*>S4#RQDZ^=%y`J~jVYc4-q@7p4mse81km;Oky6dQ&`9CXrtb@~l zHh=7W&1#e4)Ci@XmKT<3f`he>{HhshDXm$AIzA^;X!zQm(`<@J7tyqO!7riQ)3UG1 zSr>#9sR}86JoNQJrBhMZ)g*583Efu|SaxuNFiG-j7xVT^482LV3<|hEc_2Cv`AUmlnUH>DK?B#)m~F{X*Sqa|QY2O`LMX@c z360K_sqYDh{zl$mW20DTsGaUK7Ud@gqE+uctu=W*Q7@O2eN{$kS=+jz%W$?Of`w!M zxywM`*6#ve#zBV~T!6o`Pi9ya8d{c|mDBK0H}@0aoqsUYlB(*XwkECBa&RumsbavQ zt`I)v{6h1lXPxWZe_M`_S@Z(Z#tOx8S12#+8M*SC5=n@+H;@sEF(Vh8I#*e zga#}?kY!wEoLmp zVX+g&_?pWZ86?<{F*!LA>hnyIeTn#u2zHKZ9TTBuT7A-a4tT_R-4RQc=$a#Dh&v1i z55|iw#I}u&fRh}s?*(?By+$!L$P3~ATANQF!8p0>*#<(A^`Aagg+f&ESvS?sfaP-5$vTL#*d}I7p1)i&RSjADkdA(gl4cd?aR}q8 zZ6|ul1{yN}06~4joi=5s=qnq!qj^(8tR%LS|6S7=WgM2t8j7OD8K~g4g22)79*vud 
z18w?Z%`USF=>QGxD3kxr|8<<04~o%#hlY$!hT<1KXN*ZiQ>#v{@8)aSXvujmLw*|$ULbA0&xxLw*{Jlv>28>Ab5 z(6D}1Tv_pFG!^3G*Z)bnd6%L+>qw@ZkSX|774p6I6^N2@YmRv|1T>)}=a0Vxz&?SB zWrhsUO|EZ6HE4LkQ_k~hBJfeb?T@Xf=3SnFZ}7SChEWe5+fr?LA?ryHSiLlr$yU#F zfs|+fP3_3~w&z(v;0?g)<1q%148vpRmBa@fouxXd5~}|k?y>4T{-Q@sW}qfzN4$DiPf^& z8KhX47)KvYjKAJ9VgNkg1$k>1b07CMMATYtR{`OorjGorIvwlHknrZ?0K!M&)rN!I zCekpZYnO1)5eWNYI97@6o!1@vYBj8o^E~s+wa;g-;i3ZP358M)$%w~ksj2#$uMj0) zo_I@{w3EuBm8hrMtlY?d{+Fx&TeZ~HvGXhQ#i + + + + + Azure Storage JavaScript Client Library Sample for Blob Operations + + + +
+
+

Azure Storage JavaScript Client Library Sample for Blob Operations

+
+

In this sample, we will demonstrate common scenarios for Azure Blob Storage, including creating, listing and deleting containers and blobs.

+
+

Azure Blob storage is a service for storing large amounts of unstructured object data, such as text or binary data, that can be accessed from anywhere in the world via HTTP or HTTPS. You can use Blob storage to expose data publicly to the world, or to store application data privately.

+ +
+
+ Note: You may need to set up an HTTP server to host this sample for the IE browser, because IndexedDB is only available on websites with http or https URL schemes in IE. The Azure Storage JavaScript Client Library currently depends on IndexedDB. +
+
+ +

Contents:

+ + +

Step 1: Preparing an Azure Storage account with CORS rules set

+

Cross-origin resource sharing, or CORS, must be configured on the Azure Storage account before it can be accessed directly from JavaScript in the browser. + You can set the CORS rules for a specific Azure Storage account in the Azure Portal. + The "Allowed origins" can be set to "*" to allow all origins for this sample. + For more information about CORS, see Cross-Origin Resource Sharing (CORS).
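+
+ If you prefer to configure CORS programmatically instead of through the portal, a minimal server-side Node.js sketch could look like the following; this is an illustration only (not part of the sample itself) and assumes the azure-storage npm package with placeholder account credentials:
+
+
+// Hypothetical server-side script: applies a permissive CORS rule to the blob service.
+var azure = require('azure-storage');
+var blobService = azure.createBlobService('ACCOUNT_NAME', 'ACCOUNT_KEY');
+
+blobService.getServiceProperties(function (error, properties) {
+    if (!error) {
+        properties.Cors = {
+            CorsRule: [{
+                AllowedOrigins: ['*'],
+                AllowedMethods: ['GET', 'HEAD', 'PUT', 'POST', 'DELETE', 'MERGE', 'OPTIONS'],
+                AllowedHeaders: ['*'],
+                ExposedHeaders: ['*'],
+                MaxAgeInSeconds: 3600
+            }]
+        };
+        blobService.setServiceProperties(properties, function (error) {
+            // CORS rules applied when error is null
+        });
+    }
+});
+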

+ + +

Step 2: Importing Azure Storage JavaScript Client Library

+

+ Import azure-storage.common.js and azure-storage.blob.js in your HTML file for blob operations, and make sure azure-storage.common.js is included before azure-storage.blob.js. +

+

+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.blob.js"></script>
+
+ +

Step 3: Creating an Azure Storage Blob Service Object

+

+ The BlobService object lets you work with containers and blobs. + The following code creates a BlobService object with a storage account and SAS token. +

+
+var blobUri = 'https://' + 'STORAGE_ACCOUNT' + '.blob.core.windows.net';
+var blobService = AzureStorage.createBlobServiceWithSas(blobUri, 'SAS_TOKEN');
+
+

+ In the Azure Storage JavaScript Client Library, the global variable AzureStorage is the starting point for creating service objects for blob/table/queue/file and for accessing the storage utilities. +

+
+
+ How to get full detailed API definitions? Currently, the JavaScript Client Library shares the same API definitions with Node.js SDK. + Please check API details on Azure Storage Node.js API reference documents. The JavaScript global variable AzureStorage is just like the object require('azure-storage') returns in Node.js. +
+
+
+
+ Warning: Besides SAS tokens, the Azure Storage JavaScript Client Library also supports creating a BlobService authenticated with a storage account key. + However, for security reasons, we recommend using a limited-time SAS token generated by a backend web server using a Stored Access Policy. +
+
+ +

Step 4: Container Operations

+

+ A container provides a grouping of blobs. All blobs must be in a container. An account can contain an unlimited number of containers. A container can store an unlimited number of blobs. Note that the container name must be lowercase. + BlobService object provides plenty of interfaces for container operations. +

+ +

List Containers

+

BlobService provides listContainersSegmented and listContainersSegmentedWithPrefix for retrieving the containers list under a storage account.

+
+blobService.listContainersSegmented(null, function (error, results) {
+    if (error) {
+        // List container error
+    } else {
+        for (var i = 0, container; container = results.entries[i]; i++) {
+            // Deal with container object
+        }
+    }
+});
+
+ +

Create Container

+

BlobService provides createContainer and createContainerIfNotExists for creating a container under a storage account.

+
+blobService.createContainerIfNotExists('mycontainer', function(error, result) {
+    if (error) {
+        // Create container error
+    } else {
+        // Create container successfully
+    }
+});
+
+ +

Delete Container

+

BlobService provides deleteContainer and deleteContainerIfExists for deleting a container under a storage account.

+
+blobService.deleteContainerIfExists('mycontainer', function(error, result) {
+    if (error) {
+        // Delete container error
+    } else {
+        // Delete container successfully
+    }
+});
+
+ +

Executable Example

+

+ The sample will try to create an Azure Storage blob service object based on SAS token authorization. + Enter your Azure Storage account name and SAS token here; the executable examples in the following steps depend on these settings. + Make sure you have set the CORS rules for the Azure Storage blob service, and that the SAS token has not expired. +

+

+ + +

+

In the following executable example, you can try to list all the containers under your storage account settings, and try to create or delete one container from your account.

+
    +
  • Click button to view the container list under your Azure Storage account

  • +
  • +

    Click button to create a container under your Azure Storage account:

    +

    +
  • +
  • Click "Delete" button in the container list to delete the container under your Azure Storage account

  • +
  • Click "Select" button to select and operate with the blobs in next step

  • +
+
+ +

Step 5: Blob Operations

+

Blob: A file of any type and size. Azure Storage offers three types of blobs: block blobs, page blobs, and append blobs.

+

Block blobs are ideal for storing text or binary files, such as documents and media files. Append blobs are similar to block blobs in that they are made up of blocks, but they are optimized for append operations, so they are useful for logging scenarios. A single block blob can contain up to 50,000 blocks of up to 100 MB each, for a total size of slightly more than 4.75 TB (100 MB X 50,000). A single append blob can contain up to 50,000 blocks of up to 4 MB each, for a total size of slightly more than 195 GB (4 MB X 50,000).

+

Page blobs can be up to 1 TB in size, and are more efficient for frequent read/write operations. Azure Virtual Machines use page blobs as OS and data disks.

+

For details about naming containers and blobs, see Naming and Referencing Containers, Blobs, and Metadata.

+ +

List Blobs

+

BlobService provides listBlobsSegmented and listBlobsSegmentedWithPrefix for retrieving the blobs list under a container.

+
+blobService.listBlobsSegmented('mycontainer', null, function (error, results) {
+    if (error) {
+        // List blobs error
+    } else {
+        for (var i = 0, blob; blob = results.entries[i]; i++) {
+            // Deal with blob object
+        }
+    }
+});
+
+

Upload Blob

+

BlobService provides createBlockBlobFromStream for uploading a blob from a stream. + However, createBlockBlobFromStream currently only accepts a Node.js ReadableStream type, which the browser's built-in JavaScript APIs don't provide. +

+ +
+
+ Note: After importing azure-storage.common.js, you can require three Node.js modules: stream, util and buffer, and then wrap a ReadableStream around the HTML5 FileReader. +
+
+
+// Provides a Stream for a file in webpage, inheriting from NodeJS Readable stream.
+var Buffer = require('buffer').Buffer;
+var Stream = require('stream');
+var util = require('util');
+
+function FileStream(file, opt) {
+    Stream.Readable.call(this, opt);
+
+    this.fileReader = new FileReader();
+    this.file = file;
+    this.size = file.size;
+    this.chunkSize = 1024 * 1024 * 4; // 4MB
+    this.offset = 0;
+    var _me = this;
+    
+    this.fileReader.onloadend = function loaded(event) {
+        var data = event.target.result;
+        var buf = Buffer.from(data);
+        _me.push(buf);
+    }
+}
+util.inherits(FileStream, Stream.Readable);
+FileStream.prototype._read = function() {
+    if (this.offset >= this.size) {
+        this.push(null);
+    } else {
+        var end = this.offset + this.chunkSize;
+        var slice = this.file.slice(this.offset, end);
+        this.fileReader.readAsArrayBuffer(slice);
+        this.offset = end;
+    }
+};
+
+

Upload the blob from the stream. You can set the blob name as well as the size of this upload session.

+
+// If one file has been selected in the HTML file input element
+var files = document.getElementById('fileinput').files;
+var file = files[0];
+var fileStream = new FileStream(file);
+
+blobService.createBlockBlobFromStream('mycontainer', file.name, fileStream, file.size, {}, function(error, result, response) {
+    if (error) {
+        // Upload blob failed
+    } else {
+        // Upload successfully
+    }
+});
+
+ +

Download Blob

+

+ BlobService provides interfaces for downloading a blob into browser memory. + Because of the browser's sandbox limitation, we cannot save the downloaded data chunks to disk until all the data chunks of a blob are in browser memory. + The browser's memory size is also limited, especially when downloading huge blobs, so it's recommended to download a blob in the browser directly through a SAS-token-authorized link. +

+

+ Shared access signatures (SAS) are a secure way to provide granular access to blobs and containers without sharing your storage account key. Shared access signatures are often used to provide limited access to your data, such as allowing a mobile app to access blobs. + The following code example builds a download link for the myblob blob from an existing SAS token; the sketch after it shows how such a token, allowing read operations and expiring 100 minutes after it is created, could be generated on a backend server. +

+
+
+ Note: You can choose to use the SAS token on the browser side, or generate a temporary SAS token dynamically on your server side with the Azure Storage C# or Node.js SDKs, etc., according to your security requirements. +
+
+
+var downloadLink = blobService.getUrl('mycontainer', 'myblob', 'SAS_TOKEN');
+
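+
+ A possible server-side Node.js sketch for generating such a read-only SAS token that expires 100 minutes after creation (an illustrative assumption, using the azure-storage npm package with placeholder account credentials):
+
+
+// Hypothetical server-side code: generate a SAS token granting read access to myblob for 100 minutes.
+var azure = require('azure-storage');
+var blobService = azure.createBlobService('ACCOUNT_NAME', 'ACCOUNT_KEY');
+
+var startDate = new Date();
+var expiryDate = new Date(startDate);
+expiryDate.setMinutes(startDate.getMinutes() + 100);
+
+var sharedAccessPolicy = {
+    AccessPolicy: {
+        Permissions: azure.BlobUtilities.SharedAccessPermissions.READ,
+        Start: startDate,
+        Expiry: expiryDate
+    }
+};
+
+var sasToken = blobService.generateSharedAccessSignature('mycontainer', 'myblob', sharedAccessPolicy);
+// Hand sasToken to the browser and build the link there, e.g. blobService.getUrl('mycontainer', 'myblob', sasToken);
+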
+ +

Delete Blob

+

BlobService provides deleteBlob and deleteBlobIfExists for deleting a blob under a container.

+
+blobService.deleteBlobIfExists('mycontainer', 'myblob', function(error, result) {
+    if (error) {
+        // Delete blob failed
+    } else {
+        // Delete blob successfully
+    }
+});
+
+

Executable Example

+

After clicked the "Select" button on the container list in last step, you are able to operate with the blobs under the selected container.

+ +
    +
  • Click button to view the blobs under your selected container

  • +
  • + Click button to upload a local file to current container after selecting a file: +

    +

  • +
  • Click "Delete" button to delete the blob

  • +
  • Click "Download" link to download a blob to local

  • +
+ +
+
Uploaded Bytes:
+
+
+ 0% +
+
+ +
+ +

Step 6: Creating your JavaScript Application based on Azure Storage JavaScript Client Library

+
    +
  • 1. Setting CORS rules for your selected Azure-Storage account blob service.
  • +
  • 2. Including "azure-storage.common.js" in the html file.
  • +
  • 3. Including functional file(s) needed, such as "azure-storage.blob.js" for blob operation.
  • +
• 4. Using the keyword "AzureStorage" to access the Azure Storage JavaScript APIs.
  • +
• 5. Referring to the Azure Storage Node.js SDK documents for detailed API definitions, which are the same as the JavaScript APIs. A minimal page skeleton that puts these steps together is sketched below.
  • +
+
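+
+ As a rough illustration only (the account name and SAS token are placeholders), a minimal page following these steps could look like this:
+
+
+<!DOCTYPE html>
+<html>
+<head>
+    <script src="azure-storage.common.js"></script>
+    <script src="azure-storage.blob.js"></script>
+</head>
+<body>
+<script>
+    // Create a BlobService with a SAS token and list the containers it can see.
+    var blobUri = 'https://' + 'STORAGE_ACCOUNT' + '.blob.core.windows.net';
+    var blobService = AzureStorage.createBlobServiceWithSas(blobUri, 'SAS_TOKEN');
+    blobService.listContainersSegmented(null, function (error, results) {
+        if (!error) {
+            console.log('Found ' + results.entries.length + ' containers');
+        }
+    });
+</script>
+</body>
+</html>
+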

You can view the source code of this sample for detailed reference.

+
+ + + + + + + + + + \ No newline at end of file diff --git a/browser/samples/sample-file.html b/browser/samples/sample-file.html new file mode 100644 index 0000000..de3ae1a --- /dev/null +++ b/browser/samples/sample-file.html @@ -0,0 +1,646 @@ + + + + + + Azure Storage JavaScript Client Library Sample for File Operations + + + +
+
+

Azure Storage JavaScript Client Library Sample for File Operations

+
+

In this sample, we will demonstrate common scenarios for Azure File Storage, including creating, listing and deleting file shares, directories and files.

+
+

Azure File storage is a service that offers fully managed file shares in the cloud using the standard Server Message Block (SMB) protocol. File share contents can also be accessed via HTTP or HTTPS through the File storage REST API, which is what this client library uses.

+

With Azure File storage, you can migrate legacy applications that rely on file shares to Azure quickly and without costly rewrites. Applications running in Azure virtual machines or cloud services or from on-premises clients can mount a file share in the cloud, just as a desktop application mounts a typical SMB share. Any number of application components can then mount and access the File storage share simultaneously. In this sample, you are able to create a file service with storage account and SAS Token. Based on the file service, you could create a file share, list files, upload files and delete files.

+ +
+
+ Note: You may need to set up an HTTP server to host this sample for the IE browser, because IndexedDB is only available on websites with http or https URL schemes in IE. The Azure Storage JavaScript Client Library currently depends on IndexedDB. +
+
+ +

Contents:

+ + +

Step 1: Preparing an Azure Storage account with CORS rules set

+

Cross-origin resource sharing, or CORS, must be configured on the Azure Storage account before it can be accessed directly from JavaScript in the browser. + You can set the CORS rules for a specific Azure Storage account in the Azure Portal. + The "Allowed origins" can be set to "*" to allow all origins for this sample. + For more information about CORS, see Cross-Origin Resource Sharing (CORS).

+ + +

Step 2: Importing Azure Storage JavaScript Files

+

+ Import azure-storage.common.js and azure-storage.file.js in your HTML file for file operations, and make sure azure-storage.common.js is included before azure-storage.file.js. +

+

+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.file.js"></script>
+
+ +

Step 3: Creating an Azure Storage File Service Object

+

+ The FileService object lets you work with files and directories. + The following code creates a FileService object with a storage account and SAS token. +

+
+var fileUri = 'https://' + 'STORAGE_ACCOUNT' + '.file.core.windows.net';
+var fileService = AzureStorage.createFileServiceWithSas(fileUri, 'SAS_TOKEN');
+
+

+ In the Azure Storage JavaScript Client Library, the global variable AzureStorage is the starting point for creating service objects for blob/table/queue/file and for accessing the storage utilities. +

+
+
+ How to get full detailed API definitions? Currently, the JavaScript Client Library shares the same API definitions with Node.js SDK. + Please check API details on Azure Storage Node.js API reference documents. The JavaScript global variable AzureStorage is just like the object require('azure-storage') returns in Node.js. +
+
+
+
+ Warning: Besides SAS tokens, the Azure Storage JavaScript Client Library also supports creating a FileService authenticated with a storage account key. + However, for security reasons, we recommend using a limited-time SAS token generated by a backend web server using a Stored Access Policy. +
+
+ +

Step 4: File Share Operations

+

Share: A File storage share is an SMB file share in Azure. All directories and files must be created in a parent share. An account can contain an unlimited number of shares, and a share can store an unlimited number of files, up to the 5 TB total capacity of the file share.

+ +

List File Shares

+

FileService provides listSharesSegmented and listSharesSegmentedWithPrefix for listing file shares under a storage account.

+
+fileService.listSharesSegmented(null, function(error, results) {
+    if(error) {
+        // List shares error
+    } else {
+        for (var i = 0, share; share = results.entries[i]; i++) {
+            // Deal with share object
+        }
+    }
+});
+
+ +

Create File Share

+

FileService provides createShare and createShareIfNotExists for creating a file share under a storage account.

+
+fileService.createShareIfNotExists('myshare', function(error, result) {
+    if(error) {
+        // Create share error
+    } else {
+        // Create share successfully
+    }
+});
+
+ +

Delete File Share

+

FileService provides deleteShare and deleteShareIfExists for deleting a file share under a storage account.

+
+fileService.deleteShareIfExists('myshare', function(error, result) {
+    if(error) {
+        // Delete share error
+    } else {
+        // Delete share successfully
+    }
+});
+
+ +

Executable Example

+

The sample will try to create an Azure Storage file service object based on SAS token authorization. Enter your Azure Storage account name and SAS token here. Make sure you have set the CORS rules for the Azure Storage file service, and that the SAS token has not expired.

+ + +

Azure Storage file service provides plenty of interfaces for file operations. In the following example, you can try to list all the file shares under your storage account, and try to create or delete one file share from your account.

+
    +
  • Click button to view the file share list under your Azure Storage account

  • +
  • +

    Click button to create a file share under your Azure Storage account

    +

    +
  • +
  • Click "Delete" button to delete the file share under your Azure Storage account

  • +
  • Click "Select" button to operate with the directories and files in next step

  • +
+
+ +

Step 5: Directory and File Operations

+

Directory: An optional hierarchy of directories within a file share.

+

File: A file in the share. A file may be up to 1 TB in size.

+ +

List Files and Directories

+

FileService provides listFilesAndDirectoriesSegmented for listing directories and files under a file share.

+
+fileService.listFilesAndDirectoriesSegmented('myfileshare', '', null, function(error, results, response) {
+    if(error) {
+        // List files and directories error
+    } else {
+        for (var i = 0, dir; dir = results.entries.directories[i]; i++) {
+            // Deal with directory object
+        }
+        for (var i = 0, file; file = results.entries.files[i]; i++) {
+            // Deal with file object
+        }
+    }
+});
+
+ +

Create Directory

+

FileService provides createDirectory and createDirectoryIfNotExists for creating directories under a file share.

+
+fileService.createDirectoryIfNotExists('myfileshare', 'mydirectory', function(error, result, response) {
+    if(error) {
+        // Create directory error
+    } else {
+        // Create directory successfully
+    }
+});
+
+ +

Delete Directory

+

FileService provides deleteDirectory and deleteDirectoryIfExists for deleting directories under a file share.

+
+fileService.deleteDirectoryIfExists('myfileshare', 'mydirectory', function(error, result, response) {
+    if(error) {
+        // Delete directory error
+    } else {
+        // Delete directory successfully
+    }
+});
+
+ +

Upload File

+

FileService provides createFileFromStream for uploading a file from a stream. + However, createFileFromStream currently only accepts a Node.js ReadableStream type, which the browser's built-in JavaScript APIs don't provide. +

+
+
+ Note: After importing azure-storage.common.js, you can require three Node.js modules: stream, util and buffer, and then wrap a ReadableStream around the HTML5 FileReader. +
+
+
+// Provides a Stream for a file in webpage, inheriting from NodeJS Readable stream.
+var Stream = require('stream');
+var util = require('util');
+var Buffer = require('buffer').Buffer;
+
+function FileStream(file, opt) {
+    Stream.Readable.call(this, opt);
+
+    this.fileReader = new FileReader();
+    this.file = file;
+    this.size = file.size;
+    this.chunkSize = 1024 * 1024 * 4; // 4MB
+    this.offset = 0;
+    var _me = this;
+    
+    this.fileReader.onloadend = function loaded(event) {
+        var data = event.target.result;
+        var buf = Buffer.from(data);
+        _me.push(buf);
+    }
+}
+util.inherits(FileStream, Stream.Readable);
+FileStream.prototype._read = function() {
+    if (this.offset >= this.size) {
+        this.push(null);
+    } else {
+        var end = this.offset + this.chunkSize;
+        var slice = this.file.slice(this.offset, end);
+        this.fileReader.readAsArrayBuffer(slice);
+        this.offset = end;
+    }
+};
+
+

Upload the file from the stream. You can set the file name as well as the size of this upload session.

+
+// If one file has been selected in the HTML file input element
+var files = document.getElementById('fileinput').files;
+var file = files[0];
+var fileStream = new FileStream(file);
+
+fileService.createFileFromStream('myfileshare', 'mydirectory', file.name, fileStream, file.size, {}, function(error, result, response) {
+    if (error) {
+        // Upload file failed
+    } else {
+        // Upload successfully
+    }
+});
+
+ +

Download File

+

+ FileService provides interfaces for downloading a file into browser memory directly. + Because of the browser's sandbox limitation, we cannot save the downloaded data chunks to disk until all the data chunks of a file are in browser memory. + The browser's memory size is also limited, especially when downloading huge files, so it's recommended to download a file in the browser directly through a SAS-token-authorized link. +

+

+ Shared access signatures (SAS) are a secure way to provide granular access to files and directories without sharing your storage account key. Shared access signatures are often used to provide limited access to your data, such as allowing a mobile app to access files. + The following code example builds a download link for the myfile file from an existing SAS token; the sketch after it shows how such a token, allowing read operations and expiring 100 minutes after it is created, could be generated on a backend server. +

+
+
+ Note: You can choose to use the SAS token on the browser side, or generate a temporary SAS token dynamically on your server side with the Azure Storage C# or Node.js SDKs, etc., according to your security requirements. +
+
+
+var downloadLink = fileService.getUrl('myshare', 'mydirectory', 'myfile', 'SAS_TOKEN');
+
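+
+ A possible server-side Node.js sketch for generating such a read-only SAS token that expires 100 minutes after creation (an illustrative assumption, using the azure-storage npm package with placeholder account credentials):
+
+
+// Hypothetical server-side code: generate a SAS token granting read access to myfile for 100 minutes.
+var azure = require('azure-storage');
+var fileService = azure.createFileService('ACCOUNT_NAME', 'ACCOUNT_KEY');
+
+var startDate = new Date();
+var expiryDate = new Date(startDate);
+expiryDate.setMinutes(startDate.getMinutes() + 100);
+
+var sharedAccessPolicy = {
+    AccessPolicy: {
+        Permissions: azure.FileUtilities.SharedAccessPermissions.READ,
+        Start: startDate,
+        Expiry: expiryDate
+    }
+};
+
+var sasToken = fileService.generateSharedAccessSignature('myshare', 'mydirectory', 'myfile', sharedAccessPolicy);
+// Hand sasToken to the browser and build the link there, e.g. fileService.getUrl('myshare', 'mydirectory', 'myfile', sasToken);
+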
+ +

Delete File

+

FileService provides deleteFile and deleteFileIfExists for deleting files under a file share.

+
+fileService.deleteFileIfExists('myfileshare', 'mydirectory', 'myfile', function(error, result, response) {
+    if(error) {
+        // Delete file error
+    } else {
+        // Delete file successfully
+    }
+});
+
+ +

Executable Example

+

After clicked the "Select" button on the file share list, you are able to operate with the directories and files under the selected file share.

+

    +
  • Click button to view the directories and files under your selected file share

  • +
• Click button to create a directory under your current directory

    +

    +
  • +
  • Click button to return to upper level directory

  • +
  • Click button to upload a local file to current directory

    +

  • +
  • Click "Delete" button to delete the directory or file

  • +
  • Click "Download" link to download a file to local

  • +
+ + Current Path: +
Uploaded Bytes:
+
+
+ 0% +
+
+
+ +

Step 6: Creating your JavaScript Application based on Azure Storage JavaScript Client Library

+
    +
  • 1. Setting CORS rules for your selected Azure-Storage account file service.
  • +
  • 2. Including "azure-storage.common.js" in the html file.
  • +
  • 3. Including functional file(s) needed, such as "azure-storage.file.js" for file operation.
  • +
• 4. Using the keyword "AzureStorage" to access the Azure Storage JavaScript APIs.
  • +
• 5. Referring to the Azure Storage Node.js SDK documents for detailed API definitions, which are the same as the JavaScript APIs.
  • +
+

You can view the source code of this sample for detailed reference.

+
+ + + + + + + + + \ No newline at end of file diff --git a/browser/samples/sample-queue.html b/browser/samples/sample-queue.html new file mode 100644 index 0000000..2e060e4 --- /dev/null +++ b/browser/samples/sample-queue.html @@ -0,0 +1,465 @@ + + + + + + Azure Storage JavaScript Client Library Sample for Queue Operations + + + +
+
+

Azure Storage JavaScript Client Library Sample for Queue Operations

+
+

In this sample, we will demonstrate common scenarios for Azure Queue Storage, including creating, listing and deleting queues and messages.

+
+ +

Azure Storage queue service provides cloud messaging between application components. In designing applications for scale, application components are often decoupled, so that they can scale independently. Queue storage delivers asynchronous messaging for communication between application components, whether they are running in the cloud, on the desktop, on an on-premises server, or on a mobile device. Queue storage also supports managing asynchronous tasks and building process work flows. +

+ +
+
+ Note: You may need to set up an HTTP server to host this sample for the IE browser, because IndexedDB is only available on websites with http or https URL schemes in IE. The Azure Storage JavaScript Client Library currently depends on IndexedDB. +
+
+ +

Contents:

+ + +

Step 1: Preparing an Azure Storage account with CORS rules set

+

Cross-origin resource sharing, or CORS, must be configured on the Azure Storage account before it can be accessed directly from JavaScript in the browser. + You can set the CORS rules for a specific Azure Storage account in the Azure Portal. + The "Allowed origins" can be set to "*" to allow all origins for this sample. + For more information about CORS, see Cross-Origin Resource Sharing (CORS).

+ + +

Step 2: Importing Azure Storage JavaScript Client Library

+

+ Import azure-storage.common.js and azure-storage.queue.js in your HTML file for queue operations, and make sure azure-storage.common.js is included before azure-storage.queue.js. +

+

+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.queue.js"></script>
+
+ +

Step 3: Creating an Azure Storage Queue Service Object

+

+ The QueueService object lets you work with queues and messages. + The following code creates a QueueService object with a storage account and SAS token. +

+
+var queueUri = 'https://' + 'STORAGE_ACCOUNT' + '.queue.core.windows.net';
+var queueService = AzureStorage.createQueueServiceWithSas(queueUri, 'SAS_TOKEN');
+
+

+ In the Azure Storage JavaScript Client Library, the global variable AzureStorage is the starting point for creating service objects for blob/table/queue/file and for accessing the storage utilities. +

+
+
+ How to get full detailed API definitions? Currently, the JavaScript Client Library shares the same API definitions with Node.js SDK. + Please check API details on Azure Storage Node.js API reference documents. The JavaScript global variable AzureStorage is just like the object require('azure-storage') returns in Node.js. +
+
+
+
+ Warning: Besides SAS tokens, the Azure Storage JavaScript Client Library also supports creating a QueueService authenticated with a storage account key. + However, for security reasons, we recommend using a limited-time SAS token generated by a backend web server using a Stored Access Policy. +
+
+ +

Step 4: Queue Operations

+

+ Azure Queue storage is a service for storing large numbers of messages that can be accessed from anywhere in the world via authenticated calls using HTTP or HTTPS. A single queue message can be up to 64 KB in size, and a queue can contain millions of messages, up to the total capacity limit of a storage account. +

+ +

List Queues

+

QueueService provides listQueuesSegmented and listQueuesSegmentedWithPrefix for retrieving the queue list under your storage account.

+
+queueService.listQueuesSegmented(null, function (error, results) {
+    if (error) {
+        // List queue error
+    } else {
+        for (var i = 0, queue; queue = results.entries[i]; i++) {
+            // Deal with queue object
+        }
+    }
+});
+
+ +

Create Queue

+

QueueService provides createQueue and createQueueIfNotExists for creating a queue under a storage account.

+
+queueService.createQueueIfNotExists('myqueue', function(error, result) {
+    if (error) {
+        // Create queue error
+    } else {
+        // Create queue successfully
+    }
+});
+
+ +

Delete Queue

+

QueueService provides deleteQueue and deleteQueueIfExists for deleting a queue under a storage account.

+
+queueService.deleteQueueIfExists('myqueue', function(error, result) {
+    if (error) {
+        // Delete queue error
+    } else {
+        // Delete queue successfully
+    }
+});
+
+ +

Executable Example

+

The sample will try to create an Azure Storage queue service object based on SAS token authorization. Enter your Azure Storage account name and SAS token here. Make sure you have set the CORS rules for the Azure Storage queue service, and that the SAS token has not expired.

+ + +

Azure Storage queue service provides plenty of interfaces for queue operations. In the following example, you can try to list all the queues under your storage account, and try to create or delete one queue from your account.

+
    +
  • Click button to view the queue list under your Azure Storage account

  • +
  • Click button to create a queue under your Azure Storage account:

    +

    +
  • +
  • Click "Delete" button to delete the queue under your Azure Storage account

  • +
  • Click "Select" button to select a queue and operate with the queue messages in next step

  • +
+
+ +

Step 5: Message Operations

+

Message: A message, in any format, of up to 64 KB. The maximum time that a message can remain in the queue is 7 days.

+
+
+ Note: Azure Storage JavaScript Client Library provides var encoder = new AzureStorage.QueueMessageEncoder.TextBase64QueueMessageEncoder() which is a Base64 encoder and decoder. + If a message content string is encoded with encoder.encode(), remember to decode it with encoder.decode() after peeking or getting the message. +
+
+ +

Peek Messages

+

QueueService provides peekMessage and peekMessages for retrieving the messages list under a queue.

+
+queueService.peekMessages('myqueue', {numOfMessages: 32}, function (error, results) {
+    if (error) {
+        // Peek messages error
+    } else {
+        for (var i = 0, message; message = results[i]; i++) {
+            // Deal with message object
+        }
+    }
+});
+
+ +
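+
+ For example, if the message was created with the encoder shown in the note above, the peeked message text can be decoded like this (a small sketch; message stands for one of the objects returned by peekMessages):
+
+
+var encoder = new AzureStorage.QueueMessageEncoder.TextBase64QueueMessageEncoder();
+// Recover the original string from the Base64-encoded message text.
+var originalText = encoder.decode(message.messageText);
+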

Create Message

+

QueueService provides createMessage for adding a new message to a queue.

+
+var encoder = new AzureStorage.QueueMessageEncoder.TextBase64QueueMessageEncoder();
+queueService.createMessage('myqueue', encoder.encode('mymessage'), function (error, results, response) {
+    if (error) {
+        // Create message error
+    } else {
+        // Create message successfully
+    }
+});
+
+ +

Dequeue Message

+

QueueService provides getMessages and deleteMessage for dequeuing the next message in a queue.

+
+queueService.getMessages('myqueue', function(error, result, response) {
+  if(!error){
+    // Message text is in result[0].messageText
+    var message = result[0];
+    queueService.deleteMessage('myqueue', message.messageId, message.popReceipt, function(error, response){
+      if(!error){
+        //message deleted
+      }
+    });
+  }
+});
+
+ +

Update Message

+

QueueService provides getMessages and updateMessage for updating the next message in a queue.

+
+var encoder = new AzureStorage.QueueMessageEncoder.TextBase64QueueMessageEncoder();
+queueService.getMessages('myqueue', function(error, result, response) {
+  if(!error){
+    // Got the message
+    var message = result[0];
+    queueService.updateMessage('myqueue', message.messageId, message.popReceipt, 10, {messageText: encoder.encode('new text')}, function(error, result, response){
+      if(!error){
+        // Message updated successfully
+      }
+    });
+  }
+});
+
+ +

Executable Example

+

After clicking the "Select" button on the queue list in the last step, you are able to operate on the queue messages under the selected queue.

+

    +
  • Click button to refresh the message list in your selected queue

  • +
  • Click button to create a message in your selected queue:

    +

    +
  • +
  • Click button to update the top queue message in your selected queue (Dequeued messages will be invisible for 30s by default.):

    +

    +
  • +
  • Click button to dequeue the top queue message in your selected queue:

  • +
+ +
+ +

Step 6: Creating your JavaScript Application based on Azure Storage JavaScript Client Library

+
    +
  • 1. Set CORS rules for the queue service of your selected Azure Storage account.
  • +
  • 2. Include "azure-storage.common.js" in the HTML file.
  • +
  • 3. Include the functional file(s) you need, such as "azure-storage.queue.js" for queue operations (a minimal page skeleton is sketched after this list).
  • +
  • 4. Use the global "AzureStorage" object to access the Azure Storage JavaScript APIs.
  • +
  • 5. Refer to the Azure Storage Node.js SDK documents for detailed API definitions, which are the same as the JavaScript APIs.
  • +
+
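As a rough sketch of steps 2 to 4 above (the STORAGE_ACCOUNT and SAS_TOKEN placeholders are yours to fill in):
+
+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.queue.js"></script>
+<script>
+    // 'STORAGE_ACCOUNT' and 'SAS_TOKEN' are placeholders; supply your own values
+    var queueUri = 'https://' + 'STORAGE_ACCOUNT' + '.queue.core.windows.net';
+    var queueService = AzureStorage.createQueueServiceWithSas(queueUri, 'SAS_TOKEN');
+</script>
+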

You can view the source code of this sample for detailed reference.

+
+ + + + + + + \ No newline at end of file diff --git a/browser/samples/sample-table.html b/browser/samples/sample-table.html new file mode 100644 index 0000000..d0737bd --- /dev/null +++ b/browser/samples/sample-table.html @@ -0,0 +1,440 @@ + + + + + + Azure Storage JavaScript Client Library Sample for Table Operations + + + +
+
+

Azure Storage JavaScript Client Library Sample for Table Operations

+
+

In this sample, we will demonstrate common scenarios for Azure Table Storage that include creating, listing and deleting tables and entities.

+
+ +

Azure Table storage is a service that stores structured NoSQL data in the cloud. Table storage is a key/attribute store with a schemaless design. Because Table storage is schemaless, it's easy to adapt your data as the needs of your application evolve. Access to data is fast and cost-effective for all kinds of applications. Table storage is typically significantly lower in cost than traditional SQL for similar volumes of data. +

+ +
+
+ Note: You may need to set up an HTTP server to host this sample for the IE browser, because in IE IndexedDB is only available on websites with http or https URL schemes. Azure Storage JavaScript Client Library currently depends on IndexedDB. +
+
+ +

Contents:

+ + +

Step 1: Preparing an Azure Storage account with CORS rules set

+

Cross-origin resource sharing, or CORS, must be configured on the Azure Storage account for it to be accessed directly from JavaScript in the browser. + You are able to set the CORS rules for a specific Azure Storage account on the Azure Portal. + The "Allowed origins" can be set to "*" to allow all origins in this sample. + For more information about CORS, see Cross-Origin Resource Sharing (CORS).

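Besides the Azure Portal, CORS rules can also be set programmatically with the Node.js SDK's setServiceProperties. A hedged sketch follows; the permissive rule values are sample values only, and the account name and key are placeholders:
+
+// Typically run once from a trusted backend that holds the account key, not from the browser
+var azure = require('azure-storage');
+var tableService = azure.createTableService('STORAGE_ACCOUNT', 'STORAGE_ACCESS_KEY');
+var serviceProperties = {
+    Cors: {
+        CorsRule: [{
+            AllowedOrigins: ['*'],
+            AllowedMethods: ['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'MERGE', 'OPTIONS'],
+            AllowedHeaders: ['*'],
+            ExposedHeaders: ['*'],
+            MaxAgeInSeconds: 3600
+        }]
+    }
+};
+tableService.setServiceProperties(serviceProperties, function (error) {
+    if (!error) {
+        // CORS rules applied to the table service
+    }
+});
+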
+ + +

Step 2: Importing Azure Storage JavaScript Client Library

+

+ Import azure-storage.common.js and azure-storage.table.js in your HTML file for table operations, and make sure azure-storage.common.js is included before azure-storage.table.js. +

+

+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.table.js"></script>
+
+ +

Step 3: Creating an Azure Storage Table Service Object

+

+ The TableService object lets you work with tables and entities. + The following code creates a TableService object with a storage account and SAS Token. +

+
+var tableUri = 'https://' + 'STORAGE_ACCOUNT' + '.table.core.windows.net';
+var tableService = AzureStorage.createTableServiceWithSas(tableUri, 'SAS_TOKEN');
+
+

+ In Azure Storage JavaScript Client Library, a global variable AzureStorage is the start point where we can create service objects for blob/table/queue/file and access the storage utilities. +

+
+
+ How to get full detailed API definitions? Currently, the JavaScript Client Library shares the same API definitions with the Node.js SDK. + Please check the API details in the Azure Storage Node.js API reference documents. The JavaScript global variable AzureStorage is just like the object that require('azure-storage') returns in Node.js. +
+
+
+
+ Warning: Azure Storage JavaScript Client Library also supports creating a TableService based on a Storage Account Key for authentication besides SAS Token. + However, for security concerns, we recommend using a limited-time SAS Token generated by a backend web server using a Stored Access Policy. +
+
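A hedged sketch of how such a backend might generate a table SAS Token with the Node.js SDK; the table name, permissions and expiry below are illustrative, and an ad-hoc policy is used for brevity (a Stored Access Policy would instead be referenced through its identifier):
+
+// Runs on a backend server that holds the account key
+var azure = require('azure-storage');
+var tableService = azure.createTableService('STORAGE_ACCOUNT', 'STORAGE_ACCESS_KEY');
+var sharedAccessPolicy = {
+    AccessPolicy: {
+        Permissions: azure.TableUtilities.SharedAccessPermissions.QUERY,
+        Expiry: azure.date.minutesFromNow(30)
+    }
+};
+// Hand this token string to the browser client
+var sasToken = tableService.generateSharedAccessSignature('mytable', sharedAccessPolicy);
+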
+ +

Step 4: Table Operations

+

+ Table: A table is a collection of entities. Tables don't enforce a schema on entities, which means a single table can contain entities that have different sets of properties. The number of tables that a storage account can contain is limited only by the storage account capacity limit. +

+ +

List Tables

+

TableService provides listTablesSegmented and listTablesSegmentedWithPrefix for retrieving the table list under your storage account.

+
+tableService.listTablesSegmented(null, {maxResults : 200}, function (error, results) {
+    if (error) {
+        // List tables error
+    } else {
+        for (var i = 0, table; table = results.entries[i]; i++) {
+            // Deal with table object
+        }
+    }
+});
+
+ +

Create Table

+

TableService provides createTable and createTableIfNotExists for creating a table under a storage account.

+
+tableService.createTableIfNotExists('mytable', function(error, result) {
+    if (error) {
+        // Create table error
+    } else {
+        // Create table successfully
+    }
+});
+
+ +

Delete Table

+

TableService provides deleteTable and deleteTableIfExists for deleting a table under a storage account.

+
+tableService.deleteTableIfExists('mytable', function(error, result) {
+    if (error) {
+        // Delete table error
+    } else {
+        // Delete table successfully
+    }
+});
+
+ +

Executable Example

+

The sample will try to create an Azure Storage table service object based on SAS Token authorization. Enter your Azure Storage account name and SAS Token here. Make sure you have set the CORS rules for the Azure Storage table service, and that the SAS Token is within its validity period.

+ + +

Azure Storage table service provides plenty of interfaces for table operations. In the following example, you can try to list all the tables under your storage account, and try to create or delete a table from your account.

+
    +
  • Click button to view the table list under your Azure Storage account

  • +
  • Click button to create a table under your Azure Storage account

    +

    +
  • +
  • Click "Delete" button to delete the table under your Azure Storage account

  • +
  • Click "Select" button to operate with the table entities in next step

  • +
+
+ +

Step 5: Table Entities Operations

+

Entity: An entity is a set of properties, similar to a database row. An entity can be up to 1MB in size.

+

Properties: A property is a name-value pair. Each entity can include up to 252 properties to store data. Each entity also has 3 system properties that specify a partition key, a row key, and a timestamp. Entities with the same partition key can be queried more quickly, and inserted/updated in atomic operations. An entity's row key is its unique identifier within a partition.

+ +

Query Entities

+

TableService provides queryEntities for querying entities in a table under a storage account.

+
+var tableQuery = new AzureStorage.TableQuery().top(200);
+tableService.queryEntities('mytable', tableQuery, null, function(error, result) {
+    if (error) {
+        // Query entities error
+    } else {
+        for (var i = 0, entity; entity = result.entries[i]; i++) {
+            // Deal with entity object
+        }
+    }
+});
+
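TableQuery also supports filters. For example, a query can be restricted to a single partition (the partition key value below is illustrative):
+
+var partitionQuery = new AzureStorage.TableQuery()
+    .top(200)
+    .where('PartitionKey eq ?', 'partitionKey');
+tableService.queryEntities('mytable', partitionQuery, null, function (error, result) {
+    if (!error) {
+        // result.entries holds the matching entities
+    }
+});
+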
+ +

Insert or Replace Entity

+

TableService provides insertEntity, insertOrReplaceEntity and insertOrMergeEntity for adding a table entity under a storage account.

+
+var insertEntity = {
+    PartitionKey: {'_': 'partitionKey'},
+    RowKey: {'_': 'rowKey'}
+};
+
+tableService.insertOrReplaceEntity('mytable', insertEntity, function(error, result, response) {
+    if(error) {
+        // Insert table entity error
+    } else {
+        // Insert table entity successfully
+    }
+});
+
+ +

Delete Entity

+

TableService provides deleteEntity for deleting a table entity under a storage account.

+
+var deleteEntity = {
+    PartitionKey: {'_': 'partitionKey'},
+    RowKey: {'_': 'rowKey'}
+};
+
+tableService.deleteEntity('mytable', deleteEntity, function(error, result, response) {
+    if(error) {
+        // Delete table entity error
+    } else {
+        // Delete table entity successfully
+    }
+});
+
+ +

Executable Example

+

After clicking the "Select" button on the table list, you are able to operate on the table entities under the selected table.

+

    +
  • Click button to refresh the entity list in your selected table

  • +
  • Click button to create an entity in your selected table. If an entity with the same PartitionKey and RowKey already exists, the old entity will be merged.

    +

    + + + + +

    +
  • +
  • Click "Delete" button to delete the selected table entity in your selected table

  • +
+
+ +

Step 6: Creating your JavaScript Application based on Azure Storage JavaScript Client Library

+
    +
  • 1. Set CORS rules for the table service of your selected Azure Storage account.
  • +
  • 2. Include "azure-storage.common.js" in the HTML file.
  • +
  • 3. Include the functional file(s) you need, such as "azure-storage.table.js" for table operations (a minimal page skeleton is sketched after this list).
  • +
  • 4. Use the global "AzureStorage" object to access the Azure Storage JavaScript APIs.
  • +
  • 5. Refer to the Azure Storage Node.js SDK documents for detailed API definitions, which are the same as the JavaScript APIs.
  • +
+
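As a rough sketch of steps 2 to 4 above (the STORAGE_ACCOUNT and SAS_TOKEN placeholders are yours to fill in):
+
+<script src="azure-storage.common.js"></script>
+<script src="azure-storage.table.js"></script>
+<script>
+    // 'STORAGE_ACCOUNT' and 'SAS_TOKEN' are placeholders; supply your own values
+    var tableUri = 'https://' + 'STORAGE_ACCOUNT' + '.table.core.windows.net';
+    var tableService = AzureStorage.createTableServiceWithSas(tableUri, 'SAS_TOKEN');
+</script>
+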

You can view the source code of this sample for detailed reference.

+
+ + + + + + + + \ No newline at end of file diff --git a/package.json b/package.json index d558198..4b455be 100644 --- a/package.json +++ b/package.json @@ -34,9 +34,11 @@ "md5.js": "1.3.4" }, "devDependencies": { + "browserify": "^13.3.0", "browserify-fs": "^1.0.0", "batchflow": "0.4.0", "coveralls": "^2.11.4", + "factor-bundle": "^2.5.0", "grunt": "~0.4.2", "grunt-contrib-jshint": "~0.11.0", "grunt-devserver": "^0.6.0", @@ -69,6 +71,7 @@ "test": "jshint lib && nsp check && mocha --no-timeouts --recursive test", "testwithoutcheck": "jshint lib && mocha --no-timeouts --recursive test", "cover": "istanbul cover ./node_modules/mocha/bin/_mocha -- -R spec -u bdd --no-timeouts --recursive test", - "coveralls": "npm run cover && cat ./coverage/lcov.info | node ./node_modules/coveralls/bin/coveralls.js" + "coveralls": "npm run cover && cat ./coverage/lcov.info | node ./node_modules/coveralls/bin/coveralls.js", + "genjs": "node ./browser/bundle.js" } }