Merge branch 'table' into tablemastermerge

This commit is contained in:
Edwin Huber 2021-03-29 11:41:44 +02:00 committed by GitHub
Parents 456cbe74ca 42ff12135c
Commit f0a4571aa5
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
167 changed files: 20,087 additions and 732 deletions

View file

@ -1,4 +1,6 @@
.vscode/**
.github/**
.github
node_modules/**
swagger/**
__blobstorage__/**

4
.gitignore vendored
View file

@ -9,4 +9,6 @@ debug.log
*.tgz
__*
*.env
temp
**/.DS_Store
*.pem

View file

@ -1,5 +1,9 @@
node_modules/
src/
typings/
coverage
.prettierrc.json
README.mcr.md
azurite-testdrive/
.nyc_output
debug.log
@ -8,6 +12,7 @@ azure-pipelines.yml
ISSUE_TEMPLATE.md
PULL_REQUEST_TEMPLATE.md
.vscode/
.github/
tslint.json
tsconfig.json
*.vsix

14
.vscode/launch.json vendored
View file

@ -14,6 +14,8 @@
"env": {
"AZURITE_ACCOUNTS": ""
},
"skipFiles": ["node_modules/*/**", "<node_internals>/*/**"],
"outputCapture": "std"
},
{
@ -108,6 +110,18 @@
},
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Azurite Table Service - Loki",
"cwd": "${workspaceFolder}",
"runtimeArgs": ["-r", "ts-node/register"],
"args": ["${workspaceFolder}/src/table/main.ts", "-d", "debug.log"],
"env": {
"AZURITE_ACCOUNTS": ""
},
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",

View file

@ -4,6 +4,11 @@
## Upcoming Release
## 2021.03 Version 3.11.0-table-alpha.1
- First Alpha version of Azurite V3 Table.
- Allow empty RowKey in an entity.
## 2021.2 Version 3.11.0
- Bump up Azure Storage service API version to 2020-06-12.

View file

@ -31,16 +31,16 @@ VOLUME [ "/data" ]
COPY package*.json LICENSE NOTICE.txt ./
RUN npm config set unsafe-perm=true && \
npm ci
COPY --from=builder /opt/azurite/dist/ dist/
RUN npm install -g --loglevel verbose
RUN npm config set unsafe-perm=true && \
npm install -g --loglevel verbose
# Blob Storage Port
EXPOSE 10000
# Queue Storage Port
EXPOSE 10001
# Table Storage Port
EXPOSE 10002
CMD ["azurite", "-l", "/data", "--blobHost", "0.0.0.0","--queueHost", "0.0.0.0"]
CMD ["azurite", "-l", "/data", "--blobHost", "0.0.0.0","--queueHost", "0.0.0.0", "--tableHost", "0.0.0.0"]

View file

@ -17,11 +17,12 @@ Azurite is an open source Azure Storage API compatible server (emulator). Based
# How to Use this Image
```bash
docker run -p 10000:10000 -p 10001:10001 mcr.microsoft.com/azure-storage/azurite
docker run -p 10000:10000 -p 10001:10001 -p 10002:10002 mcr.microsoft.com/azure-storage/azurite
```
`-p 10000:10000` will expose blob service's default listening port.
`-p 10001:10001` will expose queue service's default listening port.
`-p 10002:10002` will expose table service's default listening port.
Just run blob service:
@ -32,7 +33,7 @@ docker run -p 10000:10000 mcr.microsoft.com/azure-storage/azurite azurite-blob -
**Run Azurite V3 docker image with customized persisted data location**
```bash
docker run -p 10000:10000 -p 10001:10001 -v c:/azurite:/data mcr.microsoft.com/azure-storage/azurite
docker run -p 10000:10000 -p 10001:10001 -p 10002:10002 -v c:/azurite:/data mcr.microsoft.com/azure-storage/azurite
```
`-v c:/azurite:/data` will use and map host path `c:/azurite` as Azurite's workspace location.
@ -40,7 +41,7 @@ docker run -p 10000:10000 -p 10001:10001 -v c:/azurite:/data mcr.microsoft.com/a
**Customize Azurite V3 supported parameters for docker image**
```bash
docker run -p 8888:8888 -p 9999:9999 -v c:/azurite:/workspace mcr.microsoft.com/azure-storage/azurite azurite -l /workspace -d /workspace/debug.log --blobPort 8888 --blobHost 0.0.0.0 --queuePort 9999 --queueHost 0.0.0.0 --loose --skipApiVersionCheck
docker run -p 7777:7777 -p 8888:8888 -p 9999:9999 -v c:/azurite:/workspace mcr.microsoft.com/azure-storage/azurite azurite -l /workspace -d /workspace/debug.log --blobPort 7777 --blobHost 0.0.0.0 --queuePort 8888 --queueHost 0.0.0.0 --tablePort 9999 --tableHost 0.0.0.0 --loose --skipApiVersionCheck
```
The above command will try to start the Azurite image with the following configurations:
@ -49,14 +50,18 @@ Above command will try to start Azurite image with configurations:
`-d /workspace/debug.log` enables the debug log at `/workspace/debug.log` inside the docker instance. `debug.log` will also be mapped to `c:/azurite/debug.log` on the host machine because of the docker volume mapping.
`--blobPort 8888` makes Azurite blob service listen to port 8888, while `-p 8888:8888` redirects requests from host machine's port 8888 to docker instance.
`--blobPort 7777` makes Azurite blob service listen to port 7777, while `-p 7777:7777` redirects requests from host machine's port 7777 to docker instance.
`--blobHost 0.0.0.0` defines blob service listening endpoint to accept requests from host machine.
`--queuePort 9999` makes Azurite queue service listen to port 9999, while `-p 9999:9999` redirects requests from host machine's port 9999 to docker instance.
`--queuePort 8888` makes Azurite queue service listen to port 8888, while `-p 8888:8888` redirects requests from host machine's port 8888 to docker instance.
`--queueHost 0.0.0.0` defines queue service listening endpoint to accept requests from host machine.
`--tablePort 9999` makes Azurite table service listen to port 9999, while `-p 9999:9999` redirects requests from host machine's port 9999 to docker instance.
`--tableHost 0.0.0.0` defines table service listening endpoint to accept requests from host machine.
`--loose` enables loose mode, which ignores unsupported headers and parameters.
`--skipApiVersionCheck` skips the request API version check.

View file

@ -37,6 +37,11 @@ Compared to V2, Azurite V3 implements a new architecture leveraging code generat
- Preflight Request
- Create/List/Delete Queues
- Put/Get/Peek/Update/Delete/Clear Messages
- Table storage features align with Azure Storage API version 2019-12-12 (Refer to support matrix section below)
- SharedKey/Account SAS/Service SAS
- Get/Set Table Service Properties
- Create/List/Delete Tables
- Insert/Update/Query/Delete Table Entities
- Features **NEW** on V3
- Built with TypeScript and ECMA native promise and async features
- New architecture based on TypeScript server generator. Leverage auto generated protocol layer, models, serializer, deserializer and handler interfaces from REST API swagger
@ -97,6 +102,12 @@ Start queue service only:
$ azurite-queue -l path/to/azurite/workspace
```
Start table service only:
```bash
$ azurite-table -l path/to/azurite/workspace
```
### Visual Studio Code Extension
Azurite V3 can be installed from [Visual Studio Code extension market](https://marketplace.visualstudio.com/items?itemName=Azurite.azurite).
@ -121,6 +132,8 @@ Following extension configurations are supported:
- `azurite.blobPort` Blob service listening port, by default 10000
- `azurite.queueHost` Queue service listening endpoint, by default 127.0.0.1
- `azurite.queuePort` Queue service listening port, by default 10001
- `azurite.tableHost` Table service listening endpoint, by default 127.0.0.1
- `azurite.tablePort` Table service listening port, by default 10002
- `azurite.location` Workspace location path, by default existing Visual Studio Code opened folder
- `azurite.silent` Silent mode to disable access log in Visual Studio channel, by default false
- `azurite.debug` Output debug log into Azurite channel, by default false
@ -138,11 +151,12 @@ Following extension configurations are supported:
> Note. Find more docker image tags at https://mcr.microsoft.com/v2/azure-storage/azurite/tags/list
```bash
docker run -p 10000:10000 -p 10001:10001 mcr.microsoft.com/azure-storage/azurite
docker run -p 10000:10000 -p 10001:10001 -p 10002:10002 mcr.microsoft.com/azure-storage/azurite
```
`-p 10000:10000` will expose blob service's default listening port.
`-p 10001:10001` will expose queue service's default listening port.
`-p 10002:10002` will expose table service's default listening port.
Or just run blob service:
@ -161,7 +175,7 @@ docker run -p 10000:10000 -p 10001:10001 -v c:/azurite:/data mcr.microsoft.com/a
#### Customize all Azurite V3 supported parameters for docker image
```bash
docker run -p 8888:8888 -p 9999:9999 -v c:/azurite:/workspace mcr.microsoft.com/azure-storage/azurite azurite -l /workspace -d /workspace/debug.log --blobPort 8888 --blobHost 0.0.0.0 --queuePort 9999 --queueHost 0.0.0.0 --loose --skipApiVersionCheck
docker run -p 7777:7777 -p 8888:8888 -p 9999:9999 -v c:/azurite:/workspace mcr.microsoft.com/azure-storage/azurite azurite -l /workspace -d /workspace/debug.log --blobPort 7777 --blobHost 0.0.0.0 --queuePort 8888 --queueHost 0.0.0.0 --tablePort 9999 --tableHost 0.0.0.0 --loose --skipApiVersionCheck
```
The above command will try to start the Azurite image with the following configurations:
@ -170,14 +184,18 @@ Above command will try to start Azurite image with configurations:
`-d /workspace/debug.log` enables the debug log at `/workspace/debug.log` inside the docker instance. `debug.log` will also be mapped to `c:/azurite/debug.log` on the host machine because of the docker volume mapping.
`--blobPort 8888` makes Azurite blob service listen to port 8888, while `-p 8888:8888` redirects requests from host machine's port 8888 to docker instance.
`--blobPort 7777` makes Azurite blob service listen to port 7777, while `-p 7777:7777` redirects requests from host machine's port 7777 to docker instance.
`--blobHost 0.0.0.0` defines blob service listening endpoint to accept requests from host machine.
`--queuePort 9999` makes Azurite queue service listen to port 9999, while `-p 9999:9999` redirects requests from host machine's port 9999 to docker instance.
`--queuePort 8888` makes Azurite queue service listen to port 8888, while `-p 8888:8888` redirects requests from host machine's port 8888 to docker instance.
`--queueHost 0.0.0.0` defines queue service listening endpoint to accept requests from host machine.
`--tablePort 9999` makes Azurite table service listen to port 9999, while `-p 9999:9999` redirects requests from host machine's port 9999 to docker instance.
`--tableHost 0.0.0.0` defines table service listening endpoint to accept requests from host machine.
`--loose` enables loose mode, which ignores unsupported headers and parameters.
`--skipApiVersionCheck` skips the request API version check.
@ -208,6 +226,7 @@ You can customize the listening address per your requirements.
```cmd
--blobHost 127.0.0.1
--queueHost 127.0.0.1
--tableHost 127.0.0.1
```
#### Allow Accepting Requests from Remote (potentially unsafe)
@ -215,6 +234,7 @@ You can customize the listening address per your requirements.
```cmd
--blobHost 0.0.0.0
--queueHost 0.0.0.0
--tableHost 0.0.0.0
```
### Listening Port Configuration
@ -229,6 +249,7 @@ You can customize the listening port per your requirements.
```cmd
--blobPort 8888
--queuePort 9999
--tablePort 11111
```
#### Let System Auto Select an Available Port
@ -236,6 +257,7 @@ You can customize the listening port per your requirements.
```cmd
--blobPort 0
--queuePort 0
--tablePort 0
```
> Note: The port in use is displayed on Azurite startup.
@ -262,7 +284,7 @@ Optional. By default Azurite will display access log in console. **Disable** it
### Debug Log Configuration
Optional. Debug log includes detailed information on every request and exception stack traces.
Enable it by providing a valid local file path for the debug log destination.
```cmd
@ -521,7 +543,7 @@ You can pass the following connection strings to the [Azure SDKs](https://aka.ms
The full connection string is:
```bash
DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;QueueEndpoint=http://127.0.0.1:10001/devstoreaccount1;
DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;QueueEndpoint=http://127.0.0.1:10001/devstoreaccount1;TableEndpoint=http://127.0.0.1:10002/devstoreaccount1;
```
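As a quick sketch, the full connection string can be passed straight to a client constructor. The example below uses the legacy `azure-storage` package (the table client this repository's tests depend on); the table name is hypothetical:

```typescript
import * as azure from "azure-storage";

// The full Azurite connection string from above, including the table endpoint.
const connectionString =
  "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;" +
  "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;" +
  "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;" +
  "QueueEndpoint=http://127.0.0.1:10001/devstoreaccount1;" +
  "TableEndpoint=http://127.0.0.1:10002/devstoreaccount1;";

// createTableService resolves the TableEndpoint part of the connection string.
const tableService = azure.createTableService(connectionString);

// "mytable" is a hypothetical table name used only for illustration.
tableService.createTableIfNotExists("mytable", (error, result) => {
  if (error) {
    console.error("Failed to reach the Azurite table service:", error.message);
  } else {
    console.log("Table created or already exists:", result);
  }
});
```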
For the blob service only, the full connection string is:
@ -754,7 +776,7 @@ Azurite V3 selected TypeScript as its programming language, as this facilitates
### Features Scope
Legacy Azurite V2 supports Azure Storage Blob, Queue and Table services.
Azurite V3 currently only supports Azure Storage blob service. Queue service is supported after V3.2.0-preview.
Table service support is currently under discussion.
@ -762,14 +784,15 @@ Azurite V3 supports features from Azure Storage API version 2020-06-12, and will
## TypeScript Server Code Generator
Azurite V3 leverages a TypeScript Node.js Server Code Generator to generate the majority of code from Azure Storage REST APIs swagger specification.
Currently, the generator project is private, under development and only used by Azurite V3.
We have plans to make the TypeScript server generator public after Azurite V3 releases.
All the generated code is kept in `generated` folder, including the generated middleware, request and response models.
## Support Matrix
Latest release targets the **2020-06-12** API version for the **blob** service.
Detailed support matrix:
- Supported Vertical Features
@ -853,6 +876,28 @@ Detailed support matrix:
- SharedKey Lite
- Delegation SAS
The latest version supports the **2019-12-12** API version for the **table** service.
Detailed support matrix:
- Supported Vertical Features
- SharedKey Authentication
- Shared Access Signature Account Level
- Shared Access Signature Service Level
- Supported REST APIs
- List Tables
- Set Service Properties
- Get Service Properties
- Get Stats
- Create Table
- Get Table ACL
- Set Table ACL
- Delete Table
- Update Entity
- Query Entities
- Merge Entity
- Delete Entity
- Insert Entity
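As a rough sketch of exercising a few of the table APIs above (Create Table, Insert Entity, Query Entities) against a local Azurite endpoint, again with the legacy `azure-storage` package; the table name and entity values are hypothetical:

```typescript
import * as azure from "azure-storage";

// Azurite's well-known development storage account, pointed at the local table endpoint.
const tableService = azure.createTableService(
  "devstoreaccount1",
  "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==",
  "http://127.0.0.1:10002/devstoreaccount1"
);
const entGen = azure.TableUtilities.entityGenerator;

// Hypothetical entity; PartitionKey and RowKey are required by the Table API.
const entity = {
  PartitionKey: entGen.String("part1"),
  RowKey: entGen.String("row1"),
  message: entGen.String("hello from Azurite")
};

tableService.createTableIfNotExists("sample", (createError) => {
  if (createError) throw createError;
  tableService.insertEntity("sample", entity, (insertError) => {
    if (insertError) throw insertError;
    // Query Entities with a PartitionKey filter.
    const query = new azure.TableQuery().where("PartitionKey eq ?", "part1");
    tableService.queryEntities(
      "sample",
      query,
      null as any, // no continuation token for the first page
      (queryError, results) => {
        if (queryError) throw queryError;
        console.log(`Queried ${results.entries.length} entities`);
      }
    );
  });
});
```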
## License
This project is licensed under MIT.
@ -861,8 +906,8 @@ This project is licensed under MIT.
> Go to [GitHub project](https://github.com/Azure/Azurite/projects) page or [GitHub issues](https://github.com/Azure/Azurite/issues) for the milestone and TODO items we use for tracking upcoming features and bug fixes.
We are currently working on Azurite V3 to implement the remaining Azure Storage REST APIs.
We finished the basic structure and majority of features in Blob Storage, as can be seen in the support matrix.
The detailed work items are also tracked in GitHub repository projects and issues.
Any contributions and suggestions for Azurite V3 are welcome; please go to [CONTRIBUTION.md](https://github.com/Azure/Azurite/blob/master/CONTRIBUTION.md) for detailed contribution guidelines. Alternatively, you can open GitHub issues voting for any missing features in Azurite V3.

View file

@ -6,6 +6,7 @@
trigger:
- master
- table
- dev
jobs:
@ -220,6 +221,93 @@ jobs:
displayName: "npm run test:queue"
env: {}
- job: tabletestlinux
displayName: Table Test Linux
pool:
vmImage: "ubuntu-16.04"
strategy:
matrix:
node_8_x:
node_version: 8.x
node_10_x:
node_version: 10.x
node_12_x:
node_version: 12.x
steps:
- task: NodeTool@0
inputs:
versionSpec: "$(node_version)"
displayName: "Install Node.js"
- script: |
npm ci
workingDirectory: "./"
displayName: "npm ci"
- script: |
npm run test:table
workingDirectory: "./"
displayName: "npm run test:table"
env: {}
- job: tabletestwin
displayName: Table Test Windows
pool:
vmImage: "vs2017-win2016"
strategy:
matrix:
node_8_x:
node_version: 8.x
node_10_x:
node_version: 10.x
node_12_x:
node_version: 12.x
steps:
- task: NodeTool@0
inputs:
versionSpec: "$(node_version)"
displayName: "Install Node.js"
- script: |
npm ci
workingDirectory: "./"
displayName: "npm ci"
- script: |
npm run test:table
workingDirectory: "./"
displayName: "npm run test:table"
env: {}
- job: tabletestmac
displayName: Table Test Mac
pool:
vmImage: "macOS-10.14"
strategy:
matrix:
node_8_x:
node_version: 8.x
node_10_x:
node_version: 10.x
node_12_x:
node_version: 12.x
steps:
- task: NodeTool@0
inputs:
versionSpec: "$(node_version)"
displayName: "Install Node.js"
- script: |
npm ci
workingDirectory: "./"
displayName: "npm ci"
- script: |
npm run test:table
workingDirectory: "./"
displayName: "npm run test:table"
env: {}
- job: azuritenodejslinux
displayName: Azurite Linux
pool:
@ -383,6 +471,7 @@ jobs:
docker run xstoreazurite.azurecr.io/public/azure-storage/azurite:latest azurite -v
docker run xstoreazurite.azurecr.io/public/azure-storage/azurite:latest azurite-blob -v
docker run xstoreazurite.azurecr.io/public/azure-storage/azurite:latest azurite-queue -v
docker run xstoreazurite.azurecr.io/public/azure-storage/azurite:latest azurite-table -v
workingDirectory: "./"
displayName: "Validate docker image"

279
package-lock.json generated
View file

@ -1,6 +1,6 @@
{
"name": "azurite",
"version": "3.11.0",
"version": "3.11.0-table-alpha.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -555,12 +555,6 @@
"@types/node": "*"
}
},
"@types/parse-json": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",
"integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==",
"dev": true
},
"@types/range-parser": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.3.tgz",
@ -717,6 +711,12 @@
"resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
"integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8="
},
"arg": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true
},
"argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
@ -826,9 +826,9 @@
"dev": true
},
"autorest": {
"version": "2.0.4283",
"resolved": "https://registry.npmjs.org/autorest/-/autorest-2.0.4283.tgz",
"integrity": "sha512-3jU9yDR71d2thRnKdPH03DaWbla1Iqnrx2rqUUwbMrb4di36a8+nttCQaTWG7biWPJc6Ke6zSSTzFH0uhya+Nw==",
"version": "2.0.4413",
"resolved": "https://registry.npmjs.org/autorest/-/autorest-2.0.4413.tgz",
"integrity": "sha512-Ttx/O6Yag5q8fpJDvllgf/joLuVPBySkbAqDF7+9jAmscCPQplYjA9H/N12bwkhh+6YR3ugWZHQq3NTN33hkAg==",
"dev": true
},
"aws-sign2": {
@ -870,6 +870,72 @@
}
}
},
"azure-storage": {
"version": "2.10.3",
"resolved": "https://registry.npmjs.org/azure-storage/-/azure-storage-2.10.3.tgz",
"integrity": "sha512-IGLs5Xj6kO8Ii90KerQrrwuJKexLgSwYC4oLWmc11mzKe7Jt2E5IVg+ZQ8K53YWZACtVTMBNO3iGuA+4ipjJxQ==",
"requires": {
"browserify-mime": "~1.2.9",
"extend": "^3.0.2",
"json-edm-parser": "0.1.2",
"md5.js": "1.3.4",
"readable-stream": "~2.0.0",
"request": "^2.86.0",
"underscore": "~1.8.3",
"uuid": "^3.0.0",
"validator": "~9.4.1",
"xml2js": "0.2.8",
"xmlbuilder": "^9.0.7"
},
"dependencies": {
"process-nextick-args": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz",
"integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M="
},
"readable-stream": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz",
"integrity": "sha1-j5A0HmilPMySh4jaz80Rs265t44=",
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.1",
"isarray": "~1.0.0",
"process-nextick-args": "~1.0.6",
"string_decoder": "~0.10.x",
"util-deprecate": "~1.0.1"
}
},
"sax": {
"version": "0.5.8",
"resolved": "https://registry.npmjs.org/sax/-/sax-0.5.8.tgz",
"integrity": "sha1-1HLbIo6zMcJQaw6MFVJK25OdEsE="
},
"string_decoder": {
"version": "0.10.31",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
"integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
},
"validator": {
"version": "9.4.1",
"resolved": "https://registry.npmjs.org/validator/-/validator-9.4.1.tgz",
"integrity": "sha512-YV5KjzvRmSyJ1ee/Dm5UED0G+1L4GZnLN3w6/T+zZm8scVua4sOhYKWTUrKa0H/tMiJyO9QLHMPN+9mB/aMunA=="
},
"xml2js": {
"version": "0.2.8",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.2.8.tgz",
"integrity": "sha1-m4FpCTFjH/CdGVdUn69U9PmAs8I=",
"requires": {
"sax": "0.5.x"
}
},
"xmlbuilder": {
"version": "9.0.7",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
"integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0="
}
}
},
"babel-code-frame": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz",
@ -1918,6 +1984,11 @@
"integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
"dev": true
},
"browserify-mime": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/browserify-mime/-/browserify-mime-1.2.9.tgz",
"integrity": "sha1-rrGvKN5sDXpqLOQK22j/GEIq8x8="
},
"buffer-crc32": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
@ -3352,6 +3423,38 @@
}
}
},
"hash-base": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz",
"integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==",
"requires": {
"inherits": "^2.0.4",
"readable-stream": "^3.6.0",
"safe-buffer": "^5.2.0"
},
"dependencies": {
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"requires": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
}
}
},
"he": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz",
@ -3895,18 +3998,20 @@
"integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=",
"dev": true
},
"json-edm-parser": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/json-edm-parser/-/json-edm-parser-0.1.2.tgz",
"integrity": "sha1-HmCw/vG8CvZ7wNFG393lSGzWFbQ=",
"requires": {
"jsonparse": "~1.2.0"
}
},
"json-parse-better-errors": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz",
"integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==",
"dev": true
},
"json-parse-even-better-errors": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
"dev": true
},
"json-schema": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
@ -3928,6 +4033,11 @@
"integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=",
"dev": true
},
"jsonparse": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.2.0.tgz",
"integrity": "sha1-XAxWhRBxYOcv50ib3eoLRMK8Z70="
},
"jsonwebtoken": {
"version": "8.5.1",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
@ -4015,12 +4125,6 @@
"resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
"integrity": "sha1-wuep93IJTe6dNCAq6KzORoeHVYA="
},
"lines-and-columns": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz",
"integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=",
"dev": true
},
"linkify-it": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz",
@ -4093,6 +4197,36 @@
"integrity": "sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4=",
"dev": true
},
"listr-update-renderer": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz",
"integrity": "sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA==",
"dev": true,
"requires": {
"chalk": "^1.1.3",
"cli-truncate": "^0.2.1",
"elegant-spinner": "^1.0.1",
"figures": "^1.7.0",
"indent-string": "^3.0.0",
"log-symbols": "^1.0.2",
"log-update": "^2.3.0",
"strip-ansi": "^3.0.1"
},
"dependencies": {
"p-map": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz",
"integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==",
"dev": true
}
}
},
"listr-silent-renderer": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz",
"integrity": "sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4=",
"dev": true
},
"listr-update-renderer": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz",
@ -4285,9 +4419,9 @@
}
},
"make-error": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==",
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true
},
"map-age-cleaner": {
@ -4336,6 +4470,15 @@
"escape-string-regexp": "^1.0.4"
}
},
"md5.js": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz",
"integrity": "sha1-6b296UogpawYsENA/Fdk1bCdkB0=",
"requires": {
"hash-base": "^3.0.0",
"inherits": "^2.0.1"
}
},
"mdurl": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz",
@ -4698,6 +4841,15 @@
"which": "^1.2.10"
}
},
"npm-path": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/npm-path/-/npm-path-2.0.4.tgz",
"integrity": "sha512-IFsj0R9C7ZdR5cP+ET342q77uSRdtWOlWpih5eC+lu29tIDbNEgDbzgVJ5UFvYHWhxDZ5TFkJafFioO0pPQjCw==",
"dev": true,
"requires": {
"which": "^1.2.10"
}
},
"npm-run-path": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz",
@ -4927,23 +5079,6 @@
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
"dev": true
},
"parent-module": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
"dev": true,
"requires": {
"callsites": "^3.0.0"
},
"dependencies": {
"callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
"dev": true
}
}
},
"parse-json": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz",
@ -5097,9 +5232,9 @@
"dev": true
},
"prettier": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.1.2.tgz",
"integrity": "sha512-16c7K+x4qVlJg9rEbXl7HEGmQyZlG4R9AgP+oHKRMsMsuk8s+ATStlf1NpDqyBI1HpVyfjLOeMhH2LvuNvV5Vg==",
"dev": true
},
"prettier-tslint": {
@ -6117,6 +6252,11 @@
}
}
},
"to-readable-stream": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-2.1.0.tgz",
"integrity": "sha512-o3Qa6DGg1CEXshSdvWNX2sN4QHqg03SPq7U6jPXRahlQdl5dK8oXjkU/2/sGrnOZKeGV1zLSO8qPwyKklPPE7w=="
},
"to-regex": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz",
@ -6185,25 +6325,22 @@
}
},
"ts-node": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz",
"integrity": "sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==",
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-9.0.0.tgz",
"integrity": "sha512-/TqB4SnererCDR/vb4S/QvSZvzQMJN8daAslg7MeaiHvD8rDZsSfXmNeNumyZZzMned72Xoq/isQljYSt8Ynfg==",
"dev": true,
"requires": {
"arrify": "^1.0.0",
"buffer-from": "^1.1.0",
"diff": "^3.1.0",
"arg": "^4.1.0",
"diff": "^4.0.1",
"make-error": "^1.1.1",
"minimist": "^1.2.0",
"mkdirp": "^0.5.1",
"source-map-support": "^0.5.6",
"yn": "^2.0.0"
"source-map-support": "^0.5.17",
"yn": "3.1.1"
},
"dependencies": {
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
"dev": true
},
"source-map": {
@ -6213,9 +6350,9 @@
"dev": true
},
"source-map-support": {
"version": "0.5.13",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz",
"integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==",
"version": "0.5.19",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
"integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
"dev": true,
"requires": {
"buffer-from": "^1.0.0",
@ -6325,9 +6462,9 @@
}
},
"typescript": {
"version": "3.6.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.3.tgz",
"integrity": "sha512-N7bceJL1CtRQ2RiG0AQME13ksR7DiuQh/QehubYcghzv20tnh+MQnQIuJddTmsbqYj+dztchykemz0zFzlvdQw==",
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.5.tgz",
"integrity": "sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ==",
"dev": true
},
"uc.micro": {
@ -6655,12 +6792,6 @@
"resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz",
"integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI="
},
"yaml": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz",
"integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==",
"dev": true
},
"yargs": {
"version": "11.1.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.1.tgz",
@ -6794,9 +6925,9 @@
}
},
"yn": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz",
"integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
"dev": true
},
"yup": {

View file

@ -3,7 +3,7 @@
"displayName": "Azurite",
"description": "An open source Azure Storage API compatible server",
"icon": "icon.png",
"version": "3.11.0",
"version": "3.11.0-table-alpha.1",
"publisher": "Azurite",
"categories": [
"Other"
@ -12,7 +12,8 @@
"bin": {
"azurite": "./dist/src/azurite.js",
"azurite-blob": "./dist/src/blob/main.js",
"azurite-queue": "./dist/src/queue/main.js"
"azurite-queue": "./dist/src/queue/main.js",
"azurite-table": "./dist/src/table/main.js"
},
"engines": {
"node": ">=8.0.0",
@ -21,6 +22,7 @@
"dependencies": {
"@azure/ms-rest-js": "^1.5.0",
"args": "^5.0.1",
"azure-storage": "^2.10.3",
"etag": "^1.8.1",
"express": "^4.16.4",
"jsonwebtoken": "^8.5.1",
@ -31,6 +33,7 @@
"rimraf": "^2.6.3",
"sequelize": "^6.3.0",
"tedious": "^9.2.1",
"to-readable-stream": "^2.1.0",
"tslib": "^1.9.3",
"uri-templates": "^0.2.0",
"uuid": "^3.3.2",
@ -57,18 +60,18 @@
"@types/validator": "^10.11.3",
"@types/vscode": "^1.39.0",
"@types/xml2js": "^0.4.3",
"autorest": "^2.0.4283",
"autorest": "^2.0.4413",
"cross-env": "^6.0.3",
"cross-var": "^1.1.0",
"husky": "^4.3.5",
"lint-staged": "^8.2.1",
"mocha": "^5.2.0",
"prettier": "^1.16.4",
"prettier": "^2.1.2",
"prettier-tslint": "^0.4.2",
"ts-mockito": "^2.6.1",
"ts-node": "^7.0.1",
"ts-node": "^9.0.0",
"tslint": "^6.1.3",
"typescript": "^3.1.4",
"typescript": "^4.0.5",
"vsce": "^1.64.0"
},
"activationEvents": [
@ -212,10 +215,12 @@
"docker:build:internal": "npm run docker:prebuild && cross-var docker build --no-cache --rm -f \"Dockerfile\" -t xstoreazurite.azurecr.io/internal/azure-storage/azurite:$npm_package_version . && cross-var docker tag xstoreazurite.azurecr.io/internal/azure-storage/azurite:$npm_package_version xstoreazurite.azurecr.io/internal/azure-storage/azurite:latest",
"docker:publish": "cross-var docker push xstoreazurite.azurecr.io/public/azure-storage/azurite:$npm_package_version",
"docker:publish:internal": "cross-var docker push xstoreazurite.azurecr.io/internal/azure-storage/azurite:$npm_package_version",
"prepare": "npm run build",
"build": "tsc",
"build:autorest:debug": "autorest ./swagger/blob.md --typescript --typescript.debugger --use=E:/GitHub/XiaoningLiu/autorest.typescript.server",
"build:autorest:blob": "autorest ./swagger/blob.md --typescript --use=S:/GitHub/XiaoningLiu/autorest.typescript.server",
"build:autorest:queue": "autorest ./swagger/queue.md --typescript --use=S:/GitHub/XiaoningLiu/autorest.typescript.server",
"build:autorest:table": "autorest ./swagger/table.md --typescript --use=S:/GitHub/XiaoningLiu/autorest.typescript.server",
"watch": "tsc -watch -p ./",
"blob": "node -r ts-node/register src/blob/main.ts",
"queue": "node -r ts-node/register src/queue/main.ts",
@ -226,6 +231,7 @@
"test:blob:sql": "npm run lint && cross-env cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 AZURITE_TEST_DB=mysql://root:my-secret-pw@127.0.0.1:3306/azurite_blob_test mocha --compilers ts-node/register --no-timeouts --grep @sql --recursive tests/blob/*.test.ts tests/blob/**/*.test.ts",
"test:blob:sql:ci": "npm run lint && cross-env cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 AZURITE_TEST_DB=mysql://root:my-secret-pw@127.0.0.1:13306/azurite_blob_test mocha --compilers ts-node/register --no-timeouts --grep @sql --recursive tests/blob/*.test.ts tests/blob/**/*.test.ts",
"test:queue": "npm run lint && cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 mocha --compilers ts-node/register --no-timeouts --recursive tests/queue/*.test.ts tests/queue/**/*.test.ts",
"test:table": "npm run lint && cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 mocha --compilers ts-node/register --no-timeouts --recursive tests/table/*.test.ts tests/table/apis/*.test.ts tests/table/apis/**/*.test.ts",
"clean": "rimraf dist typings *.log coverage __testspersistence__ temp __testsstorage__ .nyc_output debug.log *.vsix *.tgz",
"clean:deep": "npm run clean && rimraf debug.log __*",
"validate:npmpack:win": "npm install && npm run build && npm pack && cross-var npm install -g azurite-$npm_package_version.tgz && azurite -v && azurite-blob -v && azurite-queue -v",

View file

@ -7,6 +7,7 @@ import { promisify } from "util";
import Environment from "./common/Environment";
// tslint:disable-next-line:ordered-imports
import { BlobServerFactory } from "./blob/BlobServerFactory";
import * as Logger from "./common/Logger";
import QueueConfiguration from "./queue/QueueConfiguration";
import QueueServer from "./queue/QueueServer";
@ -19,18 +20,26 @@ import {
import SqlBlobServer from "./blob/SqlBlobServer";
import BlobServer from "./blob/BlobServer";
import TableConfiguration from "./table/TableConfiguration";
import TableServer from "./table/TableServer";
import { DEFAULT_TABLE_LOKI_DB_PATH } from "./table/utils/constants";
// tslint:disable:no-console
const accessAsync = promisify(access);
function shutdown(
blobServer: BlobServer | SqlBlobServer,
queueServer: QueueServer
queueServer: QueueServer,
tableServer: TableServer
) {
const blobBeforeCloseMessage = `Azurite Blob service is closing...`;
const blobAfterCloseMessage = `Azurite Blob service successfully closed`;
const queueBeforeCloseMessage = `Azurite Queue service is closing...`;
const queueAfterCloseMessage = `Azurite Queue service successfully closed`;
const tableBeforeCloseMessage = `Azurite Table service is closing...`;
const tableAfterCloseMessage = `Azurite Table service successfully closed`;
console.log(blobBeforeCloseMessage);
blobServer.close().then(() => {
@ -41,6 +50,11 @@ function shutdown(
queueServer.close().then(() => {
console.log(queueAfterCloseMessage);
});
console.log(tableBeforeCloseMessage);
tableServer.close().then(() => {
console.log(tableAfterCloseMessage);
});
}
/**
@ -68,6 +82,7 @@ async function main() {
location,
DEFAULT_QUEUE_PERSISTENCE_PATH
);
const queueConfig = new QueueConfiguration(
env.queueHost(),
env.queuePort(),
@ -86,15 +101,34 @@ async function main() {
env.oauth()
);
const tableConfig = new TableConfiguration(
env.tableHost(),
env.tablePort(),
join(location, DEFAULT_TABLE_LOKI_DB_PATH),
(await env.debug()) !== undefined,
!env.silent(),
undefined,
await env.debug(),
env.loose(),
env.skipApiVersionCheck(),
env.cert(),
env.key(),
env.pwd(),
env.oauth()
);
// We use logger singleton as global debugger logger to track detailed outputs cross layers
// Note that, debug log is different from access log which is only available in request handler layer to
// track every request. Access log is not singleton, and initialized in specific RequestHandlerFactory implementations
// Enable debug log by default before first release for debugging purpose
Logger.configLogger(blobConfig.enableDebugLog, blobConfig.debugLogFilePath);
// Create server instance
// Create queue server instance
const queueServer = new QueueServer(queueConfig);
// Create table server instance
const tableServer = new TableServer(tableConfig);
// Start server
console.log(
`Azurite Blob service is starting at ${blobConfig.getHttpServerAddress()}`
@ -113,16 +147,24 @@ async function main() {
`Azurite Queue service is successfully listening at ${queueServer.getHttpServerAddress()}`
);
// Handle close event
// Start server
console.log(
`Azurite Table service is starting at ${tableConfig.getHttpServerAddress()}`
);
await tableServer.start();
console.log(
`Azurite Table service is successfully listening at ${tableServer.getHttpServerAddress()}`
);
// Handle close event
process
.once("message", (msg) => {
if (msg === "shutdown") {
shutdown(blobServer, queueServer);
shutdown(blobServer, queueServer, tableServer);
}
})
.once("SIGINT", () => shutdown(blobServer, queueServer))
.once("SIGTERM", () => shutdown(blobServer, queueServer));
.once("SIGINT", () => shutdown(blobServer, queueServer, tableServer))
.once("SIGTERM", () => shutdown(blobServer, queueServer, tableServer));
}
main().catch((err) => {

View file

@ -1,11 +1,11 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import { computeHMACSHA256, getURLQueries } from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import { HeaderConstants } from "../utils/constants";
import { computeHMACSHA256, getURLQueries } from "../utils/utils";
import IAuthenticator from "./IAuthenticator";
export default class BlobSharedKeyAuthenticator implements IAuthenticator {

View file

@ -1,6 +1,9 @@
import { SasIPRange } from "@azure/storage-blob";
import { computeHMACSHA256, truncatedISO8061Date } from "../utils/utils";
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import AccountSASPermissions from "./AccountSASPermissions";
import AccountSASResourceTypes from "./AccountSASResourceTypes";
import AccountSASServices from "./AccountSASServices";

View file

@ -1,4 +1,7 @@
import { computeHMACSHA256, truncatedISO8061Date } from "../utils/utils";
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import { BlobSASResourceType } from "./BlobSASResourceType";
import { SASProtocol } from "./IAccountSASSignatureValues";
import { IIPRange, ipRangeToString } from "./IIPRange";

View file

@ -43,6 +43,10 @@ export default class ExpressResponseAdapter implements IResponse {
return this;
}
public getHeader(field: string): number | string | string[] | undefined {
return this.res.getHeader(field);
}
public getHeaders(): OutgoingHttpHeaders {
return this.res.getHeaders();
}

View file

@ -7,6 +7,7 @@ export type HttpMethod =
| "CONNECT"
| "OPTIONS"
| "TRACE"
| "MERGE"
| "PATCH";
export default interface IRequest {

View file

@ -9,6 +9,7 @@ export default interface IResponse {
field: string,
value?: string | string[] | undefined | number | boolean
): IResponse;
getHeader(field: string): number | string | string[] | undefined;
getHeaders(): OutgoingHttpHeaders;
headersSent(): boolean;
setContentType(value: string | undefined): IResponse;

View file

@ -90,11 +90,24 @@ function isRequestAgainstOperation(
return [false, metConditionsNum];
}
// Validate HTTP method
if (req.getMethod() !== spec.httpMethod) {
return [false, metConditionsNum++];
const xHttpMethod = req.getHeader("X-HTTP-Method");
let method = req.getMethod();
if (xHttpMethod && xHttpMethod.length > 0) {
const value = xHttpMethod.trim();
if (
value === "GET" ||
value === "MERGE" ||
value === "PATCH" ||
value === "DELETE"
) {
method = value;
}
}
// Validate HTTP method
if (method !== spec.httpMethod) {
return [false, metConditionsNum++];
}
// Validate URL path
const path = spec.path
? spec.path.startsWith("/")
@ -123,7 +136,7 @@ function isRequestAgainstOperation(
if (
queryParameter.mapper.type.name === "Enum" &&
queryParameter.mapper.type.allowedValues.findIndex(val => {
queryParameter.mapper.type.allowedValues.findIndex((val) => {
return val === queryValue;
}) < 0
) {
@ -153,7 +166,7 @@ function isRequestAgainstOperation(
if (
headerParameter.mapper.type.name === "Enum" &&
headerParameter.mapper.type.allowedValues.findIndex(val => {
headerParameter.mapper.type.allowedValues.findIndex((val) => {
return val === headerValue;
}) < 0
) {
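For context on the override above: OData table clients commonly tunnel MERGE through POST with an `X-HTTP-Method` header, which the dispatcher now maps back to the logical verb. Below is a minimal sketch of such a request using `axios` (already a dependency of this project); the account, table, and entity keys are hypothetical, and a real request would also carry a SharedKey `Authorization` header, omitted here:

```typescript
import axios from "axios";

async function mergeEntity(): Promise<void> {
  // Hypothetical entity address on a local Azurite table endpoint.
  const url =
    "http://127.0.0.1:10002/devstoreaccount1/sample(PartitionKey='part1',RowKey='row1')";

  // POST plus "X-HTTP-Method: MERGE" is treated as a MERGE by the dispatcher above.
  const response = await axios.post(
    url,
    { message: "updated value" },
    {
      headers: {
        "X-HTTP-Method": "MERGE",
        "Content-Type": "application/json",
        Accept: "application/json;odata=nometadata"
      },
      validateStatus: () => true // inspect the status code instead of throwing
    }
  );
  console.log("Status:", response.status);
}

mergeEntity().catch(console.error);
```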

View file

@ -136,7 +136,10 @@ export async function deserialize(
const body = await readRequestIntoText(req);
logger.debug(
`deserialize(): Raw request body string is ${body}`,
`deserialize(): Raw request body string is (removed all empty characters) ${body.replace(
/\s/g,
""
)}`,
context.contextId
);
@ -321,6 +324,42 @@ export async function serialize(
logger.info(`Serializer: Start returning stream body.`, context.contextId);
}
// Serialize JSON bodies
if (
!spec.isXML &&
responseSpec.bodyMapper &&
responseSpec.bodyMapper.type.name !== "Stream"
) {
let body = spec.serializer.serialize(
responseSpec.bodyMapper!,
handlerResponse
);
// When the root element is a sequence type, wrap it with its xmlElementName because serialize() doesn't do that
if (responseSpec.bodyMapper!.type.name === "Sequence") {
const sequenceElementName = responseSpec.bodyMapper!.xmlElementName;
if (sequenceElementName !== undefined) {
const newBody = {} as any;
newBody[sequenceElementName] = body;
body = newBody;
}
}
if (!res.getHeader("content-type")) {
res.setContentType("application/json");
}
const jsonBody = JSON.stringify(body);
// TODO: Should send response in a serializer?
res.getBodyStream().write(jsonBody);
logger.debug(
`Serializer: Raw response body string is ${jsonBody}`,
context.contextId
);
logger.info(`Serializer: Start returning JSON body.`, context.contextId);
}
// Serialize stream body
// TODO: Move to end middleware for end tracking
if (
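To illustrate the sequence-wrapping step above with concrete (hypothetical) values: `serialize()` returns a bare array for `Sequence` mappers, so the block wraps it under the mapper's `xmlElementName` before JSON-encoding the response:

```typescript
// Hypothetical stand-ins for the serializer output and mapper metadata above.
const serialized = [{ TableName: "table1" }, { TableName: "table2" }];
const sequenceElementName = "value"; // assumed bodyMapper.xmlElementName

// Wrap the bare array under the element name, as the serializer block does.
const newBody: { [key: string]: unknown } = {};
newBody[sequenceElementName] = serialized;

// The JSON body written to the response stream:
// {"value":[{"TableName":"table1"},{"TableName":"table2"}]}
console.log(JSON.stringify(newBody));
```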

View file

@ -3,7 +3,7 @@ import URITemplate from "uri-templates";
export function isURITemplateMatch(url: string, template: string): boolean {
const uriTemplate = URITemplate(template);
// TODO: Fix the $ parsing issue where names such as the $logs container cannot be matched in strict mode
const result = (uriTemplate.fromUri as any)(url, { strict: true });
const result = (uriTemplate.fromUri as any)(url, { strict: false });
if (result === undefined) {
return false;
}
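A rough sketch of the behavioral difference this change targets; the template and URL are hypothetical, and the options argument is cast through `any` here just as in the code above, since it is not part of the package's public typings:

```typescript
import URITemplate from "uri-templates";

const template = URITemplate("/{account}/{container}");

// Per the TODO above, strict matching can reject $-prefixed names such as
// the $logs container, while non-strict matching lets them parse.
const strict = (template.fromUri as any)("/devstoreaccount1/$logs", {
  strict: true
});
const loose = (template.fromUri as any)("/devstoreaccount1/$logs", {
  strict: false
});

console.log("strict:", strict); // may be undefined (no match)
console.log("loose:", loose); // { account: "devstoreaccount1", container: "$logs" }
```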

View file

@ -1,4 +1,5 @@
import { convertRawHeadersToMetadata } from "../../common/utils/utils";
import { getMD5FromStream, newEtag } from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import NotImplementedError from "../errors/NotImplementedError";
import StorageErrorFactory from "../errors/StorageErrorFactory";
@ -12,7 +13,6 @@ import {
MAX_APPEND_BLOB_BLOCK_COUNT,
MAX_APPEND_BLOB_BLOCK_SIZE
} from "../utils/constants";
import { getMD5FromStream, newEtag } from "../utils/utils";
import BaseHandler from "./BaseHandler";
export default class AppendBlobHandler extends BaseHandler

View file

@ -3,7 +3,10 @@ import axios, { AxiosResponse } from "axios";
import { URL } from "url";
import IExtentStore from "../../common/persistence/IExtentStore";
import { convertRawHeadersToMetadata } from "../../common/utils/utils";
import {
convertRawHeadersToMetadata,
getMD5FromStream
} from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import NotImplementedError from "../errors/NotImplementedError";
import StorageErrorFactory from "../errors/StorageErrorFactory";
@ -24,8 +27,7 @@ import {
} from "../utils/constants";
import {
deserializePageBlobRangeHeader,
deserializeRangeHeader,
getMD5FromStream
deserializeRangeHeader
} from "../utils/utils";
import BaseHandler from "./BaseHandler";
import IPageBlobRangesManager from "./IPageBlobRangesManager";
@ -993,7 +995,7 @@ export default class BlobHandler extends BaseHandler implements IBlobHandler {
} else {
bodyGetter = async () => {
return this.extentStore.readExtents(
blocks.map(block => block.persistency),
blocks.map((block) => block.persistency),
rangeStart,
rangeEnd + 1 - rangeStart,
context.contextId
@ -1113,7 +1115,7 @@ export default class BlobHandler extends BaseHandler implements IBlobHandler {
const bodyGetter = async () => {
return this.extentStore.readExtents(
ranges.map(value => value.persistency),
ranges.map((value) => value.persistency),
0,
contentLength,
context.contextId

View file

@ -1,4 +1,9 @@
import { convertRawHeadersToMetadata } from "../../common/utils/utils";
import {
getMD5FromStream,
getMD5FromString,
newEtag
} from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import NotImplementedError from "../errors/NotImplementedError";
import StorageErrorFactory from "../errors/StorageErrorFactory";
@ -8,7 +13,6 @@ import IBlockBlobHandler from "../generated/handlers/IBlockBlobHandler";
import { parseXML } from "../generated/utils/xml";
import { BlobModel, BlockModel } from "../persistence/IBlobMetadataStore";
import { BLOB_API_VERSION } from "../utils/constants";
import { getMD5FromStream, getMD5FromString, newEtag } from "../utils/utils";
import BaseHandler from "./BaseHandler";
/**

View file

@ -1,4 +1,4 @@
import { convertRawHeadersToMetadata } from "../../common/utils/utils";
import { convertRawHeadersToMetadata, newEtag } from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import * as Models from "../generated/artifacts/models";
import Context from "../generated/Context";
@ -9,7 +9,7 @@ import {
EMULATOR_ACCOUNT_SKUNAME
} from "../utils/constants";
import { DEFAULT_LIST_BLOBS_MAX_RESULTS } from "../utils/constants";
import { newEtag, removeQuotationFromListBlobEtag } from "../utils/utils";
import { removeQuotationFromListBlobEtag } from "../utils/utils";
import BaseHandler from "./BaseHandler";
/**

View file

@ -1,5 +1,5 @@
import IExtentStore from "../../common/persistence/IExtentStore";
import { convertRawHeadersToMetadata } from "../../common/utils/utils";
import { convertRawHeadersToMetadata, newEtag } from "../../common/utils/utils";
import BlobStorageContext from "../context/BlobStorageContext";
import NotImplementedError from "../errors/NotImplementedError";
import StorageErrorFactory from "../errors/StorageErrorFactory";
@ -13,7 +13,7 @@ import IBlobMetadataStore, {
BlobModel
} from "../persistence/IBlobMetadataStore";
import { BLOB_API_VERSION } from "../utils/constants";
import { deserializePageBlobRangeHeader, newEtag } from "../utils/utils";
import { deserializePageBlobRangeHeader } from "../utils/utils";
import BaseHandler from "./BaseHandler";
import IPageBlobRangesManager from "./IPageBlobRangesManager";

View file

@ -7,6 +7,7 @@ import {
convertDateTimeStringMsTo7Digital,
rimrafAsync
} from "../../common/utils/utils";
import { newEtag } from "../../common/utils/utils";
import { validateReadConditions } from "../conditions/ReadConditionalHeadersValidator";
import {
validateSequenceNumberWriteConditions,
@ -32,7 +33,6 @@ import {
DEFAULT_LIST_CONTAINERS_MAX_RESULTS,
MAX_APPEND_BLOB_BLOCK_COUNT
} from "../utils/constants";
import { newEtag } from "../utils/utils";
import BlobReferredExtentsAsyncIterator from "./BlobReferredExtentsAsyncIterator";
import IBlobMetadataStore, {
AcquireBlobLeaseResponse,
@ -121,7 +121,7 @@ export default class LokiBlobMetadataStore
await new Promise<void>((resolve, reject) => {
stat(this.lokiDBPath, (statError, stats) => {
if (!statError) {
this.db.loadDatabase({}, dbError => {
this.db.loadDatabase({}, (dbError) => {
if (dbError) {
reject(dbError);
} else {
@ -169,7 +169,7 @@ export default class LokiBlobMetadataStore
}
await new Promise((resolve, reject) => {
this.db.saveDatabase(err => {
this.db.saveDatabase((err) => {
if (err) {
reject(err);
} else {
@ -190,7 +190,7 @@ export default class LokiBlobMetadataStore
*/
public async close(): Promise<void> {
await new Promise<void>((resolve, reject) => {
this.db.close(err => {
this.db.close((err) => {
if (err) {
reject(err);
} else {
@ -331,13 +331,13 @@ export default class LokiBlobMetadataStore
.chain()
.find(query)
.find(query2)
.limit(maxResults + 1)
.simplesort("name")
.limit(maxResults + 1)
.data();
if (docs.length <= maxResults) {
return [
docs.map(doc => {
docs.map((doc) => {
return LeaseFactory.createLeaseState(
new ContainerLeaseAdapter(doc),
context
@ -350,7 +350,7 @@ export default class LokiBlobMetadataStore
const nextMarker = docs[docs.length - 2].name;
docs.pop();
return [
docs.map(doc => {
docs.map((doc) => {
return LeaseFactory.createLeaseState(
new ContainerLeaseAdapter(doc),
context
@ -845,13 +845,13 @@ export default class LokiBlobMetadataStore
const docs = await coll
.chain()
.find(query)
.where(obj => {
.where((obj) => {
return obj.name > marker!;
})
.where(obj => {
.where((obj) => {
return includeSnapshots ? true : obj.snapshot.length === 0;
})
.where(obj => {
.where((obj) => {
return includeUncommittedBlobs ? true : obj.isCommitted;
})
.simplesort("name")
@ -867,7 +867,7 @@ export default class LokiBlobMetadataStore
if (docs.length <= maxResults) {
return [
docs.map(doc => {
docs.map((doc) => {
return LeaseFactory.createLeaseState(
new BlobLeaseAdapter(doc),
context
@ -879,7 +879,7 @@ export default class LokiBlobMetadataStore
const nextMarker = docs[docs.length - 2].name;
docs.pop();
return [
docs.map(doc => {
docs.map((doc) => {
return LeaseFactory.createLeaseState(
new BlobLeaseAdapter(doc),
context
@ -900,13 +900,13 @@ export default class LokiBlobMetadataStore
const docs = await coll
.chain()
.where(obj => {
.where((obj) => {
return obj.name > marker!;
})
.where(obj => {
.where((obj) => {
return includeSnapshots ? true : obj.snapshot.length === 0;
})
.where(obj => {
.where((obj) => {
return includeUncommittedBlobs ? true : obj.isCommitted;
})
.simplesort("name")
@ -2487,7 +2487,7 @@ export default class LokiBlobMetadataStore
doc.properties.contentLanguage = blob.properties.contentLanguage;
doc.properties.contentDisposition = blob.properties.contentDisposition;
doc.properties.contentLength = selectedBlockList
.map(block => block.size)
.map((block) => block.size)
.reduce((total, val) => {
return total + val;
}, 0);
@ -2501,7 +2501,7 @@ export default class LokiBlobMetadataStore
} else {
blob.committedBlocksInOrder = selectedBlockList;
blob.properties.contentLength = selectedBlockList
.map(block => block.size)
.map((block) => block.size)
.reduce((total, val) => {
return total + val;
}, 0);
@ -2961,7 +2961,7 @@ export default class LokiBlobMetadataStore
const coll = this.db.getCollection(this.BLOCKS_COLLECTION);
const blockDocs = coll
.chain()
.where(obj => {
.where((obj) => {
return obj.$loki > parseInt(marker, 10);
})
.simplesort("$loki")
@ -2969,11 +2969,11 @@ export default class LokiBlobMetadataStore
.data();
if (blockDocs.length <= maxResults) {
return [blockDocs.map(block => block.persistency), undefined];
return [blockDocs.map((block) => block.persistency), undefined];
} else {
blockDocs.pop();
const nextMarker = `${blockDocs[maxResults - 1].$loki}`;
return [blockDocs.map(block => block.persistency), nextMarker];
return [blockDocs.map((block) => block.persistency), nextMarker];
}
}

View file

@ -18,6 +18,7 @@ import {
DEFAULT_SQL_COLLATE
} from "../../common/utils/constants";
import { convertDateTimeStringMsTo7Digital } from "../../common/utils/utils";
import { newEtag } from "../../common/utils/utils";
import { validateReadConditions } from "../conditions/ReadConditionalHeadersValidator";
import { validateWriteConditions } from "../conditions/WriteConditionalHeadersValidator";
import StorageErrorFactory from "../errors/StorageErrorFactory";
@ -38,7 +39,6 @@ import {
DEFAULT_LIST_BLOBS_MAX_RESULTS,
DEFAULT_LIST_CONTAINERS_MAX_RESULTS
} from "../utils/constants";
import { newEtag } from "../utils/utils";
import BlobReferredExtentsAsyncIterator from "./BlobReferredExtentsAsyncIterator";
import IBlobMetadataStore, {
AcquireBlobLeaseResponse,
@ -386,7 +386,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
context: Context,
serviceProperties: ServicePropertiesModel
): Promise<ServicePropertiesModel> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
const findResult = await ServicesModel.findByPk(
serviceProperties.accountName,
{
@ -594,7 +594,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
container: string,
options: Models.ContainerDeleteMethodOptionalParams = {}
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
/* Transaction starts */
const findResult = await ContainersModel.findOne({
attributes: [
@ -677,7 +677,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseAccessConditions?: Models.LeaseAccessConditions,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<void> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
/* Transaction starts */
const findResult = await ContainersModel.findOne({
attributes: [
@ -765,7 +765,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
container: string,
setAclModel: SetContainerAccessPolicyOptions
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
const findResult = await ContainersModel.findOne({
attributes: [
"accountName",
@ -826,7 +826,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
container: string,
options: Models.ContainerAcquireLeaseOptionalParams
): Promise<AcquireContainerLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
/* Transaction starts */
const findResult = await ContainersModel.findOne({
where: {
@ -880,7 +880,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseId: string,
options: Models.ContainerReleaseLeaseOptionalParams = {}
): Promise<Models.ContainerProperties> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
/* Transaction starts */
const findResult = await ContainersModel.findOne({
where: {
@ -932,7 +932,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseId: string,
options: Models.ContainerRenewLeaseOptionalParams = {}
): Promise<RenewContainerLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
/* Transaction starts */
// TODO: Filter out unnecessary fields in select query
const findResult = await ContainersModel.findOne({
@ -988,7 +988,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
breakPeriod: number | undefined,
options: Models.ContainerBreakLeaseOptionalParams = {}
): Promise<BreakContainerLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
const findResult = await ContainersModel.findOne({
where: {
accountName: account,
@ -1052,7 +1052,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
proposedLeaseId: string,
options: Models.ContainerChangeLeaseOptionalParams = {}
): Promise<ChangeContainerLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
const findResult = await ContainersModel.findOne({
where: {
accountName: account,
@ -1112,7 +1112,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseAccessConditions?: Models.LeaseAccessConditions,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<void> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(
context,
blob.accountName,
@ -1181,7 +1181,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseAccessConditions?: Models.LeaseAccessConditions,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<BlobModel> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -1232,7 +1232,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
includeSnapshots?: boolean,
includeUncommittedBlobs?: boolean
): Promise<[BlobModel[], any | undefined]> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const whereQuery: any = {
@ -1340,7 +1340,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
block: BlockModel,
leaseAccessConditions?: Models.LeaseAccessConditions
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
await this.assertContainerExists(
context,
block.accountName,
@ -1436,7 +1436,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
isCommitted?: boolean,
leaseAccessConditions?: Models.LeaseAccessConditions
): Promise<any> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -1503,7 +1503,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseAccessConditions?: Models.LeaseAccessConditions,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
await this.assertContainerExists(
context,
blob.accountName,
@ -1633,7 +1633,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
creationTime,
lastModified: blob.properties.lastModified || context.startTime,
contentLength: selectedBlockList
.map(block => block.size)
.map((block) => block.size)
.reduce((total, val) => {
return total + val;
}, 0),
@ -1680,7 +1680,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseAccessConditions?: Models.LeaseAccessConditions,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<GetBlobPropertiesRes> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -1739,7 +1739,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
metadata?: Models.BlobMetadata,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<CreateSnapshotResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -1810,7 +1810,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
blob: string,
options: Models.BlobDeleteMethodOptionalParams
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -1985,7 +1985,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
blobHTTPHeaders: Models.BlobHTTPHeaders | undefined,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<Models.BlobProperties> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2061,7 +2061,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
metadata: Models.BlobMetadata | undefined,
modifiedAccessConditions?: Models.ModifiedAccessConditions
): Promise<Models.BlobProperties> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2137,7 +2137,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
proposedLeaseId?: string,
options: Models.BlobAcquireLeaseOptionalParams = {}
): Promise<AcquireBlobLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2193,7 +2193,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseId: string,
options: Models.BlobReleaseLeaseOptionalParams = {}
): Promise<ReleaseBlobLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2249,7 +2249,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
leaseId: string,
options: Models.BlobRenewLeaseOptionalParams = {}
): Promise<RenewBlobLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2306,7 +2306,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
proposedLeaseId: string,
options: Models.BlobChangeLeaseOptionalParams = {}
): Promise<ChangeBlobLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2362,7 +2362,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
breakPeriod: number | undefined,
options: Models.BlobBreakLeaseOptionalParams = {}
): Promise<BreakBlobLeaseResponse> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2426,7 +2426,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
blob: string,
snapshot?: string | undefined
): Promise<void> {
await this.sequelize.transaction(async t => {
await this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const res = await BlobsModel.findOne({
@ -2483,7 +2483,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
tier: Models.AccessTier | undefined,
options: Models.BlobStartCopyFromURLOptionalParams = {}
): Promise<Models.BlobProperties> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
const sourceBlob = await this.getBlobWithLeaseUpdated(
source.account,
source.container,
@ -2658,7 +2658,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
tier: Models.AccessTier,
leaseAccessConditions?: Models.LeaseAccessConditions
): Promise<200 | 202> {
return this.sequelize.transaction(async t => {
return this.sequelize.transaction(async (t) => {
await this.assertContainerExists(context, account, container, t);
const blobFindResult = await BlobsModel.findOne({
@ -2831,10 +2831,10 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
},
limit: maxResults + 1,
order: [["id", "ASC"]]
}).then(res => {
}).then((res) => {
if (res.length < maxResults) {
return [
res.map(obj => {
res.map((obj) => {
return this.deserializeModelValue(obj, "persistency", true);
}),
undefined
@ -2847,7 +2847,9 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
true
);
return [
res.map(obj => this.deserializeModelValue(obj, "persistency", true)),
res.map((obj) =>
this.deserializeModelValue(obj, "persistency", true)
),
nextMarker
];
}

View file

@ -1,21 +1,6 @@
import { createHash, createHmac } from "crypto";
import { createWriteStream, PathLike } from "fs";
import { parse } from "url";
import StorageErrorFactory from "../errors/StorageErrorFactory";
/**
* Generates a hash signature for an HTTP request or for a SAS.
*
* @param {string} stringToSign
* @param {key} key
* @returns {string}
*/
export function computeHMACSHA256(stringToSign: string, key: Buffer): string {
return createHmac("sha256", key)
.update(stringToSign, "utf8")
.digest("base64");
}
export function checkApiVersion(
inputApiVersion: string,
validApiVersions: Array<string>,
@ -29,40 +14,6 @@ export function checkApiVersion(
}
}
/**
* Rounds a date off to seconds.
*
* @export
* @param {Date} date
* @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
* If false, YYYY-MM-DDThh:mm:ssZ will be returned.
* @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component
*/
export function truncatedISO8061Date(
date: Date,
withMilliseconds: boolean = true
): string {
// Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
const dateString = date.toISOString();
return withMilliseconds
? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
: dateString.substring(0, dateString.length - 5) + "Z";
}
export function newEtag(): string {
// Etag should match ^"0x[A-F0-9]{15,}"$
// Date().getTime().toString(16) only has 11 digital
// so multiply a number between 70000-100000, can get a 16 based 15+ digital number
return (
'"0x' +
(new Date().getTime() * Math.round(Math.random() * 30000 + 70000))
.toString(16)
.toUpperCase() +
'"'
);
}
export async function streamToLocalFile(
stream: NodeJS.ReadableStream,
path: PathLike
@ -78,66 +29,6 @@ export async function streamToLocalFile(
});
}
export async function getMD5FromStream(
stream: NodeJS.ReadableStream
): Promise<Uint8Array> {
const hash = createHash("md5");
return new Promise<Uint8Array>((resolve, reject) => {
stream
.on("data", data => {
hash.update(data);
})
.on("end", () => {
resolve(hash.digest());
})
.on("error", err => {
reject(err);
});
});
}
export async function getMD5FromString(text: string): Promise<Uint8Array> {
return createHash("md5")
.update(text)
.digest();
}
/**
* Get URL query key value pairs from an URL string.
*
* @export
* @param {string} url
* @returns {{[key: string]: string}}
*/
export function getURLQueries(url: string): { [key: string]: string } {
let queryString = parse(url).query;
if (!queryString) {
return {};
}
queryString = queryString.trim();
queryString = queryString.startsWith("?")
? queryString.substr(1)
: queryString;
let querySubStrings: string[] = queryString.split("&");
querySubStrings = querySubStrings.filter((value: string) => {
const indexOfEqual = value.indexOf("=");
const lastIndexOfEqual = value.lastIndexOf("=");
return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual;
});
const queries: { [key: string]: string } = {};
for (const querySubString of querySubStrings) {
const splitResults = querySubString.split("=");
const key: string = splitResults[0];
const value: string = splitResults[1];
queries[key] = value;
}
return queries;
}
/**
* Default range value [0, Infinite] will be returned if all parameters not provided.
*

View file

@ -1,7 +1,7 @@
import {
EMULATOR_ACCOUNT_KEY,
EMULATOR_ACCOUNT_NAME
} from "../blob/utils/constants";
} from "../common/utils/constants";
import ILogger from "../queue/generated/utils/ILogger";
import IAccountDataStore, { IAccountProperties } from "./IAccountDataStore";
import {

View file

@ -4,10 +4,17 @@ import {
DEFAULT_BLOB_LISTENING_PORT,
DEFAULT_BLOB_SERVER_HOST_NAME
} from "../blob/utils/constants";
import {
DEFAULT_QUEUE_LISTENING_PORT,
DEFAULT_QUEUE_SERVER_HOST_NAME
} from "../queue/utils/constants";
import {
DEFAULT_TABLE_LISTENING_PORT,
DEFAULT_TABLE_SERVER_HOST_NAME
} from "../table/utils/constants";
import IEnvironment from "./IEnvironment";
args
@ -31,6 +38,16 @@ args
"Optional. Customize listening port for queue",
DEFAULT_QUEUE_LISTENING_PORT
)
.option(
["", "tableHost"],
"Optional. Customize listening address for table",
DEFAULT_TABLE_SERVER_HOST_NAME
)
.option(
["", "tablePort"],
"Optional. Customize listening port for table",
DEFAULT_TABLE_LISTENING_PORT
)
.option(
["l", "location"],
"Optional. Use an existing folder as workspace path, default is current working directory",
@ -75,6 +92,14 @@ export default class Environment implements IEnvironment {
return this.flags.queuePort;
}
public tableHost(): string | undefined {
return this.flags.tableHost;
}
public tablePort(): number | undefined {
return this.flags.tablePort;
}
public async location(): Promise<string> {
return this.flags.location || process.cwd();
}

View file

@ -0,0 +1,31 @@
import { HttpMethod } from "../../blob/generated/IRequest";
export enum BatchType {
blob = "blob",
table = "table"
}
/**
* A container for batch operations
*
* @export
* @class BatchOperation
*/
export default class BatchOperation {
public rawHeaders: string[];
public protocol?: string;
public batchType: BatchType;
public httpMethod?: HttpMethod;
public parameters?: string;
public uri?: string;
public path?: string;
public jsonRequestBody?: string; // maybe we want the entity operation to be stored in a parsed format?
public constructor(_batchType: BatchType, headers: string) {
this.batchType = _batchType;
const dirtyHeaderArray = headers.split("\n");
// drop indented lines; column-0 header lines (and blank lines, which are ignored downstream) pass through
this.rawHeaders = dirtyHeaderArray.filter(
(candidate) => candidate.search(/\S/) < 1
);
}
}
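
For orientation, here is a minimal sketch of how one MIME part's header block might be handed to this class. The import path and all header/URI values are illustrative, not taken from the commit:

```typescript
// Hypothetical usage, assuming this file lives at src/common/batch/BatchOperation.ts.
import BatchOperation, { BatchType } from "./BatchOperation";

// Header block of a single operation inside a $batch changeset.
const rawHeaderBlock = [
  "Content-Type: application/http",
  "Content-Transfer-Encoding: binary",
  "Accept: application/json;odata=minimalmetadata"
].join("\n");

const operation = new BatchOperation(BatchType.table, rawHeaderBlock);
operation.httpMethod = "POST";
operation.uri = "http://127.0.0.1:10002/devstoreaccount1/mytable";
console.log(operation.rawHeaders.length); // 3 header lines kept by the filter
```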

View file

@ -0,0 +1,198 @@
import { Stream } from "stream";
import IRequest, { HttpMethod } from "../../table/generated/IRequest";
import BatchOperation from "./BatchOperation";
import BatchRequestHeaders from "./BatchRequestHeaders";
import * as Models from "../../table/generated/artifacts/models";
import BatchTableUpdateEntityOptionalParams from "../../table/batch/BatchTableUpdateEntityOptionalParams";
import BatchTableDeleteEntityOptionalParams from "../../table/batch/BatchTableDeleteEntityOptionalParams";
import IOptionalParams from "../../table/batch/IOptionalParams";
/**
* Represents a request in the context of batch operations.
* ToDo: Requires validation against all operation types.
* Currently several functions of the interface are not implemented
* @export
* @class BatchRequest
* @implements {IRequest}
*/
export default class BatchRequest implements IRequest {
public response?: any;
private headers: BatchRequestHeaders;
private batchOperation: BatchOperation;
public contentID: number | undefined;
public constructor(batchOperation: BatchOperation) {
this.batchOperation = batchOperation;
this.headers = new BatchRequestHeaders(batchOperation.rawHeaders);
// set default params, due to our processing logic
this.params = new BatchTableUpdateEntityOptionalParams();
}
// ToDo: This should really be using an interface.
// refactor once the basic logic is working
// | BatchTableDeleteEntityOptionalParams
// | BatchTableUpdateEntityOptionalParams
// | BatchTableMergeEntityOptionalParams
// | BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams
// | BatchTableQueryEntitiesOptionalParams
// | BatchTableInsertEntityOptionalParams;
public params: IOptionalParams;
// ingests the optional params for a batch request, and sets these
// based on the type of operation and headers present on an
// individual request
public ingestOptionalParams(params: IOptionalParams) {
this.params = params;
// need to compare headers to option params and set accordingly
if (this.getHeader("x-ms-client-request-id") !== undefined) {
this.params.requestId = this.getHeader("x-ms-client-request-id");
}
// Theoretically this enum is redundant: it is shared across all table
// optional param models and we should only need one, but the code
// generator currently emits a distinct enum for each of them
if (this.getHeader("maxdataserviceversion")?.includes("3.0")) {
this.params.dataServiceVersion =
Models.DataServiceVersion4.ThreeFullStopZero;
}
// TableDeleteEntityOptionalParams is the only interface without a body.
// We instantiate the batch class to enable this check and the other
// interface acrobatics needed for batch processing
const body = this.getBody();
if (
body != null &&
body !== "" &&
!(this.params instanceof BatchTableDeleteEntityOptionalParams)
) {
this.params.tableEntityProperties = JSON.parse(body);
}
// set request timeout
// https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-table-service-operations
// set responsePreference
// set queryOptions
// https://docs.microsoft.com/en-us/rest/api/storageservices/payload-format-for-table-service-operations
const options: Models.QueryOptions = new Object() as Models.QueryOptions;
// format
// set payload options
if (this.getHeader("accept")?.includes("minimalmeta")) {
options.format =
Models.OdataMetadataFormat.Applicationjsonodataminimalmetadata;
} else if (this.getHeader("accept")?.includes("fullmeta")) {
options.format =
Models.OdataMetadataFormat.Applicationjsonodatafullmetadata;
} else {
options.format =
Models.OdataMetadataFormat.Applicationjsonodatanometadata;
}
// top
// select
// filter
this.params.queryOptions = options;
}
public getMethod(): HttpMethod {
if (this.batchOperation.httpMethod != null) {
return this.batchOperation.httpMethod;
} else {
throw new Error("httpMethod invalid on batch operation");
}
}
public getUrl(): string {
// ToDo: is this a valid assumption for the batch API?
// ToDo: here we also assume https, which is also not true...
// we need to parse this from the request
// return `https://${this.accountName}.${this.batchOperation.batchType}.core.windows.net/$batch`;
// in delete, it seems that we actually expect the full uri
if (this.batchOperation.uri != null && this.batchOperation.path != null) {
return this.batchOperation.uri;
// this substring is not needed.
// .substring(
// 0,
// this.batchOperation.uri.length - this.batchOperation.path.length
// );
} else {
throw new Error("uri or path null when calling getUrl on BatchRequest");
}
}
public getEndpoint(): string {
throw new Error("Method not implemented.");
}
public getPath(): string {
if (this.batchOperation.path != null) {
return this.batchOperation.path;
} else {
throw new Error("path null when calling getPath on BatchRequest");
}
}
public getBodyStream(): NodeJS.ReadableStream {
if (this.batchOperation.jsonRequestBody != null) {
return Stream.Readable.from(this.batchOperation.jsonRequestBody);
} else {
throw new Error("body null when calling getBodyStream on BatchRequest");
}
}
public setBody(body: string | undefined): IRequest {
throw new Error("Method not implemented.");
}
public getBody(): string | undefined {
if (this.batchOperation.jsonRequestBody != null) {
return this.batchOperation.jsonRequestBody;
} else {
throw new Error("body null when calling getBody on BatchRequest");
}
}
public getHeader(field: string): string | undefined {
return this.headers.header(field);
}
public getHeaders(): { [header: string]: string | string[] | undefined } {
throw new Error("Method not implemented.");
}
public getRawHeaders(): string[] {
return this.batchOperation.rawHeaders;
}
public getQuery(key: string): string | undefined {
switch (key) {
case "$format":
return this.params.queryOptions?.format;
case "$top":
return this.params.queryOptions?.top?.toLocaleString();
case "$select":
return this.params.queryOptions?.select;
case "$filter":
return this.params.queryOptions?.filter;
default:
break;
}
throw new Error("unknown query options type.");
}
public getProtocol(): string {
if (
this.batchOperation.protocol !== null &&
this.batchOperation.protocol !== undefined
) {
return this.batchOperation.protocol;
} else {
// try extract protocol
const protocolMatch = this.getUrl().match(/https?/);
if (protocolMatch !== null && protocolMatch!.length > 0) {
this.batchOperation.protocol = protocolMatch[0];
return this.batchOperation.protocol;
}
throw new Error("protocol null when calling getProtocol on BatchRequest");
}
}
}
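
Putting the two classes together, a hedged sketch of how a single update operation could be wrapped. The import paths, URIs and entity values are assumptions for illustration:

```typescript
import BatchOperation, { BatchType } from "./BatchOperation";
import BatchRequest from "./BatchRequest";
import BatchTableUpdateEntityOptionalParams from "../../table/batch/BatchTableUpdateEntityOptionalParams";

const op = new BatchOperation(
  BatchType.table,
  "accept: application/json;odata=nometadata"
);
op.httpMethod = "PUT";
op.uri =
  "http://127.0.0.1:10002/devstoreaccount1/mytable(PartitionKey='pk',RowKey='rk')";
op.path = "/devstoreaccount1/mytable(PartitionKey='pk',RowKey='rk')";
op.jsonRequestBody = JSON.stringify({ PartitionKey: "pk", RowKey: "rk", value: 1 });

const request = new BatchRequest(op);
request.ingestOptionalParams(new BatchTableUpdateEntityOptionalParams());
console.log(request.getMethod());         // "PUT"
console.log(request.getProtocol());       // "http", extracted from the URL
console.log(request.getQuery("$format")); // the no-metadata OData format value
```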

View file

@ -0,0 +1,142 @@
/**
* Provides access to headers for batch requests.
* Requests in an entity group transaction carry their own headers per
* operation, and these need to be handled separately from the
* outer request envelope.
*
* @export
* @class BatchRequestHeaders
*/
export default class BatchRequestHeaders {
public constructor(headers: string[]) {
this.rawHeaders = headers;
this.createDictFromRawHeaders();
}
private rawHeaders: string[];
private headerItems: { [index: string]: string } = {};
private headerCount: number = 0;
/**
* Returns the raw headers as a string array
*
* @return {*}
* @memberof BatchRequestHeaders
*/
public getRawHeaders() {
return this.rawHeaders;
}
/**
* Checks for existence of a header
*
* @param {string} key
* @return {*} {boolean}
* @memberof BatchRequestHeaders
*/
public containsHeader(key: string): boolean {
return this.headerItems.hasOwnProperty(key);
}
/**
* The count of headers
*
* @return {*} {number}
* @memberof BatchRequestHeaders
*/
public count(): number {
return this.headerCount;
}
/**
* Add a header to the header items
*
* @param {string} key
* @param {string} value
* @memberof BatchRequestHeaders
*/
public add(key: string, value: string) {
if (!this.headerItems.hasOwnProperty(key)) this.headerCount++;
this.headerItems[key] = value;
}
/**
* Remove a header from the header items
*
* @param {string} key
* @return {*} {string}
* @memberof BatchRequestHeaders
*/
public remove(key: string): string {
const val = this.headerItems[key];
delete this.headerItems[key];
this.headerCount--;
return val;
}
/**
* Returns the header value based on a lower case lookup of the key
*
* @param {string} key
* @return {*} {string}
* @memberof BatchRequestHeaders
*/
public header(key: string): string {
return this.headerItems[key.toLocaleLowerCase()];
}
/**
* The header keys as a string array
*
* @return {*} {string[]}
* @memberof BatchRequestHeaders
*/
public headerKeys(): string[] {
const headers: string[] = [];
for (const prop in this.headerItems) {
if (this.headerItems.hasOwnProperty(prop)) {
headers.push(prop);
}
}
return headers;
}
/**
* Header values as a string array
*
* @return {*} {string[]}
* @memberof BatchRequestHeaders
*/
public headerValues(): string[] {
const values: string[] = [];
for (const prop in this.headerItems) {
if (this.headerItems.hasOwnProperty(prop)) {
values.push(this.headerItems[prop]);
}
}
return values;
}
/**
* Creates the dictionary to allow key value lookups on the headers
*
* @private
* @memberof BatchRequestHeaders
*/
private createDictFromRawHeaders(): void {
this.rawHeaders.forEach((rawheader) => {
if (rawheader != null) {
const headerMatch = rawheader.match(/(\S+)(:\s?)(\S+)/);
if (headerMatch == null && rawheader.length > 2) {
this.add(rawheader, "");
} else if (headerMatch != null) {
this.add(headerMatch[1].toLocaleLowerCase(), headerMatch[3]);
}
}
});
}
}
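
A small usage sketch (header values are made up); note that keys are stored and looked up lower-cased, and that blank lines are skipped when the dictionary is built:

```typescript
import BatchRequestHeaders from "./BatchRequestHeaders"; // path assumed

const headers = new BatchRequestHeaders([
  "Content-Type: application/http",
  "x-ms-client-request-id: 0ef49934-a088-4f29-9c0a-c0d8e78b1a3c",
  ""
]);

console.log(headers.count());                                  // 2
console.log(headers.header("content-type"));                   // "application/http"
console.log(headers.containsHeader("x-ms-client-request-id")); // true
```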

View file

@ -0,0 +1,65 @@
import { StorageError } from "../../blob/generated/artifacts/mappers";
/**
* Base Batch serialization class.
* Contains shared logic for batch serialization.
* ToDo: Make these util functions static or aggregate this logic into one of the other
* batch classes
*
* @export
* @param {string} batchBoundary
* @param {string} changesetBoundary
*/
export class BatchSerialization {
public batchBoundary: string = "";
public changesetBoundary: string = "";
public lineEnding: string = "";
public extractBatchBoundary(batchRequestsString: string): void {
const batchBoundaryMatch = batchRequestsString.match(
// prettier-ignore
/--batch_(\w+-?)+/
);
if (null != batchBoundaryMatch) {
this.batchBoundary = batchBoundaryMatch[0];
} else {
throw new Error("no batch boundary found in request");
}
}
// ToDo: improve the RegEx; not sure if the spec allows other change set
// boundary styles (such as boundary=blahblahblah), so this is kept
// as generic as possible
public extractChangeSetBoundary(batchRequestsString: string): void {
let subChangeSetPrefixMatches = batchRequestsString.match(
/(boundary=)+(\w+_?(\w+-?)+)/
);
if (subChangeSetPrefixMatches != null) {
this.changesetBoundary = subChangeSetPrefixMatches[2];
} else {
// we need to see if this is a single query batch operation
// whose format is different! (as we only support a single query per batch)
// ToDo: do we need to check for GET HTTP verb?
subChangeSetPrefixMatches = batchRequestsString.match(/(--batch_\w+)/);
if (subChangeSetPrefixMatches != null) {
this.changesetBoundary = subChangeSetPrefixMatches[1];
} else {
throw StorageError;
}
}
}
public extractLineEndings(batchRequestsString: string): void {
const lineEndingMatch = batchRequestsString.match(
// prettier-ignore
/\r?\n+/
);
if (lineEndingMatch != null) {
this.lineEnding = lineEndingMatch[0];
} else {
throw StorageError;
}
}
}
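
To illustrate the three extractors, a sketch against a hand-built $batch payload (the boundary GUIDs are made up):

```typescript
import { BatchSerialization } from "./BatchSerialization"; // path assumed

const payload = [
  "--batch_4689afd2-c7b5-4b7a-8b4a-6f2a0b3c6e11",
  "Content-Type: multipart/mixed; boundary=changeset_0e5d8a51-42a9-4c3b-bb32-0f0a1d2e3f41",
  "",
  "--changeset_0e5d8a51-42a9-4c3b-bb32-0f0a1d2e3f41",
  "..."
].join("\r\n");

const serialization = new BatchSerialization();
serialization.extractBatchBoundary(payload);     // "--batch_4689afd2-..."
serialization.extractChangeSetBoundary(payload); // "changeset_0e5d8a51-..."
serialization.extractLineEndings(payload);       // "\r\n"
console.log(serialization.batchBoundary, serialization.changesetBoundary);
```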

View file

@ -106,13 +106,12 @@ export default class SqlExtentMetadataStore implements IExtentMetadataStore {
*/
public async updateExtent(extent: IExtentModel): Promise<void> {
return ExtentsModel.upsert({
id: extent.id,
...extent
})
.then(() => {
return;
})
.catch(err => {
.catch((err) => {
// console.log(`SqlExtentMetadataStore.updateExtent() upsert err:${err}`);
throw err;
});
@ -181,13 +180,13 @@ export default class SqlExtentMetadataStore implements IExtentMetadataStore {
limit: maxResults,
where: query as any,
order: [["id", "ASC"]]
}).then(res => {
}).then((res) => {
if (res.length < maxResults!) {
return [res.map(val => modelConvert(val)), undefined];
return [res.map((val) => modelConvert(val)), undefined];
} else {
const tailItem = res[res.length - 1];
const nextMarker = this.getModelValue<number>(tailItem, "id", true);
return [res.map(val => modelConvert(val)), nextMarker];
return [res.map((val) => modelConvert(val)), nextMarker];
}
});
}
@ -221,7 +220,7 @@ export default class SqlExtentMetadataStore implements IExtentMetadataStore {
where: {
id: extentId
}
}).then(res => {
}).then((res) => {
if (res === null || res === undefined) {
throw Error(
`SqlExtentMetadataStore:getExtentLocationId() Error. Extent does not exist.`

View file

@ -47,3 +47,9 @@ export const VALID_ISSUE_PREFIXES = [
"https://sts.chinacloudapi.cn/",
"https://sts.windows-ppe.net"
];
export const EMULATOR_ACCOUNT_NAME = "devstoreaccount1";
export const EMULATOR_ACCOUNT_KEY = Buffer.from(
"Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==",
"base64"
);
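
For reference, these are the well-known emulator credentials. Combined with the default table port introduced in this change (10002), a client connection string looks roughly like this sketch:

```typescript
// Well-known devstoreaccount1 credentials; the endpoint assumes the default table port.
const connectionString =
  "DefaultEndpointsProtocol=http;" +
  "AccountName=devstoreaccount1;" +
  "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;" +
  "TableEndpoint=http://127.0.0.1:10002/devstoreaccount1;";
```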

View file

@ -1,4 +1,6 @@
import { createHash, createHmac } from "crypto";
import rimraf = require("rimraf");
import { parse } from "url";
import { promisify } from "util";
// LokiFsStructuredAdapter
@ -44,3 +46,113 @@ export function convertRawHeadersToMetadata(
return isEmpty ? undefined : res;
}
export function newEtag(): string {
// Etag should match ^"0x[A-F0-9]{15,}"$
// Date().getTime().toString(16) only has 11 digital
// so multiply a number between 70000-100000, can get a 16 based 15+ digital number
return (
'"0x' +
(new Date().getTime() * Math.round(Math.random() * 30000 + 70000))
.toString(16)
.toUpperCase() +
'"'
);
}
export function newTableEntityEtag(startTime: Date): string {
// Etag as returned by Table Storage should match W/"datetime'<ISO8601datetime>'"
return "W/\"datetime'" + truncatedISO8061Date(startTime, true) + "'\"";
}
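
A quick illustration of the two etag shapes these helpers produce; the import path is assumed, and the blob-style value is randomized, so the one shown is only an example:

```typescript
import { newEtag, newTableEntityEtag } from "./utils"; // path assumed

console.log(newEtag());
// e.g. "0x1D2B4E6F8A9C3AB" (random, but always matching ^"0x[A-F0-9]{15,}"$)

console.log(newTableEntityEtag(new Date("2021-03-29T11:41:44.123Z")));
// W/"datetime'2021-03-29T11:41:44.1230000Z'"
```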
/**
* Generates a hash signature for an HTTP request or for a SAS.
*
* @param {string} stringToSign
* @param {key} key
* @returns {string}
*/
export function computeHMACSHA256(stringToSign: string, key: Buffer): string {
return createHmac("sha256", key)
.update(stringToSign, "utf8")
.digest("base64");
}
/**
* Rounds a date off to seconds.
*
* @export
* @param {Date} date
* @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
* If false, YYYY-MM-DDThh:mm:ssZ will be returned.
* @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component
*/
export function truncatedISO8061Date(
date: Date,
withMilliseconds: boolean = true
): string {
// Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
const dateString = date.toISOString();
return withMilliseconds
? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
: dateString.substring(0, dateString.length - 5) + "Z";
}
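
For example, for a fixed input date the two output shapes look like this:

```typescript
import { truncatedISO8061Date } from "./utils"; // path assumed

const d = new Date("2018-10-29T06:34:36.139Z");
console.log(truncatedISO8061Date(d));        // "2018-10-29T06:34:36.1390000Z"
console.log(truncatedISO8061Date(d, false)); // "2018-10-29T06:34:36Z"
```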
/**
* Get URL query key value pairs from an URL string.
*
* @export
* @param {string} url
* @returns {{[key: string]: string}}
*/
export function getURLQueries(url: string): { [key: string]: string } {
let queryString = parse(url).query;
if (!queryString) {
return {};
}
queryString = queryString.trim();
queryString = queryString.startsWith("?")
? queryString.substr(1)
: queryString;
let querySubStrings: string[] = queryString.split("&");
querySubStrings = querySubStrings.filter((value: string) => {
const indexOfEqual = value.indexOf("=");
const lastIndexOfEqual = value.lastIndexOf("=");
return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual;
});
const queries: { [key: string]: string } = {};
for (const querySubString of querySubStrings) {
const splitResults = querySubString.split("=");
const key: string = splitResults[0];
const value: string = splitResults[1];
queries[key] = value;
}
return queries;
}
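
A short sketch of the parsing behavior, including the filter that drops malformed pairs; note that values are returned still URL-encoded (the URL below is illustrative):

```typescript
import { getURLQueries } from "./utils"; // path assumed

const queries = getURLQueries(
  "http://127.0.0.1:10002/devstoreaccount1/mytable?sv=2020-06-12&sig=abc%3D&bad==x"
);
console.log(queries); // { sv: "2020-06-12", sig: "abc%3D" }; "bad==x" is dropped
```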
export async function getMD5FromString(text: string): Promise<Uint8Array> {
return createHash("md5").update(text).digest();
}
export async function getMD5FromStream(
stream: NodeJS.ReadableStream
): Promise<Uint8Array> {
const hash = createHash("md5");
return new Promise<Uint8Array>((resolve, reject) => {
stream
.on("data", (data) => {
hash.update(data);
})
.on("end", () => {
resolve(hash.digest());
})
.on("error", (err) => {
reject(err);
});
});
}

View file

@ -1,6 +1,9 @@
import { SasIPRange } from "@azure/storage-blob";
import { computeHMACSHA256, truncatedISO8061Date } from "../utils/utils";
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import AccountSASPermissions from "./AccountSASPermissions";
import AccountSASResourceTypes from "./AccountSASResourceTypes";
import AccountSASServices from "./AccountSASServices";

View file

@ -1,4 +1,7 @@
import { computeHMACSHA256, truncatedISO8061Date } from "../utils/utils";
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import { SASProtocol } from "./IAccountSASSignatureValues";
import { IIPRange, ipRangeToString } from "./IIPRange";

View file

@ -1,11 +1,11 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import { computeHMACSHA256, getURLQueries } from "../../common/utils/utils";
import QueueStorageContext from "../context/QueueStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import { HeaderConstants } from "../utils/constants";
import { computeHMACSHA256, getURLQueries } from "../utils/utils";
import IAuthenticator from "./IAuthenticator";
export default class QueueSharedKeyAuthenticator implements IAuthenticator {
@ -180,11 +180,9 @@ export default class QueueSharedKeyAuthenticator implements IAuthenticator {
.startsWith(HeaderConstants.PREFIX_FOR_STORAGE);
});
headersArray.sort(
(a, b): number => {
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
}
);
headersArray.sort((a, b): number => {
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
});
let canonicalizedHeadersStringToSign: string = "";
headersArray.forEach(header => {

View file

@ -43,6 +43,10 @@ export default class ExpressResponseAdapter implements IResponse {
return this;
}
public getHeader(field: string): number | string | string[] | undefined {
return this.res.getHeader(field);
}
public getHeaders(): OutgoingHttpHeaders {
return this.res.getHeaders();
}

View file

@ -7,6 +7,7 @@ export type HttpMethod =
| "CONNECT"
| "OPTIONS"
| "TRACE"
| "MERGE"
| "PATCH";
export default interface IRequest {

View file

@ -9,6 +9,7 @@ export default interface IResponse {
field: string,
value?: string | string[] | undefined | number | boolean
): IResponse;
getHeader(field: string): number | string | string[] | undefined;
getHeaders(): OutgoingHttpHeaders;
headersSent(): boolean;
setContentType(value: string | undefined): IResponse;

View file

@ -90,8 +90,22 @@ function isRequestAgainstOperation(
return [false, metConditionsNum];
}
const xHttpMethod = req.getHeader("X-HTTP-Method");
let method = req.getMethod();
if (xHttpMethod && xHttpMethod.length > 0) {
const value = xHttpMethod.trim();
if (
value === "GET" ||
value === "MERGE" ||
value === "PATCH" ||
value === "DELETE"
) {
method = value;
}
}
// Validate HTTP method
if (req.getMethod() !== spec.httpMethod) {
if (method !== spec.httpMethod) {
return [false, metConditionsNum++];
}
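
For context, this is verb tunnelling: a client sends POST plus an X-HTTP-Method header, and the dispatcher honors the overridden verb. A standalone sketch of the same check (a simplified shape, not the real IRequest interface):

```typescript
// Returns the effective HTTP verb after applying an X-HTTP-Method override.
function effectiveMethod(
  method: string,
  headers: { [key: string]: string | undefined }
): string {
  const override = headers["X-HTTP-Method"];
  if (override && override.length > 0) {
    const value = override.trim();
    if (["GET", "MERGE", "PATCH", "DELETE"].includes(value)) {
      return value;
    }
  }
  return method;
}

console.log(effectiveMethod("POST", { "X-HTTP-Method": "MERGE" })); // "MERGE"
console.log(effectiveMethod("POST", {}));                           // "POST"
```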
@ -123,7 +137,7 @@ function isRequestAgainstOperation(
if (
queryParameter.mapper.type.name === "Enum" &&
queryParameter.mapper.type.allowedValues.findIndex(val => {
queryParameter.mapper.type.allowedValues.findIndex((val) => {
return val === queryValue;
}) < 0
) {
@ -153,7 +167,7 @@ function isRequestAgainstOperation(
if (
headerParameter.mapper.type.name === "Enum" &&
headerParameter.mapper.type.allowedValues.findIndex(val => {
headerParameter.mapper.type.allowedValues.findIndex((val) => {
return val === headerValue;
}) < 0
) {

View file

@ -136,7 +136,10 @@ export async function deserialize(
const body = await readRequestIntoText(req);
logger.debug(
`deserialize(): Raw request body string is ${body}`,
`deserialize(): Raw request body string is (with all whitespace removed) ${body.replace(
/\s/g,
""
)}`,
context.contextID
);
@ -321,6 +324,42 @@ export async function serialize(
logger.info(`Serializer: Start returning stream body.`, context.contextID);
}
// Serialize JSON bodies
if (
!spec.isXML &&
responseSpec.bodyMapper &&
responseSpec.bodyMapper.type.name !== "Stream"
) {
let body = spec.serializer.serialize(
responseSpec.bodyMapper!,
handlerResponse
);
// When the root element is a sequence type, wrap it with its xmlElementName because serialize() doesn't do that
if (responseSpec.bodyMapper!.type.name === "Sequence") {
const sequenceElementName = responseSpec.bodyMapper!.xmlElementName;
if (sequenceElementName !== undefined) {
const newBody = {} as any;
newBody[sequenceElementName] = body;
body = newBody;
}
}
if (!res.getHeader("content-type")) {
res.setContentType("application/json");
}
const jsonBody = JSON.stringify(body);
// TODO: Should send response in a serializer?
res.getBodyStream().write(jsonBody);
logger.debug(
`Serializer: Raw response body string is ${jsonBody}`,
context.contextID
);
logger.info(`Serializer: Start returning JSON body.`, context.contextID);
}
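
A sketch of the Sequence-wrapping step above, with illustrative names; for Table_Query the element name is "value", as the override later in this change shows:

```typescript
// Illustrative values only; in the middleware they come from the body mapper.
const serialized = [{ TableName: "table1" }, { TableName: "table2" }];
const sequenceElementName = "value"; // responseSpec.bodyMapper.xmlElementName

const wrapped = {} as any;
wrapped[sequenceElementName] = serialized;
console.log(JSON.stringify(wrapped));
// {"value":[{"TableName":"table1"},{"TableName":"table2"}]}
```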
// Serialize stream body
// TODO: Move to end middleware for end tracking
if (

View file

@ -16,11 +16,6 @@ export const DEFAULT_ENABLE_ACCESS_LOG = true;
export const DEFAULT_QUEUE_CONTEXT_PATH = "azurite_queue_context";
export const LOGGER_CONFIGS = {};
export const DEFAULT_GC_INTERVAL_MS = 60 * 1000;
export const EMULATOR_ACCOUNT_NAME = "devstoreaccount1";
export const EMULATOR_ACCOUNT_KEY = Buffer.from(
"Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==",
"base64"
);
export const NEVER_EXPIRE_DATE = new Date("9999-12-31T23:59:59.999Z");
export const QUEUE_SERVICE_PERMISSION = "raup";
export const LIST_QUEUE_MAXRESSULTS_MIN = 1;

View file

@ -1,23 +1,8 @@
import { createHash, createHmac, randomBytes } from "crypto";
import etag from "etag";
import { createWriteStream, PathLike } from "fs";
import { parse } from "url";
import { randomBytes } from "crypto";
import * as xml2js from "xml2js";
import StorageErrorFactory from "../errors/StorageErrorFactory";
/**
* Generates a hash signature for an HTTP request or for a SAS.
*
* @param {string} stringToSign
* @param {key} key
* @returns {string}
*/
export function computeHMACSHA256(stringToSign: string, key: Buffer): string {
return createHmac("sha256", key)
.update(stringToSign, "utf8")
.digest("base64");
}
export function checkApiVersion(
inputApiVersion: string,
validApiVersions: Array<string>,
@ -31,105 +16,6 @@ export function checkApiVersion(
}
}
/**
* Rounds a date off to seconds.
*
* @export
* @param {Date} date
* @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
* If false, YYYY-MM-DDThh:mm:ssZ will be returned.
* @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component
*/
export function truncatedISO8061Date(
date: Date,
withMilliseconds: boolean = true
): string {
// Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
const dateString = date.toISOString();
return withMilliseconds
? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
: dateString.substring(0, dateString.length - 5) + "Z";
}
// TODO: Align eTag with Azure Storage Service
export function newEtag(): string {
return etag(`${new Date().getTime()}`);
}
export async function streamToLocalFile(
stream: NodeJS.ReadableStream,
path: PathLike
): Promise<void> {
return new Promise<void>((resolve, reject) => {
const writeStream = createWriteStream(path);
stream
.on("error", reject)
// .on("end", resolve)
.pipe(writeStream)
.on("close", resolve)
.on("error", reject);
});
}
export async function getMD5FromStream(
stream: NodeJS.ReadableStream
): Promise<Uint8Array> {
const hash = createHash("md5");
return new Promise<Uint8Array>((resolve, reject) => {
stream
.on("data", data => {
hash.update(data);
})
.on("end", () => {
resolve(hash.digest());
})
.on("error", reject);
});
}
export async function getMD5FromString(text: string): Promise<Uint8Array> {
return createHash("md5")
.update(text)
.digest();
}
/**
* Get URL query key value pairs from an URL string.
*
* @export
* @param {string} url
* @returns {{[key: string]: string}}
*/
export function getURLQueries(url: string): { [key: string]: string } {
let queryString = parse(url).query;
if (!queryString) {
return {};
}
queryString = queryString.trim();
queryString = queryString.startsWith("?")
? queryString.substr(1)
: queryString;
let querySubStrings: string[] = queryString.split("&");
querySubStrings = querySubStrings.filter((value: string) => {
const indexOfEqual = value.indexOf("=");
const lastIndexOfEqual = value.lastIndexOf("=");
return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual;
});
const queries: { [key: string]: string } = {};
for (const querySubString of querySubStrings) {
const splitResults = querySubString.split("=");
const key: string = splitResults[0];
const value: string = splitResults[1];
queries[key] = value;
}
return queries;
}
/**
* Default range value [0, Infinite] will be returned if all parameters not provided.
*

View file

@ -0,0 +1,21 @@
/**
* This interface defines the required functions of TableEnvironment given command line parameters
* @export
* @interface ITableEnvironment
*/
export default interface ITableEnvironment {
/** Optional. Customize listening address for table */
tableHost(): string | undefined;
/** Optional. Customize listening port for table */
tablePort(): number | undefined;
/** Optional. Use an existing folder as workspace path, default is current working directory */
location(): Promise<string>;
/** Optional. Disable access log displayed in console */
silent(): boolean;
/** Optional. Enable loose mode which ignores unsupported headers and parameters */
loose(): boolean;
/** Optional. Skip the request API version check; requests with any API version will be allowed */
skipApiVersionCheck(): boolean;
/** Optional. Enable debug log by providing a valid local file path as log destination */
debug(): Promise<string | boolean | undefined>;
}

View file

@ -0,0 +1,54 @@
import ConfigurationBase from "../common/ConfigurationBase";
import {
DEFAULT_ENABLE_ACCESS_LOG,
DEFAULT_ENABLE_DEBUG_LOG,
DEFAULT_TABLE_LISTENING_PORT,
DEFAULT_TABLE_LOKI_DB_PATH,
DEFAULT_TABLE_SERVER_HOST_NAME
} from "./utils/constants";
/**
* Default configurations for default implementation of TableServer.
*
* As the default implementation of the TableServer class leverages LokiJS DB,
* this configuration class also maintains configuration settings for LokiJS DB.
*
* When creating other server implementations, a NEW corresponding
* configuration class should also be created by extending ConfigurationBase.
*
* @export
* @class Configuration
*/
export default class TableConfiguration extends ConfigurationBase {
public constructor(
host: string = DEFAULT_TABLE_SERVER_HOST_NAME,
port: number = DEFAULT_TABLE_LISTENING_PORT,
public readonly /* Store metadata */ metadataDBPath: string = DEFAULT_TABLE_LOKI_DB_PATH,
enableDebugLog: boolean = DEFAULT_ENABLE_DEBUG_LOG,
enableAccessLog: boolean = DEFAULT_ENABLE_ACCESS_LOG,
accessLogWriteStream?: NodeJS.WritableStream,
debugLogFilePath?: string,
loose: boolean = false,
skipApiVersionCheck: boolean = false,
cert: string = "",
key: string = "",
pwd: string = "",
oauth?: string
) {
super(
host,
port,
enableAccessLog,
accessLogWriteStream,
enableDebugLog,
debugLogFilePath,
loose,
skipApiVersionCheck,
cert,
key,
pwd,
oauth
);
}
}
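
A hedged construction sketch; the host, port and DB path values are assumptions:

```typescript
import TableConfiguration from "./TableConfiguration"; // path assumed

const config = new TableConfiguration(
  "0.0.0.0",                // host: listen on all interfaces
  10002,                    // port: the default table port
  "./azurite-table-db.json" // metadataDBPath for the LokiJS store
);
```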

View file

@ -0,0 +1,101 @@
/**
* This file stores table parameters parsed from the command line
*/
import args from "args";
import ITableEnvironment from "./ITableEnvironment";
import {
DEFAULT_TABLE_LISTENING_PORT,
DEFAULT_TABLE_SERVER_HOST_NAME
} from "./utils/constants";
args
.option(
["", "tableHost"],
"Optional. Customize listening address for table",
DEFAULT_TABLE_SERVER_HOST_NAME
)
.option(
["", "tablePort"],
"Optional. Customize listening port for table",
DEFAULT_TABLE_LISTENING_PORT
)
.option(
["l", "location"],
"Optional. Use an existing folder as workspace path, default is current working directory",
process.cwd()
)
.option(["s", "silent"], "Optional. Disable access log displayed in console")
.option(
["L", "loose"],
"Optional. Enable loose mode which ignores unsupported headers and parameters"
)
.option(
["", "skipApiVersionCheck"],
"Optional. Skip the request API version check, request with all Api versions will be allowed"
)
.option(
["d", "debug"],
"Optional. Enable debug log by providing a valid local file path as log destination"
);
(args as any).config.name = "azurite-table";
/**
* This class stores table configuration from command line parameters
* @export
*
*/
export default class TableEnvironment implements ITableEnvironment {
private flags = args.parse(process.argv);
public tableHost(): string | undefined {
return this.flags.tableHost;
}
public tablePort(): number | undefined {
return this.flags.tablePort;
}
public async location(): Promise<string> {
return this.flags.location || process.cwd();
}
public silent(): boolean {
if (this.flags.silent !== undefined) {
return true;
}
return false;
}
public loose(): boolean {
if (this.flags.loose !== undefined) {
return true;
}
// default is false, which blocks unsupported APIs, headers and parameters
return false;
}
public skipApiVersionCheck(): boolean {
if (this.flags.skipApiVersionCheck !== undefined) {
return true;
}
// default is false, which will check the API version
return false;
}
public async debug(): Promise<string | undefined> {
if (typeof this.flags.debug === "string") {
// Enable debug log to file
return this.flags.debug;
}
if (this.flags.debug === true) {
throw RangeError(
`Must provide a debug log file path for parameter -d or --debug`
);
}
// By default disable debug log
}
}
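
A sketch of how the parsed flags surface through this class, assuming a run such as `node azurite-table.js --tablePort 11002 -d ./debug.log` (file name illustrative):

```typescript
import TableEnvironment from "./TableEnvironment"; // path assumed

const env = new TableEnvironment(); // parses process.argv on construction
console.log(env.tablePort()); // 11002 when passed, else DEFAULT_TABLE_LISTENING_PORT
console.log(env.loose());     // true only when -L/--loose was passed
env.debug().then((logPath) => console.log(logPath));
// "./debug.log"; rejects with RangeError if -d is given without a path
```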

View file

@ -0,0 +1,150 @@
import { OperationSpec } from "@azure/ms-rest-js/es/lib/operationSpec";
import express from "express";
import { RequestListener } from "http";
import IAccountDataStore from "../common/IAccountDataStore";
import IRequestListenerFactory from "../common/IRequestListenerFactory";
import logger from "../common/Logger";
import AccountSASAuthenticator from "./authentication/AccountSASAuthenticator";
import IAuthenticator from "./authentication/IAuthenticator";
import TableSASAuthenticator from "./authentication/TableSASAuthenticator";
import { TableQueryResponse } from "./generated/artifacts/mappers";
import { Operation } from "./generated/artifacts/operation";
import Specifications from "./generated/artifacts/specifications";
import ExpressMiddlewareFactory from "./generated/ExpressMiddlewareFactory";
import IHandlers from "./generated/handlers/IHandlers";
import MiddlewareFactory from "./generated/MiddlewareFactory";
import ServiceHandler from "./handlers/ServiceHandler";
import TableHandler from "./handlers/TableHandler";
import AuthenticationMiddlewareFactory from "./middleware/AuthenticationMiddlewareFactory";
import createTableStorageContextMiddleware from "./middleware/tableStorageContext.middleware";
import ITableMetadataStore from "./persistence/ITableMetadataStore";
import { DEFAULT_TABLE_CONTEXT_PATH } from "./utils/constants";
import morgan = require("morgan");
import TableSharedKeyAuthenticator from "./authentication/TableSharedKeyAuthenticator";
import TableSharedKeyLiteAuthenticator from "./authentication/TableSharedKeyLiteAuthenticator";
/**
* Default RequestListenerFactory based on express framework.
*
* When creating other server implementations, such as one based on Koa, a NEW
* corresponding TableKoaRequestListenerFactory class should also be created by extending IRequestListenerFactory.
*
* @export
* @class TableRequestListenerFactory
* @implements {IRequestListenerFactory}
*/
export default class TableRequestListenerFactory
implements IRequestListenerFactory {
public constructor(
private readonly metadataStore: ITableMetadataStore,
private readonly accountDataStore: IAccountDataStore,
private readonly enableAccessLog: boolean,
private readonly accessLogWriteStream?: NodeJS.WritableStream,
private readonly skipApiVersionCheck?: boolean
) {}
public createRequestListener(): RequestListener {
// TODO: Workarounds for generated specification isXML issue. Ideally should fix in generator.
type MutableSpecification = {
-readonly [K in keyof OperationSpec]: OperationSpec[K];
};
[
Operation.Table_Create,
Operation.Table_Query,
Operation.Table_Delete,
Operation.Table_QueryEntities,
Operation.Table_QueryEntitiesWithPartitionAndRowKey,
Operation.Table_UpdateEntity,
Operation.Table_MergeEntity,
Operation.Table_DeleteEntity,
Operation.Table_InsertEntity
].forEach((operation) => {
(Specifications[operation] as MutableSpecification).isXML = false;
});
// TODO: The MERGE verb is not supported by the auto generator yet,
// so we generate a POST method and change the verb to MERGE here
Object.defineProperty(
Specifications[Operation.Table_MergeEntityWithMerge],
"httpMethod",
{
value: "MERGE",
writable: false
}
);
// TODO: Override Query Table JSON response element value
TableQueryResponse.type.modelProperties!.value.xmlElementName = "value";
const app = express().disable("x-powered-by");
// MiddlewareFactory is a factory to create auto-generated middleware
const middlewareFactory: MiddlewareFactory = new ExpressMiddlewareFactory(
logger,
DEFAULT_TABLE_CONTEXT_PATH
);
// Create handlers into handler middleware factory
const handlers: IHandlers = {
tableHandler: new TableHandler(this.metadataStore, logger),
serviceHandler: new ServiceHandler(this.metadataStore, logger)
};
/*
* Generated middleware should follow a strict order
* Manually created middleware can be injected at any point
*/
// Access log per request
if (this.enableAccessLog) {
app.use(
morgan("common", {
stream: this.accessLogWriteStream
})
);
}
// Manually created middleware to deserialize feature-related context which swagger doesn't know about
app.use(createTableStorageContextMiddleware(this.skipApiVersionCheck));
// Dispatch incoming HTTP request to specific operation
app.use(middlewareFactory.createDispatchMiddleware());
// AuthN middleware, like shared key auth or SAS auth
const authenticationMiddlewareFactory = new AuthenticationMiddlewareFactory(
logger
);
const authenticators: IAuthenticator[] = [
new TableSharedKeyLiteAuthenticator(this.accountDataStore, logger),
new TableSharedKeyAuthenticator(this.accountDataStore, logger),
new AccountSASAuthenticator(this.accountDataStore, logger),
new TableSASAuthenticator(
this.accountDataStore,
this.metadataStore,
logger
)
];
app.use(
authenticationMiddlewareFactory.createAuthenticationMiddleware(
authenticators
)
);
// Generated, will do basic validation defined in swagger
app.use(middlewareFactory.createDeserializerMiddleware());
// Generated, inject handlers to create a handler middleware
app.use(middlewareFactory.createHandlerMiddleware(handlers));
// Generated, will serialize response models into HTTP response
app.use(middlewareFactory.createSerializerMiddleware());
// Generated, will return MiddlewareError and Errors thrown in previous middleware/handlers to HTTP response
app.use(middlewareFactory.createErrorMiddleware());
// Generated, will end and return HTTP response immediately
app.use(middlewareFactory.createEndMiddleware());
return app;
}
}
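
A hedged sketch wiring this factory into a bare Node HTTP server; the import paths mirror the ones above, and note that the stores still need init() before serving, which TableServer.beforeStart() performs:

```typescript
import * as http from "http";
import AccountDataStore from "../common/AccountDataStore";
import logger from "../common/Logger";
import LokiTableMetadataStore from "./persistence/LokiTableMetadataStore";
import TableRequestListenerFactory from "./TableRequestListenerFactory";

const metadataStore = new LokiTableMetadataStore("./table-db.json"); // path assumed
const accountDataStore = new AccountDataStore(logger);

const factory = new TableRequestListenerFactory(
  metadataStore,
  accountDataStore,
  true // enableAccessLog
);
// Remember to await metadataStore.init() and accountDataStore.init() first.
http.createServer(factory.createRequestListener()).listen(10002);
```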

109
src/table/TableServer.ts Normal file
View file

@ -0,0 +1,109 @@
import * as http from "http";
import * as https from "https";
import AccountDataStore from "../common/AccountDataStore";
import { CertOptions } from "../common/ConfigurationBase";
import IAccountDataStore from "../common/IAccountDataStore";
import IRequestListenerFactory from "../common/IRequestListenerFactory";
import logger from "../common/Logger";
import ITableMetadataStore from "../table/persistence/ITableMetadataStore";
import LokiTableMetadataStore from "../table/persistence/LokiTableMetadataStore";
import ServerBase from "../common/ServerBase";
import TableConfiguration from "./TableConfiguration";
import TableRequestListenerFactory from "./TableRequestListenerFactory";
/**
* Default implementation of Azurite Table HTTP server.
* This implementation provides an HTTP service based on the express framework and a LokiJS in-memory database.
*
* Other table servers can be created by extending the abstract Server class and initializing different httpServer,
* dataStore or requestListenerFactory fields.
*
* For example, creating an HTTPS server to accept HTTPS requests, or using other
* Node.js HTTP frameworks like Koa, or just using another SQL database.
*
* @export
* @class Server
*/
export default class TableServer extends ServerBase {
private readonly /* Store the metadata of the table service */ metadataStore: ITableMetadataStore;
private readonly /* Store the account data */ accountDataStore: IAccountDataStore;
constructor(configuration?: TableConfiguration) {
// If configuration is undefined, we'll use the default one
if (configuration === undefined) {
configuration = new TableConfiguration();
}
// Create an HTTP server to accept table operation requests
let httpServer;
const certOption = configuration.hasCert();
switch (certOption) {
case CertOptions.PEM:
case CertOptions.PFX:
httpServer = https.createServer(configuration.getCert(certOption)!);
break;
default:
httpServer = http.createServer();
}
// Create metadataStore with LokiJS
const metadataStore: ITableMetadataStore = new LokiTableMetadataStore(
configuration.metadataDBPath
);
const accountDataStore: IAccountDataStore = new AccountDataStore(logger);
// Here we use express request listener and register table handler
const requestListenerFactory: IRequestListenerFactory = new TableRequestListenerFactory(
metadataStore,
accountDataStore,
configuration.enableAccessLog, // Access log includes every handled HTTP request
configuration.accessLogWriteStream,
configuration.skipApiVersionCheck
);
const host = configuration.host;
const port = configuration.port;
super(host, port, httpServer, requestListenerFactory, configuration);
this.metadataStore = metadataStore;
this.accountDataStore = accountDataStore;
}
protected async beforeStart(): Promise<void> {
const msg = `Azurite Table service is starting on ${this.host}:${this.port}`;
logger.info(msg);
if (this.accountDataStore !== undefined) {
await this.accountDataStore.init();
}
if (this.metadataStore !== undefined) {
await this.metadataStore.init();
}
}
protected async afterStart(): Promise<void> {
const msg = `Azurite Table service successfully listens on ${this.getHttpServerAddress()}`;
logger.info(msg);
}
protected async beforeClose(): Promise<void> {
const BEFORE_CLOSE_MESSAGE = `Azurite Table service is closing...`;
logger.info(BEFORE_CLOSE_MESSAGE);
}
protected async afterClose(): Promise<void> {
if (this.metadataStore !== undefined) {
await this.metadataStore.close();
}
if (this.accountDataStore !== undefined) {
await this.accountDataStore.close();
}
const AFTER_CLOSE_MESSAGE = `Azurite Table service successfully closed`;
logger.info(AFTER_CLOSE_MESSAGE);
}
}
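
A minimal start/stop sketch, assuming ServerBase exposes the start() and close() methods used below:

```typescript
import TableConfiguration from "./TableConfiguration";
import TableServer from "./TableServer";

async function main(): Promise<void> {
  const config = new TableConfiguration("127.0.0.1", 10002);
  const server = new TableServer(config);
  await server.start(); // runs beforeStart(), which init()s both stores
  // ... serve requests ...
  await server.close();
}

main().catch(console.error);
```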

View file

@ -0,0 +1,304 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import Operation from "../generated/artifacts/operation";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import {
generateAccountSASSignature,
IAccountSASSignatureValues
} from "./IAccountSASSignatureValues";
import IAuthenticator from "./IAuthenticator";
import OPERATION_ACCOUNT_SAS_PERMISSIONS from "./OperationAccountSASPermission";
export default class AccountSASAuthenticator implements IAuthenticator {
public constructor(
private readonly accountDataStore: IAccountDataStore,
private readonly logger: ILogger
) {}
public async validate(
req: IRequest,
context: Context
): Promise<boolean | undefined> {
this.logger.info(
`AccountSASAuthenticator:validate() Start validation against account Shared Access Signature pattern.`,
context.contextID
);
this.logger.debug(
"AccountSASAuthenticator:validate() Getting account properties...",
context.contextID
);
const account: string = context.context.account;
const tableName: string | undefined = context.context.table;
this.logger.debug(
// tslint:disable-next-line:max-line-length
`AccountSASAuthenticator:validate() Retrieved account name from context: ${account}, table: ${tableName}`,
context.contextID
);
// TODO: Make following async
const accountProperties = this.accountDataStore.getAccount(account);
if (accountProperties === undefined) {
throw StorageErrorFactory.getInvalidOperation(
context,
"Invalid storage account."
);
}
this.logger.debug(
"AccountSASAuthenticator:validate() Got account properties successfully.",
context.contextID
);
const signature = this.decodeIfExist(req.getQuery("sig"));
this.logger.debug(
`AccountSASAuthenticator:validate() Retrieved signature from URL parameter sig: ${signature}`,
context.contextID
);
const values = this.getAccountSASSignatureValuesFromRequest(req);
if (values === undefined) {
this.logger.info(
`AccountSASAuthenticator:validate() Failed to get valid account SAS values from request.`,
context.contextID
);
return false;
}
this.logger.debug(
`AccountSASAuthenticator:validate() Successfully got valid account SAS values from request. ${JSON.stringify(
values
)}`,
context.contextID
);
this.logger.info(
`AccountSASAuthenticator:validate() Validate signature based on account key1.`,
context.contextID
);
const [sig1, stringToSign1] = generateAccountSASSignature(
values,
account,
accountProperties.key1
);
this.logger.debug(
`AccountSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign1
)}`,
context.contextID!
);
this.logger.debug(
`AccountSASAuthenticator:validate() Calculated signature is: ${sig1}`,
context.contextID!
);
const sig1Pass = sig1 === signature;
this.logger.info(
`AccountSASAuthenticator:validate() Signature based on key1 validation ${
sig1Pass ? "passed" : "failed"
}.`,
context.contextID
);
if (accountProperties.key2 !== undefined) {
this.logger.info(
`AccountSASAuthenticator:validate() Account key2 is not empty, validate signature based on account key2.`,
context.contextID
);
const [sig2, stringToSign2] = generateAccountSASSignature(
values,
account,
accountProperties.key2
);
this.logger.debug(
`AccountSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign2
)}`,
context.contextID!
);
this.logger.debug(
`AccountSASAuthenticator:validate() Calculated signature is: ${sig2}`,
context.contextID!
);
const sig2Pass = sig2 === signature;
this.logger.info(
`AccountSASAuthenticator:validate() Signature based on key2 validation ${
sig2Pass ? "passed" : "failed"
}.`,
context.contextID
);
if (!sig2Pass && !sig1Pass) {
this.logger.info(
`AccountSASAuthenticator:validate() Signature validation based on account key1 and key2 failed.`,
context.contextID
);
return false;
}
} else {
if (!sig1Pass) {
return false;
}
}
// When signature validation passes, we enforce account SAS validation
// Any validation errors will stop this request immediately
this.logger.info(
`AccountSASAuthenticator:validate() Validate start and expiry time.`,
context.contextID
);
if (!this.validateTime(values.expiryTime, values.startTime)) {
this.logger.info(
`AccountSASAuthenticator:validate() Validate start and expiry failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationFailure(context);
}
this.logger.info(
`AccountSASAuthenticator:validate() Validate IP range.`,
context.contextID
);
if (!this.validateIPRange()) {
this.logger.info(
`AccountSASAuthenticator:validate() Validate IP range failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationSourceIPMismatch(context);
}
this.logger.info(
`AccountSASAuthenticator:validate() Validate request protocol.`,
context.contextID
);
if (!this.validateProtocol(values.protocol, req.getProtocol())) {
this.logger.info(
`AccountSASAuthenticator:validate() Validate protocol failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationProtocolMismatch(context);
}
const operation = context.operation;
if (operation === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`AccountSASAuthenticator:validate() operation shouldn't be undefined. Please make sure DispatchMiddleware is hooked before authentication related middleware.`
);
}
const accountSASPermission = OPERATION_ACCOUNT_SAS_PERMISSIONS.get(
operation
);
this.logger.debug(
`AccountSASAuthenticator:validate() Got permission requirements for operation ${
Operation[operation]
} - ${JSON.stringify(accountSASPermission)}`,
context.contextID
);
if (accountSASPermission === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`AccountSASAuthenticator:validate() OPERATION_ACCOUNT_SAS_PERMISSIONS doesn't have configuration for operation ${Operation[operation]}'s account SAS permission.`
);
}
if (!accountSASPermission.validateServices(values.services)) {
throw StorageErrorFactory.getAuthorizationServiceMismatch(context);
}
if (!accountSASPermission.validateResourceTypes(values.resourceTypes)) {
throw StorageErrorFactory.getAuthorizationResourceTypeMismatch(context);
}
if (!accountSASPermission.validatePermissions(values.permissions)) {
throw StorageErrorFactory.getAuthorizationPermissionMismatch(context);
}
this.logger.info(
`AccountSASAuthenticator:validate() Account SAS validated successfully.`,
context.contextID
);
return true;
}
private getAccountSASSignatureValuesFromRequest(
req: IRequest
): IAccountSASSignatureValues | undefined {
const version = this.decodeIfExist(req.getQuery("sv"));
const services = this.decodeIfExist(req.getQuery("ss"));
const resourceTypes = this.decodeIfExist(req.getQuery("srt"));
const protocol = this.decodeIfExist(req.getQuery("spr"));
const startTime = this.decodeIfExist(req.getQuery("st"));
const expiryTime = this.decodeIfExist(req.getQuery("se"));
const ipRange = this.decodeIfExist(req.getQuery("sip"));
const permissions = this.decodeIfExist(req.getQuery("sp"));
const signature = this.decodeIfExist(req.getQuery("sig"));
if (
version === undefined ||
expiryTime === undefined ||
permissions === undefined ||
services === undefined ||
resourceTypes === undefined ||
signature === undefined
) {
return undefined;
}
const accountSASValues: IAccountSASSignatureValues = {
version,
protocol,
startTime,
expiryTime,
permissions,
ipRange,
services,
resourceTypes
};
return accountSASValues;
}
private validateTime(expiry: Date | string, start?: Date | string): boolean {
const expiryTime = new Date(expiry);
const now = new Date();
if (now > expiryTime) {
return false;
}
if (start !== undefined) {
const startTime = new Date(start);
if (now < startTime) {
return false;
}
}
return true;
}
private validateIPRange(): boolean {
// TODO: Emulator doesn't validate IP Address
return true;
}
private validateProtocol(
sasProtocol: string = "https,http",
requestProtocol: string
): boolean {
if (sasProtocol.includes(",")) {
return true;
} else {
return sasProtocol.toLowerCase() === requestProtocol;
}
}
private decodeIfExist(value?: string): string | undefined {
return value === undefined ? value : decodeURIComponent(value);
}
}
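// Example request shape (editor's addition): the values consumed by
// getAccountSASSignatureValuesFromRequest() above are the standard account SAS
// query parameters, e.g.
//
//   ?sv=2020-06-12&ss=t&srt=sco&sp=rl&se=2021-04-01T00:00:00Z&spr=https&sig=...
//
// sv, ss, srt, sp, se and sig are required; st, sip and spr are optional.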

View file

@ -0,0 +1,190 @@
export enum AccountSASPermission {
Read = "r",
Write = "w",
Delete = "d",
List = "l",
Add = "a",
Create = "c",
Update = "u",
Process = "p"
}
/**
* This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value
* to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the
* values are set, this should be serialized with toString and set as the permissions field on an
* {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but
* the order of the permissions is particular and this class guarantees correctness.
*
* @export
* @class AccountSASPermissions
*/
export default class AccountSASPermissions {
/**
* Parse initializes the AccountSASPermissions fields from a string.
*
* @static
* @param {string} permissions
* @returns {AccountSASPermissions}
* @memberof AccountSASPermissions
*/
public static parse(permissions: string): AccountSASPermissions {
const accountSASPermissions = new AccountSASPermissions();
for (const c of permissions) {
switch (c) {
case AccountSASPermission.Read:
if (accountSASPermissions.read) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.read = true;
break;
case AccountSASPermission.Write:
if (accountSASPermissions.write) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.write = true;
break;
case AccountSASPermission.Delete:
if (accountSASPermissions.delete) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.delete = true;
break;
case AccountSASPermission.List:
if (accountSASPermissions.list) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.list = true;
break;
case AccountSASPermission.Add:
if (accountSASPermissions.add) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.add = true;
break;
case AccountSASPermission.Create:
if (accountSASPermissions.create) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.create = true;
break;
case AccountSASPermission.Update:
if (accountSASPermissions.update) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.update = true;
break;
case AccountSASPermission.Process:
if (accountSASPermissions.process) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASPermissions.process = true;
break;
default:
throw new RangeError(`Invalid permission character: ${c}`);
}
}
return accountSASPermissions;
}
/**
* Permission to read resources and list tables granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public read: boolean = false;
/**
* Permission to write resources granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public write: boolean = false;
/**
* Permission to delete tables granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public delete: boolean = false;
/**
* Permission to list tables granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public list: boolean = false;
/**
* Permission to add table entities granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public add: boolean = false;
/**
* Permission to create tables.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public create: boolean = false;
/**
* Permission to update table entities granted.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public update: boolean = false;
/**
* Permission to process queue messages, which does not apply to the table service.
*
* @type {boolean}
* @memberof AccountSASPermissions
*/
public process: boolean = false;
/**
* Produces the SAS permissions string for an Azure Storage account.
* Call this method to set AccountSASSignatureValues Permissions field.
*
* Using this method will guarantee the resource types are in
* an order accepted by the service.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
*
* @returns {string}
* @memberof AccountSASPermissions
*/
public toString(): string {
// The order of the characters should be as specified here to ensure correctness:
// https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
// Use a string array instead of string concatenating += operator for performance
const permissions: string[] = [];
if (this.read) {
permissions.push(AccountSASPermission.Read);
}
if (this.write) {
permissions.push(AccountSASPermission.Write);
}
if (this.delete) {
permissions.push(AccountSASPermission.Delete);
}
if (this.list) {
permissions.push(AccountSASPermission.List);
}
if (this.add) {
permissions.push(AccountSASPermission.Add);
}
if (this.create) {
permissions.push(AccountSASPermission.Create);
}
if (this.update) {
permissions.push(AccountSASPermission.Update);
}
if (this.process) {
permissions.push(AccountSASPermission.Process);
}
return permissions.join("");
}
}
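// Usage sketch (editor's addition): parse() expands a permission string into
// boolean flags and toString() re-serializes them in the canonical "rwdlacup"
// order from the account SAS docs, so round-tripping normalizes ordering.
//
//   const perms = AccountSASPermissions.parse("lrw");
//   perms.toString();                  // "rwl"
//   AccountSASPermissions.parse("rr"); // throws RangeError (duplicate)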

View file

@ -0,0 +1,103 @@
export enum AccountSASResourceType {
Service = "s",
Container = "c",
Object = "o"
}
/**
* This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value
* to true means that any SAS which uses these permissions will grant access to that resource type. Once all the
* values are set, this should be serialized with toString and set as the resources field on an
* {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but
* the order of the resources is particular and this class guarantees correctness.
*
* @export
* @class AccountSASResourceTypes
*/
export default class AccountSASResourceTypes {
/**
* Creates an {@link AccountSASResourceType} from the specified resource types string. This method will throw an
* Error if it encounters a character that does not correspond to a valid resource type.
*
* @static
* @param {string} resourceTypes
* @returns {AccountSASResourceTypes}
* @memberof AccountSASResourceTypes
*/
public static parse(resourceTypes: string): AccountSASResourceTypes {
const accountSASResourceTypes = new AccountSASResourceTypes();
for (const c of resourceTypes) {
switch (c) {
case AccountSASResourceType.Service:
if (accountSASResourceTypes.service) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASResourceTypes.service = true;
break;
case AccountSASResourceType.Container:
if (accountSASResourceTypes.container) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASResourceTypes.container = true;
break;
case AccountSASResourceType.Object:
if (accountSASResourceTypes.object) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASResourceTypes.object = true;
break;
default:
throw new RangeError(`Invalid resource type: ${c}`);
}
}
return accountSASResourceTypes;
}
/**
* Permission to access service level APIs granted.
*
* @type {boolean}
* @memberof AccountSASResourceTypes
*/
public service: boolean = false;
/**
* Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.
*
* @type {boolean}
* @memberof AccountSASResourceTypes
*/
public container: boolean = false;
/**
* Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.
*
* @type {boolean}
* @memberof AccountSASResourceTypes
*/
public object: boolean = false;
/**
* Converts the given resource types to a string.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
*
* @returns {string}
* @memberof AccountSASResourceTypes
*/
public toString(): string {
const resourceTypes: string[] = [];
if (this.service) {
resourceTypes.push(AccountSASResourceType.Service);
}
if (this.container) {
resourceTypes.push(AccountSASResourceType.Container);
}
if (this.object) {
resourceTypes.push(AccountSASResourceType.Object);
}
return resourceTypes.join("");
}
}

View file

@ -0,0 +1,121 @@
export enum AccountSASService {
Blob = "b",
File = "f",
Queue = "q",
Table = "t"
}
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value
* to true means that any SAS which uses these permissions will grant access to that service. Once all the
* values are set, this should be serialized with toString and set as the services field on an
* {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but
* the order of the services is particular and this class guarantees correctness.
*
* @export
* @class AccountSASServices
*/
export default class AccountSASServices {
/**
* Creates an {@link AccountSASServices} from the specified services string. This method will throw an
* Error if it encounters a character that does not correspond to a valid service.
*
* @static
* @param {string} services
* @returns {AccountSASServices}
* @memberof AccountSASServices
*/
public static parse(services: string): AccountSASServices {
const accountSASServices = new AccountSASServices();
for (const c of services) {
switch (c) {
case AccountSASService.Blob:
if (accountSASServices.blob) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASServices.blob = true;
break;
case AccountSASService.File:
if (accountSASServices.file) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASServices.file = true;
break;
case AccountSASService.Queue:
if (accountSASServices.queue) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASServices.queue = true;
break;
case AccountSASService.Table:
if (accountSASServices.table) {
throw new RangeError(`Duplicated permission character: ${c}`);
}
accountSASServices.table = true;
break;
default:
throw new RangeError(`Invalid service character: ${c}`);
}
}
return accountSASServices;
}
/**
* Permission to access blob resources granted.
*
* @type {boolean}
* @memberof AccountSASServices
*/
public blob: boolean = false;
/**
* Permission to access file resources granted.
*
* @type {boolean}
* @memberof AccountSASServices
*/
public file: boolean = false;
/**
* Permission to access queue resources granted.
*
* @type {boolean}
* @memberof AccountSASServices
*/
public queue: boolean = false;
/**
* Permission to access table resources granted.
*
* @type {boolean}
* @memberof AccountSASServices
*/
public table: boolean = false;
/**
* Converts the given services to a string.
*
* @returns {string}
* @memberof AccountSASServices
*/
public toString(): string {
const services: string[] = [];
if (this.blob) {
services.push(AccountSASService.Blob);
}
if (this.table) {
services.push(AccountSASService.Table);
}
if (this.queue) {
services.push(AccountSASService.Queue);
}
if (this.file) {
services.push(AccountSASService.File);
}
return services.join("");
}
}

View file

@ -0,0 +1,167 @@
import { SasIPRange as IIPRange } from "@azure/storage-blob";
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import AccountSASPermissions from "./AccountSASPermissions";
import AccountSASResourceTypes from "./AccountSASResourceTypes";
import AccountSASServices from "./AccountSASServices";
import { ipRangeToString } from "./IIPRange";
/**
* Protocols for generated SAS.
*
* @export
* @enum {number}
*/
export enum SASProtocol {
/**
* Protocol that allows HTTPS only
*/
HTTPS = "https",
/**
* Protocol that allows both HTTPS and HTTP
*/
HTTPSandHTTP = "https,http"
}
/**
* IAccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account.
*
* @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1
* for more conceptual information on SAS
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
* for descriptions of the parameters, including which are required
*
* @export
* @class IAccountSASSignatureValues
*/
export interface IAccountSASSignatureValues {
/**
* The version of the service this SAS will target.
*
* @type {string}
* @memberof IAccountSASSignatureValues
*/
version: string;
/**
* Optional. SAS protocols allowed.
*
* @type {SASProtocol | string}
* @memberof IAccountSASSignatureValues
*/
protocol?: SASProtocol | string;
/**
* Optional. When the SAS will take effect.
*
* @type {Date | string}
* @memberof IAccountSASSignatureValues
*/
startTime?: Date | string;
/**
* The time after which the SAS will no longer work.
*
* @type {Date | string}
* @memberof IAccountSASSignatureValues
*/
expiryTime: Date | string;
/**
* Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help
* constructing the permissions string.
*
* @type {AccountSASPermissions | string}
* @memberof IAccountSASSignatureValues
*/
permissions: AccountSASPermissions | string;
/**
* Optional. IP range allowed.
*
* @type {IIPRange | string}
* @memberof IAccountSASSignatureValues
*/
ipRange?: IIPRange | string;
/**
* The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to
* construct this value.
*
* @type {AccountSASServices | string}
* @memberof IAccountSASSignatureValues
*/
services: AccountSASServices | string;
/**
* The values that indicate the resource types accessible with this SAS. Please refer
* to {@link AccountSASResourceTypes} to construct this value.
*
* @type {AccountSASResourceTypes | string}
* @memberof IAccountSASSignatureValues
*/
resourceTypes: AccountSASResourceTypes | string;
}
/**
* Generates signature string from account SAS parameters.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
*
* @param {IAccountSASSignatureValues} accountSASSignatureValues
* @param {string} accountName
* @param {Buffer} sharedKey
* @returns {[string, string]} signature and stringToSign
*/
export function generateAccountSASSignature(
accountSASSignatureValues: IAccountSASSignatureValues,
accountName: string,
sharedKey: Buffer
): [string, string] {
const parsedPermissions = accountSASSignatureValues.permissions.toString();
const parsedServices = accountSASSignatureValues.services.toString();
const parsedResourceTypes = accountSASSignatureValues.resourceTypes.toString();
const parsedStartTime =
accountSASSignatureValues.startTime === undefined
? ""
: typeof accountSASSignatureValues.startTime === "string"
? accountSASSignatureValues.startTime
: truncatedISO8061Date(accountSASSignatureValues.startTime, false);
const parsedExpiryTime =
typeof accountSASSignatureValues.expiryTime === "string"
? accountSASSignatureValues.expiryTime
: truncatedISO8061Date(accountSASSignatureValues.expiryTime, false);
const parsedIPRange =
accountSASSignatureValues.ipRange === undefined
? ""
: typeof accountSASSignatureValues.ipRange === "string"
? accountSASSignatureValues.ipRange
: ipRangeToString(accountSASSignatureValues.ipRange);
const parsedProtocol =
accountSASSignatureValues.protocol === undefined
? ""
: accountSASSignatureValues.protocol;
const version = accountSASSignatureValues.version;
const stringToSign = [
accountName,
parsedPermissions,
parsedServices,
parsedResourceTypes,
parsedStartTime,
parsedExpiryTime,
parsedIPRange,
parsedProtocol,
version,
"" // Account SAS requires an additional newline character
].join("\n");
const signature: string = computeHMACSHA256(stringToSign, sharedKey);
return [signature, stringToSign];
}
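// Illustrative sketch (editor's addition): the account SAS string-to-sign is
// the newline-joined field list above, ending with an empty line. Assuming
// computeHMACSHA256 is a base64-encoded HMAC-SHA256 over the account key (as
// in Azurite's common utils), an equivalent standalone computation with Node's
// crypto module would be:
//
//   import { createHmac } from "crypto";
//
//   const accountKey: Buffer = Buffer.from("<base64 key>", "base64"); // assumed
//   const stringToSign = [
//     "devstoreaccount1",     // account name
//     "rl",                   // sp
//     "t",                    // ss
//     "sco",                  // srt
//     "",                     // st (omitted)
//     "2021-04-01T00:00:00Z", // se
//     "",                     // sip (omitted)
//     "https",                // spr
//     "2020-06-12",           // sv
//     ""                      // trailing newline required by the service
//   ].join("\n");
//   const sig = createHmac("sha256", accountKey)
//     .update(stringToSign, "utf8")
//     .digest("base64");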

View file

@ -0,0 +1,3 @@
export default interface IAuthenticationContext {
account?: string;
}

View file

@ -0,0 +1,6 @@
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
export default interface IAuthenticator {
validate(req: IRequest, context: Context): Promise<boolean | undefined>;
}

View file

@ -0,0 +1,37 @@
/**
* Allowed IP range for a SAS.
*
* @export
* @interface IIPRange
*/
export interface IIPRange {
/**
* Starting IP address in the IP range.
* If end IP is not provided, start IP will be the only IP allowed.
*
* @type {string}
* @memberof IIPRange
*/
start: string;
/**
* Optional. IP address that ends the IP range.
* If not provided, start IP will be the only IP allowed.
*
* @type {string}
* @memberof IIPRange
*/
end?: string;
}
/**
* Generate IPRange format string. For example:
*
* "8.8.8.8" or "1.1.1.1-255.255.255.255"
*
* @export
* @param {IIPRange} ipRange
* @returns {string}
*/
export function ipRangeToString(ipRange: IIPRange): string {
return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;
}

View file

@ -0,0 +1,249 @@
import {
computeHMACSHA256,
truncatedISO8061Date
} from "../../common/utils/utils";
import { SASProtocol } from "./IAccountSASSignatureValues";
import { IIPRange, ipRangeToString } from "./IIPRange";
/**
* ITableSASSignatureValues is used to help generate Table service SAS tokens for tables.
*
* @export
* @class ITableSASSignatureValues
*/
export interface ITableSASSignatureValues {
/**
* The version of the service this SAS will target.
*
* @type {string}
* @memberof ITableSASSignatureValues
*/
version: string;
/**
* Optional. SAS protocols, HTTPS only or HTTPSandHTTP
*
* @type {SASProtocol | string}
* @memberof ITableSASSignatureValues
*/
protocol?: SASProtocol | string;
/**
* Optional. When the SAS will take effect.
*
* @type {Date | string}
* @memberof ITableSASSignatureValues
*/
startTime?: Date | string;
/**
* Optional only when identifier is provided. The time after which the SAS will no longer work.
*
* @type {Date | string}
* @memberof ITableSASSignatureValues
*/
expiryTime?: Date | string;
/**
* Optional only when identifier is provided.
* Please refer to either {@link ContainerSASPermissions} or {@link TableSASPermissions} depending on the resource
* being accessed for help constructing the permissions string.
*
* @type {string}
* @memberof ITableSASSignatureValues
*/
permissions?: string;
/**
* Optional. IP ranges allowed in this SAS.
*
* @type {IIPRange | string}
* @memberof ITableSASSignatureValues
*/
ipRange?: IIPRange | string;
/**
* The name of the table the SAS user may access.
*
* @type {string}
* @memberof ITableSASSignatureValues
*/
tableName: string;
/**
* Optional. The name of the access policy on the container this SAS references if any.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy
*
* @type {string}
* @memberof ITableSASSignatureValues
*/
identifier?: string;
startingPartitionKey?: string;
startingRowKey?: string;
endingPartitionKey?: string;
endingRowKey?: string;
}
/**
 * Generates a signature string from table service SAS parameters.
 *
 * WARNING: When identifier is not provided, permissions and expiryTime are required.
 *
 * @export
 * @param {ITableSASSignatureValues} tableSASSignatureValues
 * @param {string} accountName
 * @param {Buffer} sharedKey
 * @returns {[string, string]} signature and stringToSign
 */
export function generateTableSASSignature(
tableSASSignatureValues: ITableSASSignatureValues,
accountName: string,
sharedKey: Buffer
): [string, string] {
if (tableSASSignatureValues.version >= "2018-11-09") {
return generateTableSASSignature20181109(
tableSASSignatureValues,
accountName,
sharedKey
);
} else {
return generateTableSASSignature20150405(
tableSASSignatureValues,
accountName,
sharedKey
);
}
}
function generateTableSASSignature20181109(
tableSASSignatureValues: ITableSASSignatureValues,
accountName: string,
sharedKey: Buffer
): [string, string] {
if (
!tableSASSignatureValues.identifier &&
(!tableSASSignatureValues.permissions &&
!tableSASSignatureValues.expiryTime)
) {
throw new RangeError(
// tslint:disable-next-line:max-line-length
"generateTableSASSignature(): Must provide 'permissions' and 'expiryTime' for Table SAS generation when 'identifier' is not provided."
);
}
const version = tableSASSignatureValues.version;
const verifiedPermissions = tableSASSignatureValues.permissions;
// Signature is generated on the un-url-encoded values.
// TODO: Check whether validating the snapshot is necessary.
const stringToSign = [
verifiedPermissions ? verifiedPermissions : "",
tableSASSignatureValues.startTime === undefined
? ""
: typeof tableSASSignatureValues.startTime === "string"
? tableSASSignatureValues.startTime
: truncatedISO8061Date(tableSASSignatureValues.startTime, false),
tableSASSignatureValues.expiryTime === undefined
? ""
: typeof tableSASSignatureValues.expiryTime === "string"
? tableSASSignatureValues.expiryTime
: truncatedISO8061Date(tableSASSignatureValues.expiryTime, false),
getCanonicalName(accountName, tableSASSignatureValues.tableName),
tableSASSignatureValues.identifier ? tableSASSignatureValues.identifier : "",
tableSASSignatureValues.ipRange
? typeof tableSASSignatureValues.ipRange === "string"
? tableSASSignatureValues.ipRange
: ipRangeToString(tableSASSignatureValues.ipRange)
: "",
tableSASSignatureValues.protocol ? tableSASSignatureValues.protocol : "",
version,
tableSASSignatureValues.startingPartitionKey
? tableSASSignatureValues.startingPartitionKey
: "",
tableSASSignatureValues.startingRowKey
? tableSASSignatureValues.startingRowKey
: "",
tableSASSignatureValues.endingPartitionKey
? tableSASSignatureValues.endingPartitionKey
: "",
tableSASSignatureValues.endingRowKey
? tableSASSignatureValues.endingRowKey
: ""
].join("\n");
const signature = computeHMACSHA256(stringToSign, sharedKey);
return [signature, stringToSign];
}
function generateTableSASSignature20150405(
tableSASSignatureValues: ITableSASSignatureValues,
accountName: string,
sharedKey: Buffer
): [string, string] {
if (
!tableSASSignatureValues.identifier &&
(!tableSASSignatureValues.permissions &&
!tableSASSignatureValues.expiryTime)
) {
throw new RangeError(
// tslint:disable-next-line:max-line-length
"generateTableSASSignature(): Must provide 'permissions' and 'expiryTime' for Table SAS generation when 'identifier' is not provided."
);
}
const version = tableSASSignatureValues.version;
const verifiedPermissions = tableSASSignatureValues.permissions;
// Signature is generated on the un-url-encoded values.
const stringToSign = [
verifiedPermissions ? verifiedPermissions : "",
tableSASSignatureValues.startTime === undefined
? ""
: typeof tableSASSignatureValues.startTime === "string"
? tableSASSignatureValues.startTime
: truncatedISO8061Date(tableSASSignatureValues.startTime, false),
tableSASSignatureValues.expiryTime === undefined
? ""
: typeof tableSASSignatureValues.expiryTime === "string"
? tableSASSignatureValues.expiryTime
: truncatedISO8061Date(tableSASSignatureValues.expiryTime, false),
getCanonicalName(accountName, tableSASSignatureValues.tableName),
tableSASSignatureValues.identifier ? tableSASSignatureValues.identifier : "",
tableSASSignatureValues.ipRange
? typeof tableSASSignatureValues.ipRange === "string"
? tableSASSignatureValues.ipRange
: ipRangeToString(tableSASSignatureValues.ipRange)
: "",
tableSASSignatureValues.protocol ? tableSASSignatureValues.protocol : "",
version,
tableSASSignatureValues.startingPartitionKey
? tableSASSignatureValues.startingPartitionKey
: "",
tableSASSignatureValues.startingRowKey
? tableSASSignatureValues.startingRowKey
: "",
tableSASSignatureValues.endingPartitionKey
? tableSASSignatureValues.endingPartitionKey
: "",
tableSASSignatureValues.endingRowKey
? tableSASSignatureValues.endingRowKey
: ""
].join("\n");
const signature = computeHMACSHA256(stringToSign, sharedKey);
return [signature, stringToSign];
}
function getCanonicalName(accountName: string, tableName: string): string {
return `/table/${accountName}/${tableName}`;
}
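// Worked example (editor's addition): for account "devstoreaccount1" and table
// "mytable", getCanonicalName() returns "/table/devstoreaccount1/mytable". In
// the 2018-11-09 string-to-sign above it appears as the fourth line, followed
// by identifier, sip, spr, sv and the spk/srk/epk/erk range bounds, all joined
// with "\n".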

View file

@ -0,0 +1,219 @@
import { AccountSASPermissions, AccountSASResourceTypes, AccountSASServices } from "@azure/storage-blob";
import Operation from "../generated/artifacts/operation";
import { AccountSASPermission } from "./AccountSASPermissions";
import { AccountSASResourceType } from "./AccountSASResourceTypes";
import { AccountSASService } from "./AccountSASServices";
export class OperationAccountSASPermission {
constructor(
public readonly service: string,
public readonly resourceType: string,
public readonly permission: string
) {}
public validate(
services: AccountSASServices | string,
resourceTypes: AccountSASResourceTypes | string,
permissions: AccountSASPermissions | string
): boolean {
return (
this.validateServices(services) &&
this.validateResourceTypes(resourceTypes) &&
this.validatePermissions(permissions)
);
}
public validateServices(services: AccountSASServices | string): boolean {
return services.toString().includes(this.service);
}
public validateResourceTypes(
resourceTypes: AccountSASResourceTypes | string
): boolean {
for (const p of this.resourceType) {
if (resourceTypes.toString().includes(p)) {
return true;
}
}
return false;
}
public validatePermissions(
permissions: AccountSASPermissions | string
): boolean {
for (const p of this.permission) {
if (permissions.toString().includes(p)) {
return true;
}
}
return false;
}
}
// The permissions are in the table order
// See https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas#table-service
// TODO: Check all required operations
const OPERATION_ACCOUNT_SAS_PERMISSIONS = new Map<
Operation,
OperationAccountSASPermission
>();
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Service_GetProperties,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Service,
AccountSASPermission.Read
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Service_SetProperties,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Service,
AccountSASPermission.Write
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Service_GetStatistics,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Service,
AccountSASPermission.Read
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_Query,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
AccountSASPermission.List
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_Create,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
AccountSASPermission.Create + AccountSASPermission.Write
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_SetAccessPolicy,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
"" // NOT ALLOWED
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_GetAccessPolicy,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
"" // NOT ALLOWED
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_Delete,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
AccountSASPermission.Delete
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_QueryEntities,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Container,
AccountSASPermission.Read
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_QueryEntitiesWithPartitionAndRowKey,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Read
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_InsertEntity,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Add
)
);
// TODO do we need to specify InsertOrMergeEntity?
// TODO do we need to specify InsertOrUpdateEntity
// or are they two separate operations with respective permissions
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_UpdateEntity,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Update
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_MergeEntity,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Update
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_MergeEntityWithMerge,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Update
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_DeleteEntity,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object,
AccountSASPermission.Delete
)
);
OPERATION_ACCOUNT_SAS_PERMISSIONS.set(
Operation.Table_Batch,
new OperationAccountSASPermission(
AccountSASService.Table,
AccountSASResourceType.Object +
AccountSASResourceType.Service +
AccountSASResourceType.Container,
AccountSASPermission.Delete +
AccountSASPermission.Add +
AccountSASPermission.Create +
AccountSASPermission.List +
AccountSASPermission.Process +
AccountSASPermission.Read +
AccountSASPermission.Update +
AccountSASPermission.Write
)
);
export default OPERATION_ACCOUNT_SAS_PERMISSIONS;
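// Usage sketch (editor's addition): the account SAS authenticator looks up the
// requirement for the dispatched operation and validates the ss/srt/sp values
// from the request against it, e.g.
//
//   const requirement = OPERATION_ACCOUNT_SAS_PERMISSIONS.get(
//     Operation.Table_InsertEntity
//   );
//   requirement!.validate("t", "o", "a"); // true: table service, object, add
//   requirement!.validate("t", "c", "a"); // false: wrong resource type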

View file

@ -0,0 +1,140 @@
import Operation from "../generated/artifacts/operation";
import { TableSASPermission } from "./TableSASPermissions";
export class OperationTableSASPermission {
constructor(public readonly permission: string = "") {}
public validate(permissions: string): boolean {
return this.validatePermissions(permissions);
}
public validatePermissions(permissions: string): boolean {
for (const p of this.permission) {
if (permissions.toString().includes(p)) {
return true;
}
}
return false;
}
}
// See https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
// Table Service SAS Permissions for table level
export const OPERATION_TABLE_SAS_TABLE_PERMISSIONS = new Map<
Operation,
OperationTableSASPermission
>();
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Service_SetProperties,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Service_GetProperties,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Service_GetStatistics,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Query,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Create,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Delete,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_QueryEntities,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_QueryEntitiesWithPartitionAndRowKey,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_UpdateEntity,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_MergeEntity,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_DeleteEntity,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_MergeEntityWithMerge,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_InsertEntity,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_GetAccessPolicy,
new OperationTableSASPermission()
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_SetAccessPolicy,
new OperationTableSASPermission()
);
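// Note (editor's addition): Map.set() replaces existing entries, so the
// placeholder registrations above are overridden by the permission-bearing
// entries below. Operations that keep an empty permission string (e.g.
// Table_SetAccessPolicy) always fail validatePermissions() and are therefore
// denied to table SAS tokens.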
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_InsertEntity,
new OperationTableSASPermission(TableSASPermission.Add)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Query,
new OperationTableSASPermission(TableSASPermission.Query)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_QueryEntities,
new OperationTableSASPermission(TableSASPermission.Query)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_QueryEntitiesWithPartitionAndRowKey,
new OperationTableSASPermission(TableSASPermission.Query)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Service_GetProperties,
new OperationTableSASPermission(TableSASPermission.Query)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Service_GetStatistics,
new OperationTableSASPermission(TableSASPermission.Query)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Delete,
new OperationTableSASPermission(TableSASPermission.Delete)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_DeleteEntity,
new OperationTableSASPermission(TableSASPermission.Delete)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_UpdateEntity,
new OperationTableSASPermission(TableSASPermission.Update)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_MergeEntity,
new OperationTableSASPermission(TableSASPermission.Update)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_MergeEntityWithMerge,
new OperationTableSASPermission(TableSASPermission.Update)
);
OPERATION_TABLE_SAS_TABLE_PERMISSIONS.set(
Operation.Table_Batch,
new OperationTableSASPermission(
TableSASPermission.Add +
TableSASPermission.Delete +
TableSASPermission.Query +
TableSASPermission.Update
)
);

Просмотреть файл

@ -0,0 +1,403 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import TableStorageContext from "../context/TableStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import { AccessPolicy } from "../generated/artifacts/models";
import Operation from "../generated/artifacts/operation";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import ITableMetadataStore from "../persistence/ITableMetadataStore";
import IAuthenticator from "./IAuthenticator";
import {
generateTableSASSignature,
ITableSASSignatureValues
} from "./ITableSASSignatureValues";
import { OPERATION_TABLE_SAS_TABLE_PERMISSIONS } from "./OperationTableSASPermission";
export default class TableSASAuthenticator implements IAuthenticator {
public constructor(
private readonly accountDataStore: IAccountDataStore,
private readonly tableMetadataStore: ITableMetadataStore,
private readonly logger: ILogger
) {}
public async validate(
req: IRequest,
context: Context
): Promise<boolean | undefined> {
this.logger.info(
`TableSASAuthenticator:validate() Start validation against table service Shared Access Signature pattern.`,
context.contextID
);
this.logger.debug(
"TableSASAuthenticator:validate() Getting account properties...",
context.contextID
);
const tableContext = new TableStorageContext(context);
const account = tableContext.account;
if (account === undefined) {
throw RangeError(
`TableSASAuthenticator:validate() account is undefined in context.`
);
}
const tableName = tableContext.tableName;
if (tableName === undefined) {
this.logger.error(
`TableSASAuthenticator:validate() table name is undefined in context.`,
context.contextID
);
return undefined;
}
this.logger.debug(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:validate() Retrieved account name from context: ${account}, table: ${tableName}`,
context.contextID
);
// TODO: Make following async
const accountProperties = this.accountDataStore.getAccount(account);
if (accountProperties === undefined) {
throw StorageErrorFactory.getInvalidOperation(
context,
"Invalid storage account."
);
}
this.logger.debug(
"TableSASAuthenticator:validate() Got account properties successfully.",
context.contextID
);
// Extract table service SAS authentication required parameters
const signature = this.decodeIfExist(req.getQuery("sig"));
this.logger.debug(
`TableSASAuthenticator:validate() Retrieved signature from URL parameter sig: ${signature}`,
context.contextID
);
if (signature === undefined) {
this.logger.debug(
`TableSASAuthenticator:validate() No signature found in request. Skip table service SAS validation.`,
context.contextID
);
return undefined;
}
const values = this.getTableSASSignatureValuesFromRequest(
req,
tableName,
context
);
if (values === undefined) {
this.logger.info(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:validate() Failed to get valid table service SAS values from request. Skip table service SAS validation.`,
context.contextID
);
return undefined;
}
this.logger.debug(
`TableSASAuthenticator:validate() Successfully got valid table service SAS values from request. ${JSON.stringify(
values
)}`,
context.contextID
);
this.logger.info(
`TableSASAuthenticator:validate() Validate signature based on account key1.`,
context.contextID
);
const [sig1, stringToSign1] = generateTableSASSignature(
values,
account,
accountProperties.key1
);
this.logger.debug(
`TableSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign1
)}`,
context.contextID!
);
this.logger.debug(
`TableSASAuthenticator:validate() Calculated signature is: ${sig1}`,
context.contextID!
);
const sig1Pass = sig1 === signature;
this.logger.info(
`TableSASAuthenticator:validate() Signature based on key1 validation ${
sig1Pass ? "passed" : "failed"
}.`,
context.contextID
);
if (accountProperties.key2 !== undefined) {
this.logger.info(
`TableSASAuthenticator:validate() Account key2 is not empty, validate signature based on account key2.`,
context.contextID
);
const [sig2, stringToSign2] = generateTableSASSignature(
values,
account,
accountProperties.key2
);
this.logger.debug(
`TableSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign2
)}`,
context.contextID!
);
this.logger.debug(
`TableSASAuthenticator:validate() Calculated signature is: ${sig2}`,
context.contextID!
);
const sig2Pass = sig2 === signature;
this.logger.info(
`TableSASAuthenticator:validate() Signature based on key2 validation ${
sig2Pass ? "passed" : "failed"
}.`,
context.contextID
);
if (!sig2Pass && !sig1Pass) {
this.logger.info(
`TableSASAuthenticator:validate() Signature validation based on account key1 and key2 failed.`,
context.contextID
);
return false;
}
} else {
if (!sig1Pass) {
return false;
}
}
// When signature validation passes, we enforce table service SAS validation
// Any validation errors will stop this request immediately
// TODO: Validate permissions from ACL identifier by extract permissions, start time and expiry time from ACL
if (values.identifier !== undefined) {
const accessPolicy:
| AccessPolicy
| undefined = await this.getTableAccessPolicyByIdentifier(
tableName,
values.identifier,
context
);
if (accessPolicy === undefined) {
this.logger.warn(
`TableSASAuthenticator:validate() Cannot get access policy defined for table ${tableName} with id ${values.identifier}.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationFailure(context);
}
values.startTime = accessPolicy.start;
values.expiryTime = accessPolicy.expiry;
values.permissions = accessPolicy.permission;
}
this.logger.info(
`TableSASAuthenticator:validate() Validate start and expiry time.`,
context.contextID
);
if (!this.validateTime(values.expiryTime, values.startTime)) {
this.logger.info(
`TableSASAuthenticator:validate() Validate start and expiry failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationFailure(context);
}
this.logger.info(
`TableSASAuthenticator:validate() Validate IP range.`,
context.contextID
);
if (!this.validateIPRange()) {
this.logger.info(
`TableSASAuthenticator:validate() Validate IP range failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationSourceIPMismatch(context);
}
this.logger.info(
`TableSASAuthenticator:validate() Validate request protocol.`,
context.contextID
);
if (!this.validateProtocol(values.protocol, req.getProtocol())) {
this.logger.info(
`TableSASAuthenticator:validate() Validate protocol failed.`,
context.contextID
);
throw StorageErrorFactory.getAuthorizationProtocolMismatch(context);
}
const operation = context.operation;
if (operation === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:validate() Operation shouldn't be undefined. Please make sure DispatchMiddleware is hooked before authentication related middleware.`
);
}
const tableSASPermission = OPERATION_TABLE_SAS_TABLE_PERMISSIONS.get(
operation
);
this.logger.debug(
`TableSASAuthenticator:validate() Got permission requirements for operation ${
Operation[operation]
} - ${JSON.stringify(tableSASPermission)}`,
context.contextID
);
if (tableSASPermission === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:validate() OPERATION_TABLE_SAS_TABLE_PERMISSIONS doesn't have configuration for operation ${Operation[operation]}'s table service SAS permission.`
);
}
if (!tableSASPermission.validatePermissions(values.permissions!)) {
throw StorageErrorFactory.getAuthorizationPermissionMismatch(context);
}
this.logger.info(
`TableSASAuthenticator:validate() Table service SAS validated successfully.`,
context.contextID
);
// TODO: Handle enforced response headers defined in table service SAS
return true;
}
private getTableSASSignatureValuesFromRequest(
req: IRequest,
tableName: string,
context?: Context
): ITableSASSignatureValues | undefined {
const version = this.decodeIfExist(req.getQuery("sv"));
const protocol = this.decodeIfExist(req.getQuery("spr"));
const startTime = this.decodeIfExist(req.getQuery("st"));
const expiryTime = this.decodeIfExist(req.getQuery("se"));
const permissions = this.decodeIfExist(req.getQuery("sp"));
const ipRange = this.decodeIfExist(req.getQuery("sip"));
const identifier = this.decodeIfExist(req.getQuery("si"));
const startingPartitionKey = this.decodeIfExist(req.getQuery("spk"));
const startingRowKey = this.decodeIfExist(req.getQuery("srk"));
const endingPartitionKey = this.decodeIfExist(req.getQuery("epk"));
const endingRowKey = this.decodeIfExist(req.getQuery("erk"));
if (!identifier && (!permissions || !expiryTime)) {
this.logger.warn(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:getTableSASSignatureValuesFromRequest(): Must provide 'permissions' and 'expiryTime' for Table SAS validation when 'identifier' is not provided.`,
context ? context.contextID : undefined
);
return undefined;
}
if (version === undefined) {
this.logger.warn(
// tslint:disable-next-line:max-line-length
`TableSASAuthenticator:getTableSASSignatureValuesFromRequest(): Must provide 'version'.`,
context ? context.contextID : undefined
);
return undefined;
}
const tableSASValues: ITableSASSignatureValues = {
version,
protocol,
startTime,
expiryTime,
permissions,
ipRange,
tableName,
identifier,
startingPartitionKey,
startingRowKey,
endingPartitionKey,
endingRowKey
};
return tableSASValues;
}
private validateTime(expiry?: Date | string, start?: Date | string): boolean {
if (expiry === undefined && start === undefined) {
return true;
}
const now = new Date();
if (expiry !== undefined) {
const expiryTime = new Date(expiry);
if (now > expiryTime) {
return false;
}
}
if (start !== undefined) {
const startTime = new Date(start);
if (now < startTime) {
return false;
}
}
return true;
}
private validateIPRange(): boolean {
// TODO: Emulator doesn't validate IP Address
return true;
}
private validateProtocol(
sasProtocol: string = "https,http",
requestProtocol: string
): boolean {
if (sasProtocol.includes(",")) {
return true;
} else {
return sasProtocol.toLowerCase() === requestProtocol;
}
}
private decodeIfExist(value?: string): string | undefined {
return value === undefined ? value : decodeURIComponent(value);
}
private async getTableAccessPolicyByIdentifier(
table: string,
id: string,
context: Context
): Promise<AccessPolicy | undefined> {
try {
const containerModel = await this.tableMetadataStore.getTableAccessPolicy(
context,
table,
{}
);
if (containerModel === undefined) {
return undefined;
}
for (const acl of containerModel) {
if (acl.id === id) {
return acl.accessPolicy;
}
}
} catch (err) {
return undefined;
}
}
}
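// Example request shape (editor's addition): the values consumed by
// getTableSASSignatureValuesFromRequest() above come from the service SAS
// query parameters, e.g.
//
//   ?sv=2020-06-12&sp=ra&se=2021-04-01T00:00:00Z&spk=PK0&srk=RK0&epk=PK9&erk=RK9&sig=...
//
// When si (a stored access policy id) is present, sp and se may be omitted;
// they are then taken from the access policy on the table.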

View file

@ -0,0 +1,6 @@
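// Table service SAS permission characters per the service SAS documentation:
// Query maps to "r", Add to "a", Update to "u" and Delete to "d".
// See https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas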
export enum TableSASPermission {
Query = "r",
Add = "a",
Update = "c",
Delete = "d"
}

View file

@ -0,0 +1,194 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import { computeHMACSHA256, getURLQueries } from "../../common/utils/utils";
import TableStorageContext from "../context/TableStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import { HeaderConstants } from "../utils/constants";
import IAuthenticator from "./IAuthenticator";
export default class TableSharedKeyAuthenticator implements IAuthenticator {
public constructor(
private readonly dataStore: IAccountDataStore,
private readonly logger: ILogger
) {}
public async validate(
req: IRequest,
context: Context
): Promise<boolean | undefined> {
const tableContext = new TableStorageContext(context);
const account = tableContext.account!;
this.logger.info(
`TableSharedKeyAuthenticator:validate() Start validation against account shared key authentication.`,
tableContext.contextID
);
const authHeaderValue = req.getHeader(HeaderConstants.AUTHORIZATION);
if (authHeaderValue === undefined) {
this.logger.info(
// tslint:disable-next-line:max-line-length
`TableSharedKeyAuthenticator:validate() Request doesn't include valid authentication header. Skip shared key authentication.`,
tableContext.contextID
);
return undefined;
}
// TODO: Make following async
const accountProperties = this.dataStore.getAccount(account);
if (accountProperties === undefined) {
this.logger.error(
`TableSharedKeyAuthenticator:validate() Invalid storage account ${account}.`,
tableContext.contextID
);
throw StorageErrorFactory.getInvalidOperation(
tableContext,
"Invalid storage account."
);
}
const stringToSign: string =
[
req.getMethod().toUpperCase(),
this.getHeaderValueToSign(req, HeaderConstants.CONTENT_MD5),
this.getHeaderValueToSign(req, HeaderConstants.CONTENT_TYPE),
this.getHeaderValueToSign(req, HeaderConstants.DATE) ||
this.getHeaderValueToSign(req, HeaderConstants.X_MS_DATE)
].join("\n") +
"\n" +
this.getCanonicalizedResourceString(
req,
account,
tableContext.authenticationPath
);
this.logger.info(
`TableSharedKeyAuthenticator:validate() [STRING TO SIGN]:${JSON.stringify(
stringToSign
)}`,
tableContext.contextID
);
const signature1 = computeHMACSHA256(stringToSign, accountProperties.key1);
const authValue1 = `SharedKey ${account}:${signature1}`;
this.logger.info(
`TableSharedKeyAuthenticator:validate() Calculated authentication header based on key1: ${authValue1}`,
tableContext.contextID
);
if (authHeaderValue === authValue1) {
this.logger.info(
`TableSharedKeyAuthenticator:validate() Signature 1 matched.`,
tableContext.contextID
);
return true;
}
if (accountProperties.key2) {
const signature2 = computeHMACSHA256(
stringToSign,
accountProperties.key2
);
const authValue2 = `SharedKey ${account}:${signature2}`;
this.logger.info(
`TableSharedKeyAuthenticator:validate() Calculated authentication header based on key2: ${authValue2}`,
tableContext.contextID
);
if (authHeaderValue === authValue2) {
this.logger.info(
`TableSharedKeyAuthenticator:validate() Signature 2 matched.`,
tableContext.contextID
);
return true;
}
}
// this.logger.info(`[URL]:${req.getUrl()}`);
// this.logger.info(`[HEADERS]:${req.getHeaders().toString()}`);
// this.logger.info(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
this.logger.info(
`TableSharedKeyAuthenticator:validate() Validation failed.`,
tableContext.contextID
);
return false;
}
/**
* Retrieve header value according to shared key sign rules.
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
*
* @private
* @param {WebResource} request
* @param {string} headerName
* @returns {string}
* @memberof SharedKeyCredentialPolicy
*/
private getHeaderValueToSign(request: IRequest, headerName: string): string {
const value = request.getHeader(headerName);
if (!value) {
return "";
}
// When using version 2015-02-21 or later, if Content-Length is zero, then
// set the Content-Length part of the StringToSign to an empty string.
// https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
return "";
}
return value;
}
/**
* Retrieves canonicalized resource string.
*
* @private
* @param {IRequest} request
* @returns {string}
* @memberof SharedKeyCredentialPolicy
*/
private getCanonicalizedResourceString(
request: IRequest,
account: string,
authenticationPath?: string
): string {
let path = request.getPath() || "/";
// For secondary account, we use account name (without "-secondary") for the path
if (authenticationPath !== undefined) {
path = authenticationPath;
}
let canonicalizedResourceString: string = "";
canonicalizedResourceString += `/${account}${path}`;
const queries = getURLQueries(request.getUrl());
const lowercaseQueries: { [key: string]: string } = {};
if (queries) {
const queryKeys: string[] = [];
for (const key in queries) {
if (queries.hasOwnProperty(key)) {
const lowercaseKey = key.toLowerCase();
lowercaseQueries[lowercaseKey] = queries[key];
queryKeys.push(lowercaseKey);
}
}
if (queryKeys.includes("comp")) {
canonicalizedResourceString += "?comp=" + lowercaseQueries.comp;
}
// queryKeys.sort();
// for (const key of queryKeys) {
// canonicalizedResourceString += `\n${key}:${decodeURIComponent(
// lowercaseQueries[key]
// )}`;
// }
}
return canonicalizedResourceString;
}
}
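// Illustrative sketch (editor's addition): a client builds the matching
// SharedKey header by signing VERB, Content-MD5, Content-Type and Date plus
// the canonicalized resource, as assembled above. Assuming base64 HMAC-SHA256
// over the account key:
//
//   import { createHmac } from "crypto";
//
//   const accountKey: Buffer = Buffer.from("<base64 key>", "base64"); // assumed
//   const stringToSign =
//     ["GET", "", "application/json", "Mon, 29 Mar 2021 09:41:44 GMT"].join("\n") +
//     "\n" +
//     "/devstoreaccount1/mytable"; // canonicalized resource: /{account}{path}
//   const sig = createHmac("sha256", accountKey)
//     .update(stringToSign, "utf8")
//     .digest("base64");
//   const header = `SharedKey devstoreaccount1:${sig}`;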

View file

@ -0,0 +1,194 @@
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import { computeHMACSHA256, getURLQueries } from "../../common/utils/utils";
import TableStorageContext from "../context/TableStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import { HeaderConstants } from "../utils/constants";
import IAuthenticator from "./IAuthenticator";
export default class TableSharedKeyLiteAuthenticator implements IAuthenticator {
public constructor(
private readonly dataStore: IAccountDataStore,
private readonly logger: ILogger
) {}
public async validate(
req: IRequest,
context: Context
): Promise<boolean | undefined> {
const tableContext = new TableStorageContext(context);
const account = tableContext.account!;
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Start validation against account shared key authentication.`,
tableContext.contextID
);
const authHeaderValue = req.getHeader(HeaderConstants.AUTHORIZATION);
if (
authHeaderValue === undefined ||
!authHeaderValue.startsWith("SharedKeyLite")
) {
this.logger.info(
// tslint:disable-next-line:max-line-length
`TableSharedKeyLiteAuthenticator:validate() Request doesn't include valid authentication header. Skip SharedKeyLite authentication.`,
tableContext.contextID
);
return;
}
// TODO: Make following async
const accountProperties = this.dataStore.getAccount(account);
if (accountProperties === undefined) {
this.logger.error(
`TableSharedKeyLiteAuthenticator:validate() Invalid storage account ${account}.`,
tableContext.contextID
);
throw StorageErrorFactory.getInvalidOperation(
tableContext,
"Invalid storage account."
);
}
const stringToSign: string =
[
this.getHeaderValueToSign(req, HeaderConstants.DATE) ||
this.getHeaderValueToSign(req, HeaderConstants.X_MS_DATE)
].join("\n") +
"\n" +
this.getCanonicalizedResourceString(
req,
account,
tableContext.authenticationPath
);
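    // Illustrative sketch (assumed date and account): a GET on
    //   http://127.0.0.1:10002/devstoreaccount1/Tables
    // sent with "x-ms-date: Mon, 29 Mar 2021 09:41:44 GMT" yields the
    // string-to-sign
    //   "Mon, 29 Mar 2021 09:41:44 GMT\n/devstoreaccount1/Tables"
    // which is HMAC-SHA256 signed to build
    //   "Authorization: SharedKeyLite devstoreaccount1:<base64 signature>".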
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() [STRING TO SIGN]:${JSON.stringify(
stringToSign
)}`,
tableContext.contextID
);
const signature1 = computeHMACSHA256(stringToSign, accountProperties.key1);
const authValue1 = `SharedKeyLite ${account}:${signature1}`;
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Calculated authentication header based on key1: ${authValue1}`,
tableContext.contextID
);
if (authHeaderValue === authValue1) {
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Signature 1 matched.`,
tableContext.contextID
);
return true;
}
if (accountProperties.key2) {
const signature2 = computeHMACSHA256(
stringToSign,
accountProperties.key2
);
const authValue2 = `SharedKeyLite ${account}:${signature2}`;
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Calculated authentication header based on key2: ${authValue2}`,
tableContext.contextID
);
if (authHeaderValue === authValue2) {
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Signature 2 matched.`,
tableContext.contextID
);
return true;
}
}
// this.logger.info(`[URL]:${req.getUrl()}`);
// this.logger.info(`[HEADERS]:${req.getHeaders().toString()}`);
// this.logger.info(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
this.logger.info(
`TableSharedKeyLiteAuthenticator:validate() Validation failed.`,
tableContext.contextID
);
return false;
}
/**
* Retrieve header value according to shared key sign rules.
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
*
* @private
   * @param {IRequest} request
   * @param {string} headerName
   * @returns {string}
   * @memberof TableSharedKeyLiteAuthenticator
*/
private getHeaderValueToSign(request: IRequest, headerName: string): string {
const value = request.getHeader(headerName);
if (!value) {
return "";
}
// When using version 2015-02-21 or later, if Content-Length is zero, then
// set the Content-Length part of the StringToSign to an empty string.
// https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
return "";
}
return value;
}
/**
* Retrieves canonicalized resource string.
*
* @private
   * @param {IRequest} request
   * @param {string} account
   * @param {string} [authenticationPath]
   * @returns {string}
   * @memberof TableSharedKeyLiteAuthenticator
*/
private getCanonicalizedResourceString(
request: IRequest,
account: string,
authenticationPath?: string
): string {
let path = request.getPath() || "/";
// For secondary account, we use account name (without "-secondary") for the path
if (authenticationPath !== undefined) {
path = authenticationPath;
}
let canonicalizedResourceString: string = "";
canonicalizedResourceString += `/${account}${path}`;
const queries = getURLQueries(request.getUrl());
const lowercaseQueries: { [key: string]: string } = {};
if (queries) {
const queryKeys: string[] = [];
for (const key in queries) {
if (queries.hasOwnProperty(key)) {
const lowercaseKey = key.toLowerCase();
lowercaseQueries[lowercaseKey] = queries[key];
queryKeys.push(lowercaseKey);
}
}
if (queryKeys.includes("comp")) {
canonicalizedResourceString += "?comp=" + lowercaseQueries.comp;
}
// queryKeys.sort();
// for (const key of queryKeys) {
// canonicalizedResourceString += `\n${key}:${decodeURIComponent(
// lowercaseQueries[key]
// )}`;
// }
}
return canonicalizedResourceString;
}
}

@@ -0,0 +1,68 @@
import {
DataServiceVersion7,
QueryOptions,
ResponseFormat,
TableDeleteEntityOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Delete Entity Optional Params
*
* @export
* @class BatchTableDeleteEntityOptionalParams
* @implements {TableDeleteEntityOptionalParams}
*/
export default class BatchTableDeleteEntityOptionalParams
implements TableDeleteEntityOptionalParams, IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*
* @type {number}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*
* @type {string}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*
* @type {DataServiceVersion7}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public dataServiceVersion?: DataServiceVersion7;
/**
* The properties for the table entity.
*
* @type {{ [propertyName: string]: any }}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*
* @type {ResponseFormat}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*
* @type {QueryOptions}
* @memberof BatchTableDeleteEntityOptionalParams
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,46 @@
import {
DataServiceVersion9,
QueryOptions,
ResponseFormat,
TableInsertEntityOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Insert Entity Optional Params
*
* @export
* @class BatchTableInsertEntityOptionalParams
* @implements {TableInsertEntityOptionalParams}
* @implements {IOptionalParams}
*/
export default class BatchTableInsertEntityOptionalParams
implements TableInsertEntityOptionalParams, IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*/
public dataServiceVersion?: DataServiceVersion9;
/**
* The properties for the table entity.
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,46 @@
import {
DataServiceVersion6,
QueryOptions,
ResponseFormat,
TableMergeEntityOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Merge Entity Optional Params
*
* @export
* @class BatchTableMergeEntityOptionalParams
* @implements {TableMergeEntityOptionalParams}
* @implements {IOptionalParams}
*/
export default class BatchTableMergeEntityOptionalParams
implements TableMergeEntityOptionalParams, IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*/
public dataServiceVersion?: DataServiceVersion6;
/**
* The properties for the table entity.
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,46 @@
import {
DataServiceVersion3,
QueryOptions,
ResponseFormat,
TableQueryEntitiesOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Query Entities Optional Params
*
* @export
* @class BatchTableQueryEntitiesOptionalParams
* @implements {TableQueryEntitiesOptionalParams}
* @implements {IOptionalParams}
*/
export default class BatchTableQueryEntitiesOptionalParams
implements TableQueryEntitiesOptionalParams, IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*/
public dataServiceVersion?: DataServiceVersion3;
/**
* The properties for the table entity.
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,48 @@
import {
DataServiceVersion4,
QueryOptions,
ResponseFormat,
TableQueryEntitiesWithPartitionAndRowKeyOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Query Entities With Partition And RowKey Optional Params
*
* @export
* @class BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams
* @implements {TableQueryEntitiesWithPartitionAndRowKeyOptionalParams}
* @implements {IOptionalParams}
*/
export default class BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams
implements
TableQueryEntitiesWithPartitionAndRowKeyOptionalParams,
IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*/
public dataServiceVersion?: DataServiceVersion4;
/**
* The properties for the table entity.
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,46 @@
import {
DataServiceVersion5,
QueryOptions,
ResponseFormat,
TableUpdateEntityOptionalParams
} from "../generated/artifacts/models";
import IOptionalParams from "./IOptionalParams";
/**
* Batch Table Update Entity Optional Params
*
* @export
* @class BatchTableUpdateEntityOptionalParams
* @implements {TableUpdateEntityOptionalParams}
* @implements {IOptionalParams}
*/
export default class BatchTableUpdateEntityOptionalParams
implements TableUpdateEntityOptionalParams, IOptionalParams {
/**
* The timeout parameter is expressed in seconds.
*/
public timeout?: number;
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*/
public requestId?: string;
/**
* Specifies the data service version. Possible values include: '3.0'
*/
public dataServiceVersion?: DataServiceVersion5;
/**
* The properties for the table entity.
*/
public tableEntityProperties?: { [propertyName: string]: any };
/**
* Specifies whether the response should include the inserted entity in the payload. Possible
* values are return-no-content and return-content. Possible values include: 'return-no-content',
* 'return-content'
*/
public responsePreference?: ResponseFormat;
/**
* Additional parameters for the operation
*/
public queryOptions?: QueryOptions;
}

@@ -0,0 +1,58 @@
import {
DataServiceVersion3,
DataServiceVersion4,
DataServiceVersion5,
DataServiceVersion6,
DataServiceVersion7,
DataServiceVersion9,
QueryOptions
} from "../generated/artifacts/models";
/**
* Interface to simplify the processing of batch requests which need to
* be sent through to the table handler.
*
* @export
* @interface IOptionalParams
*/
export default interface IOptionalParams {
/**
* Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the
* analytics logs when analytics logging is enabled.
*
* @type {(string | undefined)}
* @memberof IOptionalParams
*/
requestId?: string | undefined;
/**
* Specifies the data service version. Possible values include: '3.0', although the service returns 1.0!
*
* @type {(DataServiceVersion4 | DataServiceVersion5 | undefined)}
* @memberof IOptionalParams
*/
dataServiceVersion?:
| DataServiceVersion3
| DataServiceVersion4
| DataServiceVersion5
| DataServiceVersion6
| DataServiceVersion7
| DataServiceVersion9
| undefined;
/**
* The properties for the table entity.
*
* @type {{ [propertyName: string]: any }}
* @memberof IOptionalParams
*/
tableEntityProperties?: { [propertyName: string]: any };
/**
* Additional parameters for the operation
*
* @type {QueryOptions}
* @memberof IOptionalParams
*/
queryOptions?: QueryOptions;
}
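// Usage sketch (grounded in TableBatchOrchestrator below): each concrete
// implementation is instantiated empty and handed to
// BatchRequest.ingestOptionalParams, which populates it from the parsed
// sub-request before the table handler is invoked, e.g.
//   request.ingestOptionalParams(new BatchTableInsertEntityOptionalParams());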

@@ -0,0 +1,7 @@
import BatchOperation, { BatchType } from "../../common/batch/BatchOperation";
export default class TableBatchOperation extends BatchOperation {
public constructor(_batchType: BatchType, headers: string) {
super(_batchType, headers);
}
}

@@ -0,0 +1,536 @@
import BatchRequest from "../../common/batch/BatchRequest";
import BatchTableInsertEntityOptionalParams from "./BatchTableInsertEntityOptionalParams";
import TableStorageContext from "../context/TableStorageContext";
import Context from "../generated/Context";
import TableHandler from "../handlers/TableHandler";
import { TableBatchSerialization } from "./TableBatchSerialization";
import TableBatchOperation from "./TableBatchOperation";
import BatchTableDeleteEntityOptionalParams from "./BatchTableDeleteEntityOptionalParams";
import BatchTableUpdateEntityOptionalParams from "./BatchTableUpdateEntityOptionalParams";
import BatchTableMergeEntityOptionalParams from "./BatchTableMergeEntityOptionalParams";
import BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams from "./BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams";
import {
TableQueryEntitiesOptionalParams,
TableQueryEntitiesWithPartitionAndRowKeyOptionalParams
} from "../generated/artifacts/models";
import BatchTableQueryEntitiesOptionalParams from "./BatchTableQueryEntitiesOptionalParams";
/**
* Currently there is a single distinct and concrete implementation of batch /
* entity group operations for the table api.
* The orchestrator manages the deserialization, submission and serialization of
* entity group transactions.
* ToDo: it might be possible to share code between this and the blob batch api, but this
* has not yet been validated.
* Will need refactoring when we address batch transactions for blob.
*
* @export
* @class TableBatchOrchestrator
*/
export default class TableBatchOrchestrator {
private batchOperations: TableBatchOperation[] = [];
private requests: BatchRequest[] = [];
private serialization = new TableBatchSerialization();
private context: TableStorageContext;
private parentHandler: TableHandler;
public constructor(context: TableStorageContext, handler: TableHandler) {
this.context = context;
this.parentHandler = handler;
}
/**
* This is the central route / sequence of the batch orchestration.
* Takes batchRequest body, deserializes requests, submits to handlers, then returns serialized response
*
* @param {string} batchRequestBody
* @return {*} {Promise<string>}
   * @memberof TableBatchOrchestrator
*/
public async processBatchRequestAndSerializeResponse(
batchRequestBody: string
): Promise<string> {
this.batchOperations = this.serialization.deserializeBatchRequest(
batchRequestBody
);
await this.submitRequestsToHandlers();
return this.serializeResponses();
}
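  // Minimal usage sketch (hypothetical call site; the actual handler wiring
  // may differ):
  //   const orchestrator = new TableBatchOrchestrator(tableContext, this);
  //   const body = await orchestrator.processBatchRequestAndSerializeResponse(
  //     requestBody
  //   );
  //   // "body" is the multipart/mixed payload returned to the client.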
/**
* Submits requests to the appropriate handlers
* ToDo: Correct logic and handling of requests with Content ID
*
* @private
* @return {*} {Promise<void>}
   * @memberof TableBatchOrchestrator
*/
private async submitRequestsToHandlers(): Promise<void> {
this.batchOperations.forEach((operation) => {
const request: BatchRequest = new BatchRequest(operation);
this.requests.push(request);
});
    let contentID = 1; // contentID starts at 1 for batch
    for (const singleReq of this.requests) {
      singleReq.response = await this.routeAndDispatchBatchRequest(
        singleReq,
        this.context,
        contentID
      );
      contentID++;
    }
}
/**
* Serializes responses from the table handler
* see Link below for details of response format
* https://docs.microsoft.com/en-us/rest/api/storageservices/performing-entity-group-transactions#json-versions-2013-08-15-and-later-2
*
* @private
* @return {*} {string}
   * @memberof TableBatchOrchestrator
*/
private serializeResponses(): string {
let responseString: string = "";
// based on research, a stringbuilder is only worth doing with 1000s of string ops
// this can be optimized later if we get reports of slow batch operations
const batchBoundary = this.serialization.batchBoundary.replace(
"batch",
"batchresponse"
);
let changesetBoundary = this.serialization.changesetBoundary.replace(
"changeset",
"changesetresponse"
);
responseString += batchBoundary + "\r\n";
// (currently static header) ToDo: Validate if we need to correct headers via tests
responseString +=
"Content-Type: multipart/mixed; boundary=" + changesetBoundary + "\r\n";
changesetBoundary = "\r\n--" + changesetBoundary;
this.requests.forEach((request) => {
responseString += changesetBoundary;
responseString += request.response;
responseString += "\r\n\r\n";
});
responseString += changesetBoundary + "--\r\n";
responseString += batchBoundary + "--\r\n";
return responseString;
}
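  // Illustrative response shape (boundary names assumed) produced above:
  //   --batchresponse_<guid>
  //   Content-Type: multipart/mixed; boundary=changesetresponse_<guid>
  //
  //   --changesetresponse_<guid>
  //   <serialized sub-response per request>
  //
  //   --changesetresponse_<guid>--
  //   --batchresponse_<guid>--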
/**
* Routes and dispatches single operations against the table handler and stores
* the serialized result.
*
* @private
* @param {BatchRequest} request
* @param {Context} context
* @param {number} contentID
* @return {*} {Promise<any>}
   * @memberof TableBatchOrchestrator
*/
private async routeAndDispatchBatchRequest(
request: BatchRequest,
context: Context,
contentID: number
): Promise<any> {
    // The incoming context will not work for the individual batch calls and
    // must be updated per operation; each sub-request has to be treated
    // separately. Object.create gives a prototype-linked clone rather than a
    // deep copy, which suffices because we only overwrite top-level keys.
    const batchContextClone = Object.create(context);
batchContextClone.tableName = request.getPath();
batchContextClone.path = request.getPath();
let response: any;
let __return: any;
// we only use 5 HTTP Verbs to determine the table operation type
try {
switch (request.getMethod()) {
case "POST":
// INSERT: we are inserting an entity
// POST https://myaccount.table.core.windows.net/mytable
({ __return, response } = await this.handleBatchInsert(
request,
response,
batchContextClone,
contentID
));
break;
case "PUT":
// UPDATE: we are updating an entity
// PUT http://127.0.0.1:10002/devstoreaccount1/mytable(PartitionKey='myPartitionKey', RowKey='myRowKey')
// INSERT OR REPLACE:
// PUT https://myaccount.table.core.windows.net/mytable(PartitionKey='myPartitionKey', RowKey='myRowKey')
({ __return, response } = await this.handleBatchUpdate(
request,
response,
batchContextClone,
contentID
));
break;
case "DELETE":
// DELETE: we are deleting an entity
// DELETE https://myaccount.table.core.windows.net/mytable(PartitionKey='myPartitionKey', RowKey='myRowKey')
({ __return, response } = await this.handleBatchDelete(
request,
response,
batchContextClone,
contentID
));
break;
case "GET":
// QUERY : we are querying / retrieving an entity
// GET https://myaccount.table.core.windows.net/mytable(PartitionKey='<partition-key>',RowKey='<row-key>')?$select=<comma-separated-property-names>
// GET https://myaccount.table.core.windows.net/mytable()?$filter=<query-expression>&$select=<comma-separated-property-names>
({ __return, response } = await this.handleBatchQuery(
request,
response,
batchContextClone,
contentID
));
break;
case "CONNECT":
throw new Error("Connect Method unsupported in batch.");
break;
case "HEAD":
throw new Error("Head Method unsupported in batch.");
break;
case "OPTIONS":
throw new Error("Options Method unsupported in batch.");
break;
case "TRACE":
throw new Error("Trace Method unsupported in batch.");
break;
case "PATCH":
throw new Error("Patch Method unsupported in batch.");
break;
default:
// MERGE: this must be the merge, as the merge operation is not currently generated by autorest
// MERGE https://myaccount.table.core.windows.net/mytable(PartitionKey='myPartitionKey', RowKey='myRowKey')
// INSERT OR MERGE
// MERGE https://myaccount.table.core.windows.net/mytable(PartitionKey='myPartitionKey', RowKey='myRowKey')
({ __return, response } = await this.handleBatchMerge(
request,
response,
batchContextClone,
contentID
));
}
} catch (batchException) {
// this allows us to catch and debug any errors in the batch handling
throw batchException;
}
return __return;
}
/**
* Helper function to extract values needed for handler calls
*
* @private
* @param {BatchRequest} request
* @return {*} {{ partitionKey: string; rowKey: string }}
   * @memberof TableBatchOrchestrator
*/
private extractRowAndPartitionKeys(
request: BatchRequest
): { partitionKey: string; rowKey: string } {
let partitionKey: string;
let rowKey: string;
const url = request.getUrl();
const partKeyMatch = url.match(/(PartitionKey=)(%27)?'?(\w+)/gi);
partitionKey = partKeyMatch ? partKeyMatch[0] : "";
const rowKeyMatch = url.match(/(RowKey=)(%27)?'?(\w+)/gi);
rowKey = rowKeyMatch ? rowKeyMatch[0] : "";
if (partitionKey !== "" || rowKey !== "") {
      // we need to filter out the delimiter (if URL encoded)
const urlencodedMatch = partitionKey.match(/%/);
let matchStringLength = 14;
if (urlencodedMatch) {
matchStringLength += 2;
}
partitionKey = partitionKey.substring(matchStringLength);
matchStringLength = 8;
if (urlencodedMatch) {
matchStringLength += 2;
}
rowKey = rowKey.substring(matchStringLength);
} else {
// row key not in URL, must be in body
const body = request.getBody();
if (body !== "") {
const jsonBody = JSON.parse(body ? body : "{}");
partitionKey = jsonBody.PartitionKey;
rowKey = jsonBody.RowKey;
}
}
return { partitionKey, rowKey };
}
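  // Illustrative sketch (assumed URLs): each of the following yields
  // partitionKey "pk1" and rowKey "rk1":
  //   .../mytable(PartitionKey='pk1',RowKey='rk1')           quoted keys in URL
  //   .../mytable(PartitionKey=%27pk1%27,RowKey=%27rk1%27)   URL-encoded quotes
  //   a request body containing {"PartitionKey":"pk1","RowKey":"rk1"}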
/**
* Handles an insert operation inside a batch
*
* @private
* @param {BatchRequest} request
* @param {*} response
* @param {*} batchContextClone
* @param {number} contentID
* @return {*} {Promise<{
* __return: string;
* response: any;
* }>}
   * @memberof TableBatchOrchestrator
*/
private async handleBatchInsert(
request: BatchRequest,
response: any,
batchContextClone: any,
contentID: number
): Promise<{
__return: string;
response: any;
}> {
request.ingestOptionalParams(new BatchTableInsertEntityOptionalParams());
const updatedContext = batchContextClone as TableStorageContext;
updatedContext.request = request;
response = await this.parentHandler.insertEntity(
request.getPath(),
request.params as BatchTableInsertEntityOptionalParams,
updatedContext
);
return {
__return: this.serialization.serializeTableInsertEntityBatchResponse(
request,
response
),
response
};
}
/**
* Handles a delete Operation inside a batch request
*
* @private
* @param {BatchRequest} request
* @param {*} response
* @param {*} batchContextClone
* @param {number} contentID
* @return {*} {Promise<{
* __return: string;
* response: any;
* }>}
   * @memberof TableBatchOrchestrator
*/
private async handleBatchDelete(
request: BatchRequest,
response: any,
batchContextClone: any,
contentID: number
): Promise<{
__return: string;
response: any;
}> {
request.ingestOptionalParams(new BatchTableDeleteEntityOptionalParams());
const updatedContext = batchContextClone as TableStorageContext;
updatedContext.request = request;
let partitionKey: string;
let rowKey: string;
const ifmatch: string = request.getHeader("if-match") || "*";
({ partitionKey, rowKey } = this.extractRowAndPartitionKeys(request));
response = await this.parentHandler.deleteEntity(
request.getPath(),
partitionKey,
rowKey,
ifmatch,
request.params as BatchTableDeleteEntityOptionalParams,
updatedContext
);
return {
__return: this.serialization.serializeTableDeleteEntityBatchResponse(
request,
response
),
response
};
}
/**
* Handles an update Operation inside a batch request
*
* @private
* @param {BatchRequest} request
* @param {*} response
* @param {*} batchContextClone
* @param {number} contentID
* @return {*} {Promise<{
* __return: string;
* response: any;
* }>}
   * @memberof TableBatchOrchestrator
*/
private async handleBatchUpdate(
request: BatchRequest,
response: any,
batchContextClone: any,
contentID: number
): Promise<{
__return: string;
response: any;
}> {
request.ingestOptionalParams(new BatchTableUpdateEntityOptionalParams());
const updatedContext = batchContextClone as TableStorageContext;
updatedContext.request = request;
let partitionKey: string;
let rowKey: string;
({ partitionKey, rowKey } = this.extractRowAndPartitionKeys(request));
response = await this.parentHandler.updateEntity(
request.getPath(),
partitionKey,
rowKey,
request.params as BatchTableUpdateEntityOptionalParams,
updatedContext
);
return {
__return: this.serialization.serializeTableUpdateEntityBatchResponse(
request,
response
),
response
};
}
/**
* Handles a query operation inside a batch request,
* should only ever be one operation if there is a query
*
* @private
* @param {BatchRequest} request
* @param {*} response
* @param {*} batchContextClone
* @param {number} contentID
* @return {*} {Promise<{
* __return: string;
* response: any;
* }>}
   * @memberof TableBatchOrchestrator
*/
private async handleBatchQuery(
request: BatchRequest,
response: any,
batchContextClone: any,
contentID: number
): Promise<{
__return: string;
response: any;
}> {
let partitionKey: string;
let rowKey: string;
({ partitionKey, rowKey } = this.extractRowAndPartitionKeys(request));
const updatedContext = batchContextClone as TableStorageContext;
    // loose inequality is used so that undefined keys (possible when falling
    // back to the request body) are treated like null
    if (null != partitionKey && null != rowKey) {
      // ToDo: this is hideous... but we need the params on the request object,
      // as they percolate through and are needed for the final serialization
      // currently, because of the way we deconstruct / deserialize, we only
      // have the right model at a very late stage in processing
      // this might resolve when we simplify Query logic
      // based on only accepting Query with partition and row key
request.ingestOptionalParams(
new BatchTableQueryEntitiesWithPartitionAndRowKeyOptionalParams()
);
updatedContext.request = request;
response = await this.parentHandler.queryEntitiesWithPartitionAndRowKey(
request.getPath(),
partitionKey,
rowKey,
request.params as TableQueryEntitiesWithPartitionAndRowKeyOptionalParams,
updatedContext
);
return {
__return: await this.serialization.serializeTableQueryEntityWithPartitionAndRowKeyBatchResponse(
request,
response
),
response
};
} else {
request.ingestOptionalParams(new BatchTableQueryEntitiesOptionalParams());
updatedContext.request = request;
response = await this.parentHandler.queryEntities(
request.getPath(),
request.params as TableQueryEntitiesOptionalParams,
updatedContext
);
return {
__return: await this.serialization.serializeTableQueryEntityBatchResponse(
request,
response
),
response
};
}
}
/**
* Handles a merge operation inside a batch request
*
* @private
* @param {BatchRequest} request
* @param {*} response
* @param {*} batchContextClone
* @param {number} contentID
* @return {*} {Promise<{
* __return: string;
* response: any;
* }>}
* @memberof TableBatchManager
*/
private async handleBatchMerge(
request: BatchRequest,
response: any,
batchContextClone: any,
contentID: number
): Promise<{
__return: string;
response: any;
}> {
request.ingestOptionalParams(new BatchTableMergeEntityOptionalParams());
const updatedContext = batchContextClone as TableStorageContext;
updatedContext.request = request;
let partitionKey: string;
let rowKey: string;
({ partitionKey, rowKey } = this.extractRowAndPartitionKeys(request));
response = await this.parentHandler.mergeEntity(
request.getPath(),
partitionKey,
rowKey,
request.params as BatchTableMergeEntityOptionalParams,
updatedContext
);
return {
      __return: this.serialization.serializeTableMergeEntityBatchResponse(
request,
response
),
response
};
}
}

@@ -0,0 +1,575 @@
import { StorageError } from "../../blob/generated/artifacts/mappers";
// import BatchOperation from "../../common/BatchOperation";
// import { BatchOperationType } from "../../common/BatchOperation";
import { BatchType } from "../../common/batch/BatchOperation";
import BatchRequest from "../../common/batch/BatchRequest";
// import BatchSubResponse from "../../common/BatchSubResponse";
import { HttpMethod } from "../../table/generated/IRequest";
import { BatchSerialization } from "../../common/batch/BatchSerialization";
import TableBatchOperation from "../batch/TableBatchOperation";
import * as Models from "../generated/artifacts/models";
import TableBatchUtils from "./TableBatchUtils";
/**
* The semantics for entity group transactions are defined by the OData Protocol Specification.
* https://www.odata.org/
* http://docs.oasis-open.org/odata/odata-json-format/v4.01/odata-json-format-v4.01.html#_Toc38457781
*
* for now we are first getting the concrete implementation correct for table batch
* we then need to figure out how to do this for blob, and what can be shared.
* We set several headers in the responses to the same values that we see returned
* from the Azure Storage Service.
*
* @export
* @class TableBatchSerialization
* @extends {BatchSerialization}
*/
export class TableBatchSerialization extends BatchSerialization {
/**
* Deserializes a batch request
*
* @param {string} batchRequestsString
* @return {*} {TableBatchOperation[]}
* @memberof TableBatchSerialization
*/
public deserializeBatchRequest(
batchRequestsString: string
): TableBatchOperation[] {
this.extractBatchBoundary(batchRequestsString);
this.extractChangeSetBoundary(batchRequestsString);
this.extractLineEndings(batchRequestsString);
// we can't rely on case of strings we use in delimiters
// ToDo: might be easier and more efficient to use i option on the regex here...
const contentTypeHeaderString = this.extractRequestHeaderString(
batchRequestsString,
"(\\n)+(([c,C])+(ontent-)+([t,T])+(ype)+)+(?=:)+"
);
const contentTransferEncodingString = this.extractRequestHeaderString(
batchRequestsString,
"(\\n)+(([c,C])+(ontent-)+([t,T])+(ransfer-)+([e,E])+(ncoding))+(?=:)+"
);
// the line endings might be \r\n or \n
const HTTP_LINE_ENDING = this.lineEnding;
const subRequestPrefix = `--${this.changesetBoundary}${HTTP_LINE_ENDING}${contentTypeHeaderString}: application/http${HTTP_LINE_ENDING}${contentTransferEncodingString}: binary`;
const splitBody = batchRequestsString.split(subRequestPrefix);
// dropping first element as boundary if we have a batch with multiple requests
let subRequests: string[];
if (splitBody.length > 1) {
subRequests = splitBody.slice(1, splitBody.length);
} else {
subRequests = splitBody;
}
    // This goes through each operation in the request and maps the content
// of the request by deserializing it into a BatchOperation Type
const batchOperations: TableBatchOperation[] = subRequests.map(
(subRequest) => {
let requestType: RegExpMatchArray | null = [];
requestType = subRequest.match("(GET|POST|PUT|MERGE|INSERT|DELETE)");
if (requestType === null || requestType.length < 2) {
throw new Error(
`Couldn't extract verb from sub-Request:\n ${subRequest}`
);
}
const fullRequestURI = subRequest.match(/((http+s?)(\S)+)/);
if (fullRequestURI === null || fullRequestURI.length < 3) {
throw new Error(
`Couldn't extract full request URL from sub-Request:\n ${subRequest}`
);
}
// extract the request path
const pathString = fullRequestURI[1];
const path = pathString.match(/\S+devstoreaccount1\/(\w+)/);
if (path === null || path.length < 2) {
throw new Error(
`Couldn't extract path from URL in sub-Request:\n ${subRequest}`
);
}
const jsonOperationBody = subRequest.match(/{+.+}+/);
// ToDo: not sure if this logic is valid, it might be better
// to just have an empty body and then error out when determining routing of request in Handler
if (
subRequests.length > 1 &&
null !== requestType &&
requestType[0] !== "DELETE" &&
(jsonOperationBody === null || jsonOperationBody.length < 1)
) {
throw new Error(
            `Couldn't extract JSON body from sub-Request:\n ${subRequest}`
);
}
let headers: string;
let jsonBody: string;
let subStringStart: number;
let subStringEnd: number;
// currently getting an invalid header in the first position
// during table entity test for insert & merge
subStringStart = subRequest.indexOf(fullRequestURI[1]);
subStringStart += fullRequestURI[1].length + 1; // for the space
if (jsonOperationBody != null) {
// we need the jsonBody and request path extracted to be able to extract headers.
subStringEnd = subRequest.indexOf(jsonOperationBody[0]);
jsonBody = jsonOperationBody[0];
} else {
subStringEnd = subRequest.length - this.changesetBoundary.length - 2;
jsonBody = "";
}
headers = subRequest.substring(subStringStart, subStringEnd);
const operation = new TableBatchOperation(BatchType.table, headers);
if (null !== requestType) {
operation.httpMethod = requestType[0] as HttpMethod;
}
operation.path = path[1];
operation.uri = fullRequestURI[0];
operation.jsonRequestBody = jsonBody;
return operation;
}
);
return batchOperations;
}
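  // Minimal sub-request shape this parser expects (boundary and account names
  // assumed):
  //   --changeset_<guid>
  //   Content-Type: application/http
  //   Content-Transfer-Encoding: binary
  //
  //   POST http://127.0.0.1:10002/devstoreaccount1/mytable HTTP/1.1
  //   Accept: application/json;odata=minimalmetadata
  //   Content-Type: application/json
  //
  //   {"PartitionKey":"pk1","RowKey":"rk1","value":"sample"}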
/**
* Serializes an Insert entity response
*
* @param {BatchRequest} request
* @param {Models.TableInsertEntityResponse} response
* @return {*} {string}
* @memberof TableBatchSerialization
*/
public serializeTableInsertEntityBatchResponse(
request: BatchRequest,
response: Models.TableInsertEntityResponse
): string {
let serializedResponses: string = "";
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
// ToDo: Correct the handling of Content-ID
if (request.contentID !== undefined) {
serializedResponses +=
"Content-ID: " + request.contentID.toString() + "\r\n";
}
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
serializedResponses = this.serializePreferenceApplied(
request,
serializedResponses
);
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
serializedResponses +=
"Location: " + this.SerializeEntityPath(serializedResponses, request);
serializedResponses +=
"DataServiceId: " +
this.SerializeEntityPath(serializedResponses, request);
if (null !== response.eTag && undefined !== response.eTag) {
// prettier-ignore
serializedResponses += "ETag: " + response.eTag.replace(":","%3A");
}
return serializedResponses;
}
/**
* creates the serialized entitygrouptransaction / batch response body
* which we return to the users batch request
*
* @param {BatchRequest} request
* @param {Models.TableDeleteEntityResponse} response
* @return {*} {string}
* @memberof TableBatchSerialization
*/
public serializeTableDeleteEntityBatchResponse(
request: BatchRequest,
response: Models.TableDeleteEntityResponse
): string {
// ToDo: keeping my life easy to start and defaulting to "return no content"
let serializedResponses: string = "";
// create the initial boundary
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
return serializedResponses;
}
/**
* Serializes the Update Entity Batch Response
*
* @param {BatchRequest} request
* @param {Models.TableUpdateEntityResponse} response
* @return {*} {string}
* @memberof TableBatchSerialization
*/
public serializeTableUpdateEntityBatchResponse(
request: BatchRequest,
response: Models.TableUpdateEntityResponse
): string {
let serializedResponses: string = "";
// create the initial boundary
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
    // ToDo: Correct the handling of content-ID
if (request.contentID) {
serializedResponses +=
"Content-ID: " + request.contentID.toString() + "\r\n";
}
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
serializedResponses = this.serializePreferenceApplied(
request,
serializedResponses
);
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
if (null !== response.eTag && undefined !== response.eTag) {
serializedResponses += "ETag: " + response.eTag.replace(":", "%3A");
}
return serializedResponses;
}
/**
* Serializes the preference applied header
*
* @private
* @param {BatchRequest} request
* @param {string} serializedResponses
* @return {*}
* @memberof TableBatchSerialization
*/
private serializePreferenceApplied(
request: BatchRequest,
serializedResponses: string
) {
if (request.getHeader("Preference-Applied")) {
serializedResponses +=
"Preference-Applied: " +
request.getHeader("Preference-Applied") +
"\r\n";
}
return serializedResponses;
}
/**
* Serializes the Merge Entity Response
*
* @param {BatchRequest} request
* @param {Models.TableMergeEntityResponse} response
* @return {*} {string}
* @memberof TableBatchSerialization
*/
  public serializeTableMergeEntityBatchResponse(
request: BatchRequest,
response: Models.TableMergeEntityResponse
): string {
let serializedResponses: string = "";
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
    // ToDo: Correct the handling of content-ID
if (request.contentID) {
serializedResponses +=
"Content-ID: " + request.contentID.toString() + "\r\n";
}
// ToDo: not sure about other headers like cache control etc right now
// Service defaults to v1.0
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
if (null !== response.eTag && undefined !== response.eTag) {
serializedResponses += "ETag: " + response.eTag.replace(":", "%3A");
}
return serializedResponses;
}
/**
* Serializes the Query Entity Response when using Partition and Row Key
*
* @param {BatchRequest} request
* @param {Models.TableQueryEntitiesWithPartitionAndRowKeyResponse} response
* @return {*} {Promise<string>}
* @memberof TableBatchSerialization
*/
public async serializeTableQueryEntityWithPartitionAndRowKeyBatchResponse(
request: BatchRequest,
response: Models.TableQueryEntitiesWithPartitionAndRowKeyResponse
): Promise<string> {
let serializedResponses: string = "";
// create the initial boundary
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
serializedResponses += "Content-Type: ";
serializedResponses += request.params.queryOptions?.format;
serializedResponses += ";streaming=true;charset=utf-8\r\n"; // getting this from service, so adding here as well
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
if (response.eTag) {
serializedResponses += "ETag: " + response.eTag.replace(":", "%3A");
}
serializedResponses += "\r\n";
// now we need to return the JSON body
// ToDo: I don't like the stream to string to stream conversion here...
// just not sure there is any way around it
if (response.body != null) {
try {
serializedResponses += await TableBatchUtils.StreamToString(
response.body
);
} catch {
        // rethrow with a clearer message
throw new Error("failed to deserialize body");
}
}
serializedResponses += "\r\n";
return serializedResponses;
}
/**
* Serializes query entity response
*
* @param {BatchRequest} request
* @param {Models.TableQueryEntitiesResponse} response
* @return {*} {Promise<string>}
* @memberof TableBatchSerialization
*/
public async serializeTableQueryEntityBatchResponse(
request: BatchRequest,
response: Models.TableQueryEntitiesResponse
): Promise<string> {
let serializedResponses: string = "";
serializedResponses = this.SetContentTypeAndEncoding(serializedResponses);
serializedResponses = this.serializeHttpStatusCode(
serializedResponses,
response
);
serializedResponses = this.serializeDataServiceVersion(
request,
serializedResponses
);
serializedResponses += "Content-Type: ";
serializedResponses += request.params.queryOptions?.format;
serializedResponses += ";streaming=true;charset=utf-8\r\n"; // getting this from service, so adding as well
// Azure Table service defaults to this in the response
// X-Content-Type-Options: nosniff\r\n
serializedResponses = this.AddNoSniffNoCache(serializedResponses);
serializedResponses += "\r\n";
// now we need to return the JSON body
// ToDo: I don't like the stream to string to stream conversion here...
// just not sure there is any way around it
if (response.body != null) {
try {
serializedResponses += await TableBatchUtils.StreamToString(
response.body
);
} catch {
// Throw a more helpful error
throw new Error("failed to deserialize body");
}
}
serializedResponses += "\r\n";
return serializedResponses;
}
/**
* Serializes content type and encoding
*
* @private
* @param {string} serializedResponses
* @return {*}
* @memberof TableBatchSerialization
*/
private SetContentTypeAndEncoding(serializedResponses: string) {
serializedResponses += "\r\nContent-Type: application/http\r\n";
serializedResponses += "Content-Transfer-Encoding: binary\r\n";
serializedResponses += "\r\n";
return serializedResponses;
}
/**
* Serializes Content Type Options and Cache Control
   * These appear to be service defaults
*
* @private
* @param {string} serializedResponses
* @return {*}
* @memberof TableBatchSerialization
*/
private AddNoSniffNoCache(serializedResponses: string) {
serializedResponses += "X-Content-Type-Options: nosniff\r\n";
serializedResponses += "Cache-Control: no-cache\r\n";
return serializedResponses;
}
/**
   * Maps an HTTP status code to its reason phrase
* ToDo: Need to check where we have implemented this elsewhere and see if we can reuse
*
* @private
* @param {number} statusCode
* @return {*} {string}
* @memberof TableBatchSerialization
*/
private GetStatusMessageString(statusCode: number): string {
switch (statusCode) {
case 200:
return "OK";
case 201:
return "Created";
case 204:
return "No Content";
case 404:
return "Not Found";
default:
return "STATUS_CODE_NOT_IMPLEMENTED";
}
}
/**
* extract a header request string
*
* @private
* @param {string} batchRequestsString
* @param {string} regExPattern
* @return {*}
* @memberof TableBatchSerialization
*/
private extractRequestHeaderString(
batchRequestsString: string,
regExPattern: string
) {
const headerStringMatches = batchRequestsString.match(regExPattern);
if (headerStringMatches == null) {
throw StorageError;
}
return headerStringMatches[2];
}
/**
* Serialize HTTP Status Code
*
* @private
* @param {string} serializedResponses
* @param {*} response
* @return {*}
* @memberof TableBatchSerialization
*/
private serializeHttpStatusCode(serializedResponses: string, response: any) {
serializedResponses +=
"HTTP/1.1 " +
response.statusCode.toString() +
" " +
this.GetStatusMessageString(response.statusCode) +
"\r\n";
return serializedResponses;
}
/**
* Serializes the Location and DataServiceId for the response
* These 2 headers should point to the result of the successful insert
* https://docs.microsoft.com/de-de/dotnet/api/microsoft.azure.batch.addtaskresult.location?view=azure-dotnet#Microsoft_Azure_Batch_AddTaskResult_Location
* https://docs.microsoft.com/de-de/dotnet/api/microsoft.azure.batch.protocol.models.taskgetheaders.dataserviceid?view=azure-dotnet
* i.e. Location: http://127.0.0.1:10002/devstoreaccount1/SampleHubVSHistory(PartitionKey='7219c1f2e2674f249bf9589d31ab3c6e',RowKey='sentinel')
*
* @private
* @param {string} serializedResponses
* @param {BatchRequest} request
* @return {string}
* @memberof TableBatchSerialization
*/
private SerializeEntityPath(
serializedResponses: string,
request: BatchRequest
): string {
let parenthesesPosition: number = request.getUrl().indexOf("(");
parenthesesPosition--;
if (parenthesesPosition < 0) {
parenthesesPosition = request.getUrl().length;
}
const trimmedUrl: string = request
.getUrl()
.substring(0, parenthesesPosition);
let entityPath = trimmedUrl + "(PartitionKey=%27";
entityPath += request.params.tableEntityProperties!.PartitionKey;
entityPath += "%27,";
entityPath += "RowKey=%27";
entityPath += request.params.tableEntityProperties!.RowKey;
entityPath += "%27)\r\n";
return entityPath;
}
/**
* serializes data service version
*
* @private
* @param {BatchRequest} request
* @param {string} serializedResponses
* @return {*}
* @memberof TableBatchSerialization
*/
private serializeDataServiceVersion(
request: BatchRequest,
serializedResponses: string
) {
if (undefined !== request.params && request.params.dataServiceVersion) {
serializedResponses +=
"DataServiceVersion: " + request.params.dataServiceVersion + ";\r\n";
}
return serializedResponses;
}
}

@@ -0,0 +1,23 @@
export default class TableBatchUtils {
/**
* Helper to convert a ReadableStream to string.
*
* @static
* @param {(NodeJS.ReadableStream | undefined)} stream
* @return {*} {Promise<string>}
* @memberof TableBatchUtils
*/
public static async StreamToString(
stream: NodeJS.ReadableStream | undefined
): Promise<string> {
if (stream === undefined) {
throw new Error("undefined stream passed to function!");
}
const chunks: any[] = [];
return new Promise((resolve, reject) => {
stream.on("data", (chunk) => chunks.push(chunk));
stream.on("error", reject);
stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
});
}
}
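// Usage sketch (hypothetical stream value):
//   import { Readable } from "stream";
//   const text = await TableBatchUtils.StreamToString(
//     Readable.from(['{"value":[]}'])
//   );
//   // text === '{"value":[]}'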

@@ -0,0 +1,59 @@
import Context from "../generated/Context";
export default class TableStorageContext extends Context {
public get account(): string | undefined {
return this.context.account;
}
public set account(account: string | undefined) {
this.context.account = account;
}
public get tableName(): string | undefined {
return this.context.tableName;
}
public set tableName(tableName: string | undefined) {
this.context.tableName = tableName;
}
public get authenticationPath(): string | undefined {
return this.context.authenticationPath;
}
public set authenticationPath(path: string | undefined) {
this.context.authenticationPath = path;
}
public get partitionKey(): string | undefined {
return this.context.partitionKey;
}
public set partitionKey(partitionKey: string | undefined) {
this.context.partitionKey = partitionKey;
}
public get rowKey(): string | undefined {
return this.context.rowKey;
}
public set rowKey(rowKey: string | undefined) {
this.context.rowKey = rowKey;
}
public get xMsRequestID(): string | undefined {
return this.contextID;
}
public set xMsRequestID(xMsRequestID: string | undefined) {
this.contextID = xMsRequestID;
}
public get accept(): string | undefined {
return this.context.accept;
}
public set accept(accept: string | undefined) {
this.context.accept = accept;
}
}

@@ -0,0 +1,64 @@
import { ODATA_TYPE } from "../utils/constants";
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmBinary implements IEdmType {
public static validate(value: any): string {
if (typeof value !== "string") {
throw TypeError(`Not a valid EdmBinary string.`);
}
// TODO: Check base64
return value;
}
public typedValue: string;
public constructor(public value: any) {
this.typedValue = EdmBinary.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, string] {
return [name, this.value];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
if (isSystemProperty) {
throw RangeError(`EdmBinary type shouldn't be a system property.`);
}
if (
annotationLevel === AnnotationLevel.MINIMAL ||
annotationLevel === AnnotationLevel.FULL
) {
return [`${name}${ODATA_TYPE}`, "Edm.Binary"];
}
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
const res = this.toJsonPropertyTypePair(
name,
annotationLevel,
isSystemProperty
);
if (!res) {
return;
}
const [key, value] = res;
return `"${key}":"${value}"`;
}
}

@@ -0,0 +1,42 @@
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmBoolean implements IEdmType {
public static validate(value: any): boolean {
if (typeof value !== "boolean") {
      throw TypeError(`Not a valid EdmBoolean value.`);
}
return value;
}
public typedValue: boolean;
public constructor(public value: any) {
this.typedValue = EdmBoolean.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, boolean] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${this.typedValue}`;
}
public toJsonPropertyTypePair(
_name: string,
_annotationLevel: AnnotationLevel,
_isSystemProperty: boolean
): [string, string] | undefined {
return;
}
public toJsonPropertyTypeString(
_name: string,
_annotationLevel: AnnotationLevel,
_isSystemProperty: boolean
): string | undefined {
return;
}
}

@@ -0,0 +1,63 @@
import { ODATA_TYPE } from "../utils/constants";
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmDateTime implements IEdmType {
public static validate(value: any): string {
if (typeof value !== "string") {
throw TypeError(`Not a valid EdmDateTime string.`);
}
// TODO: Check data time string format
return value;
}
public typedValue: string;
public constructor(public value: any) {
this.typedValue = EdmDateTime.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, string] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
if (
annotationLevel === AnnotationLevel.MINIMAL ||
annotationLevel === AnnotationLevel.FULL
) {
if (annotationLevel === AnnotationLevel.MINIMAL && isSystemProperty) {
return;
}
return [`${name}${ODATA_TYPE}`, "Edm.DateTime"];
}
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
const res = this.toJsonPropertyTypePair(
name,
annotationLevel,
isSystemProperty
);
if (!res) {
return;
}
const [key, value] = res;
return `"${key}":"${value}"`;
}
}

@@ -0,0 +1,83 @@
import { ODATA_TYPE } from "../utils/constants";
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmDouble implements IEdmType {
public static validate(value: any): number | string {
if (value === "NaN" || value === "Infinity" || value === "-Infinity") {
return value;
}
if (typeof value === "string") {
      // TODO: Support strict conversion from string. parseFloat doesn't
      // strictly check for non-numeric chars
const val = Number.parseFloat(value);
if (!Number.isNaN(val)) {
return val;
}
}
if (typeof value !== "number") {
      throw TypeError(`Not a valid EdmDouble value.`);
}
return value;
}
public typedValue: number | string;
public constructor(public value: any) {
this.typedValue = EdmDouble.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, number] {
return [name, this.value];
}
public toJsonPropertyValueString(name: string): string {
if (typeof this.typedValue === "number") {
return `"${name}":${
Number.isInteger(this.value) ? this.typedValue.toFixed(1) : this.value
}`;
} else {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
}
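  // Illustrative sketch of the branches above:
  //   new EdmDouble(5).toJsonPropertyValueString("d")     -> '"d":5.0'
  //   new EdmDouble(1.5).toJsonPropertyValueString("d")   -> '"d":1.5'
  //   new EdmDouble("NaN").toJsonPropertyValueString("d") -> '"d":"NaN"'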
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean,
force: boolean = false
): [string, string] | undefined {
if (isSystemProperty) {
throw RangeError(`EdmDouble type shouldn't be a system property.`);
}
if (
force ||
(typeof this.typedValue === "string" &&
(annotationLevel === AnnotationLevel.MINIMAL ||
annotationLevel === AnnotationLevel.FULL))
) {
return [`${name}${ODATA_TYPE}`, "Edm.Double"];
}
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
const res = this.toJsonPropertyTypePair(
name,
annotationLevel,
isSystemProperty
);
if (!res) {
return;
}
const [key, value] = res;
return `"${key}":"${value}"`;
}
}

@@ -0,0 +1,64 @@
import { ODATA_TYPE } from "../utils/constants";
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmGuid implements IEdmType {
public static validate(value: any): string {
if (typeof value !== "string") {
throw TypeError(`Not a valid EdmGuid string.`);
}
// TODO: Check GUID string format
return value;
}
public typedValue: string;
public constructor(public value: any) {
this.typedValue = EdmGuid.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, string] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
if (isSystemProperty) {
throw RangeError(`EdmGuid type shouldn't be a system property.`);
}
if (
annotationLevel === AnnotationLevel.MINIMAL ||
annotationLevel === AnnotationLevel.FULL
) {
return [`${name}${ODATA_TYPE}`, "Edm.Guid"];
}
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
const res = this.toJsonPropertyTypePair(
name,
annotationLevel,
isSystemProperty
);
if (!res) {
return;
}
const [key, value] = res;
return `"${key}":"${value}"`;
}
}

@@ -0,0 +1,44 @@
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmInt32 implements IEdmType {
public static validate(value: any): number {
if (typeof value !== "number") {
      throw TypeError(`Not a valid EdmInt32 value.`);
}
    // TODO: Check that the value is an integer
return value;
}
public typedValue: number;
public constructor(public value: any) {
this.typedValue = EdmInt32.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, number] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${this.typedValue}`;
}
public toJsonPropertyTypePair(
_name: string,
_annotationLevel: AnnotationLevel,
_isSystemProperty: boolean
): [string, string] | undefined {
return;
}
public toJsonPropertyTypeString(
_name: string,
_annotationLevel: AnnotationLevel,
_isSystemProperty: boolean
): string | undefined {
return;
}
}

@@ -0,0 +1,64 @@
import { ODATA_TYPE } from "../utils/constants";
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmInt64 implements IEdmType {
public static validate(value: any): string {
if (typeof value !== "string") {
throw TypeError(`Not a valid EdmInt64 string.`);
}
    // TODO: Check Int64 string format
return value;
}
public typedValue: string;
public constructor(public value: any) {
this.typedValue = EdmInt64.validate(value);
}
public toJsonPropertyValuePair(name: string): [string, string] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
if (isSystemProperty) {
throw RangeError(`EdmInt64 type shouldn't be a system property.`);
}
if (
annotationLevel === AnnotationLevel.MINIMAL ||
annotationLevel === AnnotationLevel.FULL
) {
return [`${name}${ODATA_TYPE}`, "Edm.Int64"];
}
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
const res = this.toJsonPropertyTypePair(
name,
annotationLevel,
isSystemProperty
);
if (!res) {
return;
}
const [key, value] = res;
return `"${key}":"${value}"`;
}
}

View file

@ -0,0 +1,40 @@
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmNull implements IEdmType {
public static validate(value: any): void {
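    // typeof null === "object", so a null value passes the check below; as
    // written, any non-null object is also accepted as EdmNull.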
if (typeof value !== "object" && value !== null) {
      throw TypeError(`Not a valid EdmNull value.`);
}
}
public constructor(public value: any) {
EdmNull.validate(value);
}
public toJsonPropertyValuePair(
name: string
): [string, string | number | boolean] | undefined {
return;
}
public toJsonPropertyValueString(name: string): string | undefined {
return;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
return;
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
return;
}
}

View file

@ -0,0 +1,46 @@
import { AnnotationLevel } from "./EntityProperty";
import { IEdmType } from "./IEdmType";
export class EdmString implements IEdmType {
public static validate(value: any): string {
if (typeof value !== "string") {
throw TypeError(`Not a valid string.`);
}
return value;
}
public typedValue: string;
public constructor(public value: any) {
this.typedValue = EdmString.validate(value);
}
public toJsonPropertyValuePair(
name: string
): [string, string | number | boolean] {
return [name, this.typedValue];
}
public toJsonPropertyValueString(name: string): string {
return `"${name}":${JSON.stringify(this.typedValue)}`;
}
public toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): [string, string] | undefined {
return;
}
public toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined {
return;
}
}
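
A minimal sketch contrasting the Edm wrappers above, assuming `ODATA_TYPE` is the standard `"@odata.type"` suffix: `EdmGuid` (like `EdmInt64`) emits a type annotation at the minimal and full metadata levels, while `EdmInt32` and `EdmString` never do, since 32-bit integers and strings are the JSON defaults.

```typescript
import { AnnotationLevel } from "./EntityProperty";
import { EdmGuid } from "./EdmGuid";
import { EdmInt32 } from "./EdmInt32";

const id = new EdmGuid("00000000-0000-0000-0000-000000000000");
id.toJsonPropertyValueString("Id"); // '"Id":"00000000-0000-0000-0000-000000000000"'
id.toJsonPropertyTypeString("Id", AnnotationLevel.FULL, false);
// '"Id@odata.type":"Edm.Guid"'

const count = new EdmInt32(42);
count.toJsonPropertyValueString("Count"); // '"Count":42'
count.toJsonPropertyTypeString("Count", AnnotationLevel.FULL, false); // undefined
```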

View file

@ -0,0 +1,215 @@
import { Entity } from "../persistence/ITableMetadataStore";
import {
FULL_METADATA_ACCEPT,
MINIMAL_METADATA_ACCEPT,
NO_METADATA_ACCEPT
} from "../utils/constants";
import { EdmBinary } from "./EdmBinary";
import { EdmBoolean } from "./EdmBoolean";
import { EdmDateTime } from "./EdmDateTime";
import { EdmDouble } from "./EdmDouble";
import { EdmGuid } from "./EdmGuid";
import { EdmInt32 } from "./EdmInt32";
import { EdmInt64 } from "./EdmInt64";
import { EdmNull } from "./EdmNull";
import { EdmString } from "./EdmString";
import { EdmType, getEdmType, IEdmType } from "./IEdmType";
export enum AnnotationLevel {
"FULL",
"MINIMAL",
"NO"
}
export function toAnnotationLevel(level: string): AnnotationLevel {
switch (level) {
case MINIMAL_METADATA_ACCEPT:
return AnnotationLevel.MINIMAL;
case FULL_METADATA_ACCEPT:
return AnnotationLevel.FULL;
case NO_METADATA_ACCEPT:
return AnnotationLevel.NO;
default:
      throw TypeError(`Invalid OData annotation level ${level}.`);
}
}
export class EntityProperty {
public constructor(
public name: string,
public value: any,
public edmType: IEdmType,
public isSystemProperty: boolean = false
) {}
public toJsonPropertyValuePair():
| [string, string | boolean | number]
| undefined {
return this.edmType.toJsonPropertyValuePair(this.name);
}
public toJsonPropertyValueString(): string | undefined {
return this.edmType.toJsonPropertyValueString(this.name);
}
public toJsonPropertyTypePair(
annotationLevel: AnnotationLevel,
force: boolean = false
): [string, string] | undefined {
return this.edmType.toJsonPropertyTypePair(
this.name,
annotationLevel,
this.isSystemProperty,
force
);
}
public toJsonPropertyTypeString(
annotationLevel: AnnotationLevel
): string | undefined {
return this.edmType.toJsonPropertyTypeString(
this.name,
annotationLevel,
this.isSystemProperty
);
}
public toResponseString(
annotationLevel: AnnotationLevel | string
): string | undefined {
const level =
typeof annotationLevel === "string"
? toAnnotationLevel(annotationLevel)
: annotationLevel;
const typeString = this.toJsonPropertyTypeString(level);
const propertyString = this.toJsonPropertyValueString();
if (typeString) {
return [typeString, propertyString].join(",");
} else {
return propertyString;
}
}
public normalize(entity: Entity): void {
// Set back to Entity
const pair = this.toJsonPropertyValuePair();
if (!pair) {
return;
}
const [key, value] = pair;
entity.properties[key] = value;
const res = this.toJsonPropertyTypePair(AnnotationLevel.FULL, true);
if (res) {
const [typeKey, typeValue] = res;
entity.properties[typeKey] = typeValue;
}
}
}
export function parseEntityProperty(
name: string,
value: any,
edmType?: EdmType | string,
isSystemProperty: boolean = false
): EntityProperty {
if (edmType !== undefined) {
// Validate values per input EdmType
const type = typeof edmType === "string" ? getEdmType(edmType) : edmType;
switch (type) {
case EdmType.Binary:
// EdmBinary.validate(value);
return new EntityProperty(
name,
value,
new EdmBinary(value),
isSystemProperty
);
case EdmType.Boolean:
// EdmBoolean.validate(value);
return new EntityProperty(
name,
value,
new EdmBoolean(value),
isSystemProperty
);
case EdmType.DateTime:
EdmDateTime.validate(value);
return new EntityProperty(
name,
value,
new EdmDateTime(value),
isSystemProperty
);
case EdmType.Double:
EdmDouble.validate(value);
return new EntityProperty(
name,
value,
new EdmDouble(value),
isSystemProperty
);
case EdmType.Guid:
EdmGuid.validate(value);
return new EntityProperty(
name,
value,
new EdmGuid(value),
isSystemProperty
);
case EdmType.Int32:
EdmInt32.validate(value);
return new EntityProperty(
name,
value,
new EdmInt32(value),
isSystemProperty
);
case EdmType.Int64:
EdmInt64.validate(value);
return new EntityProperty(
name,
value,
new EdmInt64(value),
isSystemProperty
);
case EdmType.String:
EdmString.validate(value);
return new EntityProperty(
name,
value,
new EdmString(value),
isSystemProperty
);
default:
throw TypeError(`Invalid EdmType ${type}.`);
}
} else {
// Extract type from value type
switch (typeof value) {
case "string":
EdmString.validate(value);
return new EntityProperty(name, value, new EdmString(value));
case "number":
if (Number.isInteger(value)) {
EdmInt32.validate(value);
return new EntityProperty(name, value, new EdmInt32(value));
} else {
EdmDouble.validate(value);
return new EntityProperty(name, value, new EdmDouble(value));
}
case "boolean":
EdmBoolean.validate(value);
return new EntityProperty(name, value, new EdmBoolean(value));
case "object":
if (value === null) {
return new EntityProperty(name, value, new EdmNull(value));
}
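        // intentional fall-through: non-null objects reach the default case and throw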
default:
throw TypeError(`Invalid value when parsing EdmType ${value}.`);
}
}
}
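
Hypothetical calls into `parseEntityProperty`, showing both branches: an explicit `@odata.type` annotation (routed through `getEdmType`) and inference from the JavaScript value type. The property names and values are illustrative.

```typescript
import { parseEntityProperty } from "./EntityProperty";

const count = parseEntityProperty("Count", 42);   // integer number -> EdmInt32
const ratio = parseEntityProperty("Ratio", 0.5);  // non-integer    -> EdmDouble
const title = parseEntityProperty("Title", "hi"); // string         -> EdmString

// An explicit annotation wins over inference; Int64 values travel as strings:
const big = parseEntityProperty("Big", "1234567890123", "Edm.Int64");
big.toJsonPropertyValueString(); // '"Big":"1234567890123"'
```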

View file

@ -0,0 +1,56 @@
import { AnnotationLevel } from "./EntityProperty";
export enum EdmType {
"Binary",
"Boolean",
"DateTime",
"Double",
"Guid",
"Int32",
"Int64",
"String",
"Null"
}
export interface IEdmType {
toJsonPropertyValuePair(
name: string
): [string, string | number | boolean] | undefined;
toJsonPropertyValueString(name: string): string | undefined;
toJsonPropertyTypePair(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean,
force?: boolean
): [string, string] | undefined;
toJsonPropertyTypeString(
name: string,
annotationLevel: AnnotationLevel,
isSystemProperty: boolean
): string | undefined;
}
export function getEdmType(type: string): EdmType {
switch (type) {
case "Edm.Binary":
return EdmType.Binary;
case "Edm.Boolean":
return EdmType.Boolean;
case "Edm.DateTime":
return EdmType.DateTime;
case "Edm.Double":
return EdmType.Double;
case "Edm.Guid":
return EdmType.Guid;
case "Edm.Int32":
return EdmType.Int32;
case "Edm.Int64":
return EdmType.Int64;
case "Edm.String":
return EdmType.String;
case "Edm.Null":
return EdmType.Null;
default:
throw TypeError(`${type} is not a valid Edm Type.`);
}
}

View file

@ -0,0 +1,101 @@
import { Entity } from "../persistence/ITableMetadataStore";
import { ODATA_TYPE } from "../utils/constants";
import { getTimestampString } from "../utils/utils";
import { EdmString } from "./EdmString";
import { EntityProperty, parseEntityProperty } from "./EntityProperty";
// import { EdmType } from "./IEdmType";
export class NormalizedEntity {
public ref: Entity;
public properties: EntityProperty[] = [];
public propertiesMap: { [property: string]: EntityProperty } = {};
public constructor(entity: Entity) {
this.ref = entity;
// Partition Key
const partitionKeyProperty = new EntityProperty(
"PartitionKey",
entity.PartitionKey,
new EdmString(entity.PartitionKey),
true
);
this.properties.push(partitionKeyProperty);
this.propertiesMap.PartitionKey = partitionKeyProperty;
// Row Key
const rowKeyProperty = new EntityProperty(
"RowKey",
entity.RowKey,
new EdmString(entity.RowKey),
true
);
this.properties.push(rowKeyProperty);
this.propertiesMap.RowKey = rowKeyProperty;
// Sync Timestamp from entity last modified time
entity.properties.Timestamp = getTimestampString(
typeof entity.lastModifiedTime === "string"
? new Date(entity.lastModifiedTime)
: entity.lastModifiedTime
);
entity.properties["Timestamp@odata.type"] = "Edm.DateTime";
for (const key in entity.properties) {
if (Object.prototype.hasOwnProperty.call(entity.properties, key)) {
const element = entity.properties[key];
if (this.propertiesMap[key] !== undefined) {
continue;
}
if (key.endsWith(ODATA_TYPE)) {
continue;
} else {
const type = entity.properties[`${key}${ODATA_TYPE}`];
if (type !== undefined && typeof type !== "string") {
throw RangeError(
`Invalid EdmType value:${type} for key:${key}${ODATA_TYPE}`
);
}
const property = parseEntityProperty(key, element, type, false);
this.properties.push(property);
this.propertiesMap[key] = property;
}
}
}
}
// Convert to HTTP response payload string
public toResponseString(
annotationLevel: string,
injections: { [property: string]: string },
includes?: Set<string>
): string {
const pairs: string[] = [];
for (const key in injections) {
if (Object.prototype.hasOwnProperty.call(injections, key)) {
const value = injections[key];
pairs.push(`"${key}":${JSON.stringify(value)}`);
}
}
for (const pair of this.properties) {
if (!includes || includes.has(pair.name)) {
const str = pair.toResponseString(annotationLevel);
if (str) {
pairs.push(str);
}
}
}
return `{${pairs.join(",")}}`;
}
public normalize(): Entity {
this.ref.properties = {};
for (const entity of this.properties) {
entity.normalize(this.ref);
}
return this.ref;
}
}
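
A hedged round-trip sketch for `NormalizedEntity`. The entity literal is an assumed minimal shape (only the fields this class reads), and the accept string assumes `MINIMAL_METADATA_ACCEPT` is the standard `application/json;odata=minimalmetadata` payload format.

```typescript
import { NormalizedEntity } from "./NormalizedEntity";

const entity: any = {
  PartitionKey: "pk",
  RowKey: "rk",
  lastModifiedTime: new Date(),
  properties: { Count: 1, Big: "9007199254740993", "Big@odata.type": "Edm.Int64" }
};

const normalized = new NormalizedEntity(entity);
const body = normalized.toResponseString(
  "application/json;odata=minimalmetadata",
  {} // callers inject response-level pairs such as "odata.metadata" here
);
// body is a single JSON object string: value pairs for PartitionKey, RowKey,
// Timestamp, Count and Big, plus "Big@odata.type":"Edm.Int64" (Int32/String
// properties carry no annotation at this level).
```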

View file

@ -0,0 +1,22 @@
import Context from "../generated/Context";
import StorageError from "./StorageError";
/**
 * Create customized error types by inheriting StorageError
*
* @export
 * @class NotImplementedError
* @extends {StorageError}
*/
export default class NotImplementedError extends StorageError {
public constructor(context: Context) {
super(
501,
"APINotImplemented",
"Current API is not implemented yet. Please vote your wanted features to https://github.com/azure/azurite/issues",
context.contextID || "",
undefined,
context
);
}
}

View file

@ -0,0 +1,92 @@
import {
FULL_METADATA_ACCEPT,
MINIMAL_METADATA_ACCEPT,
NO_METADATA_ACCEPT,
TABLE_API_VERSION
} from "../../table/utils/constants";
import Context from "../generated/Context";
import MiddlewareError from "../generated/errors/MiddlewareError";
import { jsonToXML } from "../generated/utils/xml";
import { getPayloadFormat } from "../utils/utils";
/**
* Represents an Azure Storage Server Error.
*
* @export
* @class StorageError
* @extends {MiddlewareError}
*/
export default class StorageError extends MiddlewareError {
public readonly storageErrorCode: string;
public readonly storageErrorMessage: string;
public readonly storageRequestID: string;
/**
* Creates an instance of StorageError.
*
* @param {number} statusCode HTTP response status code
* @param {string} storageErrorCode Azure Storage error code, will be in response body and header
* @param {string} storageErrorMessage Azure Storage error message
* @param {string} storageRequestID Azure Storage server request ID
* @param {{ [key: string]: string }} [storageAdditionalErrorMessages={}]
* Additional error messages will be included in XML body
   * @param {Context} context
* @memberof StorageError
*/
constructor(
statusCode: number,
storageErrorCode: string,
storageErrorMessage: string,
storageRequestID: string,
storageAdditionalErrorMessages: { [key: string]: string } = {},
context: Context
) {
    const payload = getPayloadFormat(context);
    const isJSON =
      payload === NO_METADATA_ACCEPT ||
      payload === MINIMAL_METADATA_ACCEPT ||
      payload === FULL_METADATA_ACCEPT;
const bodyInJSON: any = isJSON
? {
code: storageErrorCode,
message: {
lang: "en-US",
value: `${storageErrorMessage}\nRequestId:${storageRequestID}\nTime:${new Date().toISOString()}`
}
}
: {
Code: storageErrorCode,
Message: `${storageErrorMessage}\nRequestId:${storageRequestID}\nTime:${new Date().toISOString()}`
};
for (const key in storageAdditionalErrorMessages) {
if (storageAdditionalErrorMessages.hasOwnProperty(key)) {
const element = storageAdditionalErrorMessages[key];
bodyInJSON[key] = element;
}
}
const body = isJSON
? JSON.stringify({ "odata.error": bodyInJSON })
: jsonToXML({ Error: bodyInJSON });
super(
statusCode,
storageErrorMessage,
undefined,
{
"x-ms-error-code": storageErrorCode,
"x-ms-request-id": storageRequestID,
"x-ms-version": TABLE_API_VERSION
},
body,
isJSON ? `${pyaload};streaming=true;charset=utf-8` : "application/xml"
);
this.name = "StorageError";
this.storageErrorCode = storageErrorCode;
this.storageErrorMessage = storageErrorMessage;
this.storageRequestID = storageRequestID;
}
}
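
For reference, a sketch of the two body shapes produced above; request IDs and timestamps vary per request and are elided as `<id>`/`<time>`.

```typescript
// JSON branch, serialized as JSON.stringify({ "odata.error": bodyInJSON }):
const jsonBody = {
  "odata.error": {
    code: "TableNotFound",
    message: {
      lang: "en-US",
      value: "The table specified does not exist.\nRequestId:<id>\nTime:<time>"
    }
  }
};
// XML branch: jsonToXML({ Error: bodyInJSON }) yields
// <Error><Code>TableNotFound</Code><Message>...</Message></Error>
```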

View file

@ -0,0 +1,258 @@
/**
 * A factory class that maintains all Azure Storage table service errors.
*
* @export
* @class StorageErrorFactory
*/
import Context from "../generated/Context";
import StorageError from "./StorageError";
const defaultID: string = "DefaultID";
export default class StorageErrorFactory {
public static getInvalidHeaderValue(
context: Context,
additionalMessages?: { [key: string]: string }
): StorageError {
if (additionalMessages === undefined) {
additionalMessages = {};
}
return new StorageError(
400,
"InvalidHeaderValue",
"The value for one of the HTTP headers is not in the correct format.",
context.contextID || defaultID,
additionalMessages,
context
);
}
public static getInvalidInput(
context: Context,
additionalMessages?: { [key: string]: string }
): StorageError {
if (additionalMessages === undefined) {
additionalMessages = {};
}
return new StorageError(
400,
"InvalidInput",
"An error occurred while processing this request.",
context.contextID || defaultID,
additionalMessages,
context
);
}
public static getTableAlreadyExists(context: Context): StorageError {
return new StorageError(
409,
"TableAlreadyExists",
"The table specified already exists.",
context.contextID || defaultID,
undefined,
context
);
}
public static getTableNameEmpty(context: Context): StorageError {
return new StorageError(
400,
"TableNameEmpty",
"The specified table name is empty.",
context.contextID || defaultID,
undefined,
context
);
}
public static getInvalidOperation(
context: Context,
message: string = ""
): StorageError {
return new StorageError(
400,
"InvalidOperation",
message,
context.contextID || "",
undefined,
context
);
}
public static getAuthorizationSourceIPMismatch(
context: Context
): StorageError {
return new StorageError(
403,
"AuthorizationSourceIPMismatch",
"This request is not authorized to perform this operation using this source IP {SourceIP}.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAuthorizationProtocolMismatch(
context: Context
): StorageError {
return new StorageError(
403,
"AuthorizationProtocolMismatch",
"This request is not authorized to perform this operation using this protocol.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAuthorizationPermissionMismatch(
context: Context
): StorageError {
return new StorageError(
403,
"AuthorizationPermissionMismatch",
"This request is not authorized to perform this operation using this permission.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAuthorizationServiceMismatch(
context: Context
): StorageError {
return new StorageError(
403,
"AuthorizationServiceMismatch",
"This request is not authorized to perform this operation using this service.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAuthorizationResourceTypeMismatch(
context: Context
): StorageError {
return new StorageError(
403,
"AuthorizationResourceTypeMismatch",
"This request is not authorized to perform this operation using this resource type.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAccountNameEmpty(context: Context): StorageError {
return new StorageError(
400,
"AccountNameEmpty",
"The specified account name is empty.",
context.contextID || defaultID,
undefined,
context
);
}
public static getTableNotExist(context: Context): StorageError {
return new StorageError(
404,
"TableNotFound",
"The table specified does not exist.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAuthorizationFailure(context: Context): StorageError {
return new StorageError(
403,
"AuthorizationFailure",
// tslint:disable-next-line:max-line-length
"Server failed to authenticate the request. Make sure the value of the Authorization header is formed correctly including the signature.",
context.contextID || defaultID,
undefined,
context
);
}
public static getEntityAlreadyExist(context: Context): StorageError {
return new StorageError(
409,
"EntityAlreadyExist",
"The specified entity already exists.",
context.contextID || defaultID,
undefined,
context
);
}
public static getPropertiesNeedValue(context: Context): StorageError {
return new StorageError(
400,
"PropertiesNeedValue",
"The values are not specified for all properties in the entity.",
context.contextID || defaultID,
undefined,
context
);
}
public static getAtomFormatNotSupported(context: Context): StorageError {
return new StorageError(
415,
"AtomFormatNotSupported",
"Atom format is not supported.",
context.contextID || defaultID,
undefined,
context
);
}
public static getPreconditionFailed(context: Context): StorageError {
return new StorageError(
412,
"UpdateConditionNotSatisfied",
"The update condition specified in the request was not satisfied.",
context.contextID || defaultID,
undefined,
context
);
}
public static getTableNotFound(context: Context): StorageError {
return new StorageError(
404,
"TableNotFound",
"The table specified does not exist.",
context.contextID || defaultID,
undefined,
context
);
}
public static getEntityNotFound(context: Context): StorageError {
return new StorageError(
404,
"ResourceNotFound",
"The specified resource does not exist.",
context.contextID || defaultID,
undefined,
context
);
}
public static getQueryConditionInvalid(context: Context): StorageError {
return new StorageError(
400,
"InvalidInput",
"The query condition specified in the request is invalid.",
context.contextID || defaultID,
undefined,
context
);
}
}
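
Hypothetical handler-side usage; the table lookup is an illustration, and only the factory call and its effects come from this file.

```typescript
import Context from "../generated/Context";
import StorageErrorFactory from "./StorageErrorFactory";

function requireTable(
  tables: Map<string, unknown>,
  name: string,
  context: Context
): unknown {
  const table = tables.get(name);
  if (table === undefined) {
    // 404 with x-ms-error-code TableNotFound and a body in the payload
    // format negotiated by the request (OData JSON or XML).
    throw StorageErrorFactory.getTableNotFound(context);
  }
  return table;
}
```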

View file

@ -0,0 +1,144 @@
import Operation from "./artifacts/operation";
import IRequest from "./IRequest";
import IResponse from "./IResponse";
export interface IHandlerParameters {
[key: string]: any;
}
/**
* Context holds generated server context information.
* Every incoming HTTP request will initialize a new context.
*
* @export
* @class Context
*/
export default class Context {
public readonly context: any;
public readonly path: string;
/**
* Creates an instance of Context.
* Context holds generated server context information.
* Every incoming HTTP request will initialize a new context.
*
* @param {Context} context An existing Context
* @memberof Context
*/
public constructor(context: Context);
/**
* Creates an instance of Context.
* Context holds generated server context information.
* Every incoming HTTP request will initialize a new context.
*
   * @param {Object} holder Holder is an Object which is used to keep context information
* @param {string} [path="context"] holder[path] is used as context object by default
* @param {IRequest} [req]
* @param {IResponse} [res]
* @memberof Context
*/
public constructor(
holder: object,
path: string,
req?: IRequest,
res?: IResponse
);
public constructor(
holderOrContext: object | Context,
path: string = "context",
req?: IRequest,
res?: IResponse
) {
if (holderOrContext instanceof Context) {
this.context = holderOrContext.context;
this.path = holderOrContext.path;
} else {
const context = holderOrContext as any;
this.path = path;
if (context[this.path] === undefined) {
context[this.path] = {};
}
if (typeof context[this.path] !== "object") {
throw new TypeError(
`Initialize Context error because holder.${
this.path
} is not an object.`
);
}
this.context = context[this.path];
this.request = req;
this.response = res;
}
}
public get operation(): Operation | undefined {
return this.context.operation;
}
public set operation(operation: Operation | undefined) {
this.context.operation = operation;
}
public set request(request: IRequest | undefined) {
this.context.request = request;
}
public get request(): IRequest | undefined {
return this.context.request;
}
public get dispatchPattern(): string | undefined {
return this.context.dispatchPattern;
}
public set dispatchPattern(path: string | undefined) {
this.context.dispatchPattern = path;
}
public set response(response: IResponse | undefined) {
this.context.response = response;
}
public get response(): IResponse | undefined {
return this.context.response;
}
public get handlerParameters(): IHandlerParameters | undefined {
return this.context.handlerParameters;
}
public set handlerParameters(
handlerParameters: IHandlerParameters | undefined
) {
this.context.handlerParameters = handlerParameters;
}
public get handlerResponses(): any {
return this.context.handlerResponses;
}
public set handlerResponses(handlerResponses: any) {
this.context.handlerResponses = handlerResponses;
}
public get contextID(): string | undefined {
return this.context.contextID;
}
public set contextID(contextID: string | undefined) {
this.context.contextID = contextID;
}
public set startTime(startTime: Date | undefined) {
this.context.startTime = startTime;
}
public get startTime(): Date | undefined {
return this.context.startTime;
}
}
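
A small sketch of the holder-based sharing above: two `Context` instances built over the same holder and path see the same state, which is how the middleware chain passes data along a request (in the Express adapters below, the holder is `res.locals`).

```typescript
import Context from "./Context";

const locals: object = {};
const ctx1 = new Context(locals, "default_context");
ctx1.contextID = "request-1";

const ctx2 = new Context(locals, "default_context");
console.log(ctx2.contextID); // "request-1": same locals["default_context"] object
```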

View file

@ -0,0 +1,161 @@
import { ErrorRequestHandler, NextFunction, Request, RequestHandler, Response } from "express";
import Context from "./Context";
import ExpressRequestAdapter from "./ExpressRequestAdapter";
import ExpressResponseAdapter from "./ExpressResponseAdapter";
import IHandlers from "./handlers/IHandlers";
import deserializerMiddleware from "./middleware/deserializer.middleware";
import dispatchMiddleware from "./middleware/dispatch.middleware";
import endMiddleware from "./middleware/end.middleware";
import errorMiddleware from "./middleware/error.middleware";
import HandlerMiddlewareFactory from "./middleware/HandlerMiddlewareFactory";
import serializerMiddleware from "./middleware/serializer.middleware";
import MiddlewareFactory from "./MiddlewareFactory";
import ILogger from "./utils/ILogger";
/**
 * ExpressMiddlewareFactory will generate Express-compatible middleware according to swagger definitions.
 * Generated middleware MUST be used in strict order:
 * * DispatchMiddleware
 * * DeserializerMiddleware
* * HandlerMiddleware
* * SerializerMiddleware
* * ErrorMiddleware
* * EndMiddleware
*
* @export
* @class MiddlewareFactory
*/
export default class ExpressMiddlewareFactory extends MiddlewareFactory {
/**
* Creates an instance of MiddlewareFactory.
*
* @param {ILogger} logger A valid logger
* @param {string} [contextPath="default_context"] Optional. res.locals[contextPath] will be used to hold context
* @memberof MiddlewareFactory
*/
public constructor(
logger: ILogger,
private readonly contextPath: string = "default_context"
) {
super(logger);
}
/**
   * DispatchMiddleware is the 1st middleware that should be used among the generated middleware.
*
* @returns {RequestHandler}
* @memberof MiddlewareFactory
*/
public createDispatchMiddleware(): RequestHandler {
return (req: Request, res: Response, next: NextFunction) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
dispatchMiddleware(
new Context(res.locals, this.contextPath, request, response),
request,
next,
this.logger
);
};
}
/**
   * DeserializerMiddleware is the 2nd middleware that should be used among the generated middleware.
*
* @returns {RequestHandler}
* @memberof MiddlewareFactory
*/
public createDeserializerMiddleware(): RequestHandler {
return (req: Request, res: Response, next: NextFunction) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
deserializerMiddleware(
new Context(res.locals, this.contextPath, request, response),
request,
next,
this.logger
);
};
}
/**
   * HandlerMiddleware is the 3rd middleware that should be used among the generated middleware.
*
* @param {IHandlers} handlers
* @returns {RequestHandler}
* @memberof MiddlewareFactory
*/
public createHandlerMiddleware(handlers: IHandlers): RequestHandler {
const handlerMiddlewareFactory = new HandlerMiddlewareFactory(
handlers,
this.logger
);
return (req: Request, res: Response, next: NextFunction) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
handlerMiddlewareFactory.createHandlerMiddleware()(
new Context(res.locals, this.contextPath, request, response),
next
);
};
}
/**
   * SerializerMiddleware is the 4th middleware that should be used among the generated middleware.
*
* @returns {RequestHandler}
* @memberof MiddlewareFactory
*/
public createSerializerMiddleware(): RequestHandler {
return (req: Request, res: Response, next: NextFunction) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
serializerMiddleware(
new Context(res.locals, this.contextPath, request, response),
        response,
next,
this.logger
);
};
}
/**
   * ErrorMiddleware is the 5th middleware that should be used among the generated middleware.
*
* @returns {ErrorRequestHandler}
* @memberof MiddlewareFactory
*/
public createErrorMiddleware(): ErrorRequestHandler {
return (err: Error, req: Request, res: Response, next: NextFunction) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
errorMiddleware(
new Context(res.locals, this.contextPath, request, response),
err,
        request,
        response,
next,
this.logger
);
};
}
/**
   * EndMiddleware is the 6th middleware that should be used among the generated middleware.
*
* @returns {RequestHandler}
* @memberof MiddlewareFactory
*/
public createEndMiddleware(): RequestHandler {
return (req: Request, res: Response) => {
const request = new ExpressRequestAdapter(req);
const response = new ExpressResponseAdapter(res);
endMiddleware(
new Context(res.locals, this.contextPath, request, response),
        response,
this.logger
);
};
}
}
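
A hedged wiring sketch following the strict order documented above. The `logger` and `handlers` values are assumptions (any `ILogger`/`IHandlers` implementations will do), and the context path is illustrative.

```typescript
import express from "express";
import ExpressMiddlewareFactory from "./ExpressMiddlewareFactory";
import IHandlers from "./handlers/IHandlers";
import ILogger from "./utils/ILogger";

declare const logger: ILogger;
declare const handlers: IHandlers;

const app = express();
const factory = new ExpressMiddlewareFactory(logger, "table_context");

app.use(factory.createDispatchMiddleware());
app.use(factory.createDeserializerMiddleware());
app.use(factory.createHandlerMiddleware(handlers));
app.use(factory.createSerializerMiddleware());
app.use(factory.createErrorMiddleware());
app.use(factory.createEndMiddleware());
```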

View file

@ -0,0 +1,57 @@
import { Request } from "express";
import IRequest, { HttpMethod } from "./IRequest";
export default class ExpressRequestAdapter implements IRequest {
public constructor(private readonly req: Request) {}
public getMethod(): HttpMethod {
return this.req.method.toUpperCase() as HttpMethod;
}
public getUrl(): string {
return this.req.url;
}
public getEndpoint(): string {
return `${this.req.protocol}://${this.getHeader("host") ||
this.req.hostname}`;
}
public getPath(): string {
return this.req.path;
}
public getBodyStream(): NodeJS.ReadableStream {
return this.req;
}
public getBody(): string | undefined {
return this.req.body;
}
public setBody(body: string | undefined): ExpressRequestAdapter {
this.req.body = body;
return this;
}
public getHeader(field: string): string | undefined {
return this.req.header(field);
}
public getHeaders(): { [header: string]: string | string[] | undefined } {
return this.req.headers;
}
public getRawHeaders(): string[] {
return this.req.rawHeaders;
}
public getQuery(key: string): string | undefined {
return this.req.query[key];
}
public getProtocol(): string {
return this.req.protocol;
}
}

Просмотреть файл

@ -0,0 +1,66 @@
import { Response } from "express";
import { OutgoingHttpHeaders } from "http";
import IResponse from "./IResponse";
export default class ExpressResponseAdapter implements IResponse {
public constructor(private readonly res: Response) {}
public setStatusCode(code: number): IResponse {
this.res.status(code);
return this;
}
public getStatusCode(): number {
return this.res.statusCode;
}
public setStatusMessage(message: string): IResponse {
this.res.statusMessage = message;
return this;
}
public getStatusMessage(): string {
return this.res.statusMessage;
}
public setHeader(
field: string,
value?: string | string[] | undefined | number | boolean
): IResponse {
if (typeof value === "number") {
value = `${value}`;
}
if (typeof value === "boolean") {
value = `${value}`;
}
// Cannot remove if block because of a potential TypeScript bug
if (typeof value === "string" || value instanceof Array) {
this.res.setHeader(field, value);
}
return this;
}
public getHeader(field: string): number | string | string[] | undefined {
return this.res.getHeader(field);
}
public getHeaders(): OutgoingHttpHeaders {
return this.res.getHeaders();
}
public headersSent(): boolean {
return this.res.headersSent;
}
public setContentType(value: string): IResponse {
this.res.setHeader("content-type", value);
return this;
}
public getBodyStream(): NodeJS.WritableStream {
return this.res;
}
}

View file

@ -0,0 +1,26 @@
export type HttpMethod =
| "GET"
| "HEAD"
| "POST"
| "PUT"
| "DELETE"
| "CONNECT"
| "OPTIONS"
| "TRACE"
| "MERGE"
| "PATCH";
export default interface IRequest {
getMethod(): HttpMethod;
getUrl(): string;
getEndpoint(): string;
getPath(): string;
getBodyStream(): NodeJS.ReadableStream;
setBody(body: string | undefined): IRequest;
getBody(): string | undefined;
getHeader(field: string): string | undefined;
getHeaders(): { [header: string]: string | string[] | undefined };
getRawHeaders(): string[];
getQuery(key: string): string | undefined;
getProtocol(): string;
}

Some files were not shown because too many files have changed in this diff