This contains all the bug fixes since the first official release, and also adds some tests.
Parent: 7174fc20fe
Commit: 32691fce55

README.md (81 changed lines)
@@ -45,20 +45,21 @@ the local Storage Emulator (with the exception of Service Bus features).

# Usage
## Table Storage

To ensure a table exists, call **create_table**:
To ensure a table exists, call **create\_table**:

```Python
from azure.storage import TableService
ts = TableService(account_name, account_key)
table = ts.create_table('tasktable')
ts.create_table('tasktable')
```

A new entity can be added by calling **insert_entity**:
A new entity can be added by calling **insert\_entity**:

```Python
from datetime import datetime
ts = TableService(account_name, account_key)
table = ts.create_table('tasktable')
table.insert_entity(
ts.create_table('tasktable')
ts.insert_entity(
    'tasktable',
    {
        'PartitionKey' : 'tasksSeattle',
@@ -69,7 +70,7 @@ table.insert_entity(
)
```

The method **get_entity** can then be used to fetch the entity that was just inserted:
The method **get\_entity** can then be used to fetch the entity that was just inserted:

```Python
ts = TableService(account_name, account_key)
@@ -78,27 +79,25 @@ entity = ts.get_entity('tasktable', 'tasksSeattle', '1')

## Blob Storage

The **create_container** method can be used to create a
The **create\_container** method can be used to create a
container in which to store a blob:

```Python
from azure.storage import BlobService
blob_service = BlobService()
container = blob_service.create_container('taskcontainer')
blob_service = BlobService(account_name, account_key)
blob_service.create_container('taskcontainer')
```

To upload a file (assuming it is called task1-upload.txt, it contains the exact text "hello world" (no quotation marks), and it is placed in the same folder as the script below), the method **put_blob** can be used:
To upload a file (assuming it is called task1-upload.txt, it contains the exact text "hello world" (no quotation marks), and it is placed in the same folder as the script below), the method **put\_blob** can be used:

```Python
from azure.storage import BlobService
blob_service = BlobService(account_name, account_key)
blob_service.put_blob('taskcontainer', 'task1',
blobService = azure.createBlobService()
blobService.put_blob('taskcontainer', 'task1', file('task1-upload.txt').read())
blob_service.put_blob('taskcontainer', 'task1', file('task1-upload.txt').read(), 'BlockBlob')

```

To download the blob and write it to the file system, the **get_blob** method can be used:
To download the blob and write it to the file system, the **get\_blob** method can be used:

```Python
from azure.storage import BlobService
@@ -108,15 +107,15 @@ blob = blob_service.get_blob('taskcontainer', 'task1')

## Storage Queues

The **create_queue** method can be used to ensure a queue exists:
The **create\_queue** method can be used to ensure a queue exists:

```Python
from azure.storage import QueueService
queue_service = QueueService(account_name, account_key)
queue = queue_service.create_queue('taskqueue')
queue_service.create_queue('taskqueue')
```

The **put_message** method can then be called to insert the message into the queue:
The **put\_message** method can then be called to insert the message into the queue:

```Python
from azure.storage import QueueService
@@ -124,69 +123,73 @@ queue_service = QueueService(account_name, account_key)
queue_service.put_message('taskqueue', 'Hello world!')
```

It is then possible to call the **get___messages** method, process the message and then call **delete_message** on the messages ID. This two-step process ensures messages don't get lost when they are removed from the queue.
It is then possible to call the **get\_messages** method, process the message and then call **delete\_message** with the message id and receipt. This two-step process ensures messages don't get lost when they are removed from the queue.

```Python
from azure.storage import QueueService
queue_service = QueueService(account_name, account_key)
messages = queue_service.get_messages('taskqueue')
queue_service.delete_message('taskqueue', messages[0].message_id)
queue_service.delete_message('taskqueue', messages[0].message_id, messages[0].pop_receipt)
```

## ServiceBus Queues

ServiceBus Queues are an alternative to Storage Queues that might be useful in scenarios where more advanced messaging features are needed (larger message sizes, message ordering, single-operaiton destructive reads, scheduled delivery) using push-style delivery (using long polling).

The **create_queue** method can be used to ensure a queue exists:
The **create\_queue** method can be used to ensure a queue exists:

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
queue = sbs.create_queue('taskqueue');
sbs = ServiceBusService(service_namespace, account_key, 'owner')
sbs.create_queue('taskqueue')
```

The **send__queue__message** method can then be called to insert the message into the queue:
The **send\_queue\_message** method can then be called to insert the message into the queue:

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
sbs.send_queue_message('taskqueue', 'Hello World!')
from azure.servicebus import ServiceBusService, Message
sbs = ServiceBusService(service_namespace, account_key, 'owner')
msg = Message('Hello World!')
sbs.send_queue_message('taskqueue', msg)
```

It is then possible to call the **read__delete___queue__message** method to dequeue the message.
It is then possible to call the **receive\_queue\_message** method to dequeue the message.

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
msg = sbs.read_delete_queue_message('taskqueue')
sbs = ServiceBusService(service_namespace, account_key, 'owner')
msg = sbs.receive_queue_message('taskqueue')
```

## ServiceBus Topics

ServiceBus topics are an abstraction on top of ServiceBus Queues that make pub/sub scenarios easy to implement.

The **create_topic** method can be used to create a server-side topic:
The **create\_topic** method can be used to create a server-side topic:

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
topic = sbs.create_topic('taskdiscussion')
sbs = ServiceBusService(service_namespace, account_key, 'owner')
sbs.create_topic('taskdiscussion')
```

The **send__topic__message** method can be used to send a message to a topic:
The **send\_topic\_message** method can be used to send a message to a topic:

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
sbs.send_topic_message('taskdiscussion', 'Hello world!')
from azure.servicebus import ServiceBusService, Message
sbs = ServiceBusService(service_namespace, account_key, 'owner')
msg = Message('Hello World!')
sbs.send_topic_message('taskdiscussion', msg)
```

A client can then create a subscription and start consuming messages by calling the **create__subscription** method followed by the **receive__subscription__message** method. Please note that any messages sent before the subscription is created will not be received.
A client can then create a subscription and start consuming messages by calling the **create\_subscription** method followed by the **receive\_subscription\_message** method. Please note that any messages sent before the subscription is created will not be received.

```Python
from azure.servicebus import ServiceBusService
sbs = ServiceBusService(service_namespace, account_key)
from azure.servicebus import ServiceBusService, Message
sbs = ServiceBusService(service_namespace, account_key, 'owner')
sbs.create_subscription('taskdiscussion', 'client1')
msg = Message('Hello World!')
sbs.send_topic_message('taskdiscussion', msg)
msg = sbs.receive_subscription_message('taskdiscussion', 'client1')
```

@@ -43,12 +43,12 @@ _ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.'
_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.'
_ERROR_INCORRECT_TABLE_IN_BATCH = 'Table should be the same in a batch operations'
_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = 'Partition Key should be the same in a batch operations'
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = 'Partition Key should be the same in a batch operations'
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = 'Row Keys should not be the same in a batch operations'
_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = 'Message is not peek locked and cannot be deleted.'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = 'Message is not peek locked and cannot be unlocked.'
_ERROR_QUEUE_NOT_FOUND = 'Queue is not Found'
_ERROR_TOPIC_NOT_FOUND = 'Topic is not Found'
_ERROR_QUEUE_NOT_FOUND = 'Queue was not found'
_ERROR_TOPIC_NOT_FOUND = 'Topic was not found'
_ERROR_CONFLICT = 'Conflict'
_ERROR_NOT_FOUND = 'Not found'
_ERROR_UNKNOWN = 'Unknown error (%s)'
@@ -58,6 +58,8 @@ _ERROR_ACCESS_POLICY = 'share_access_policy must be either SignedIdentifier or A
_ERROR_VALUE_SHOULD_NOT_BE_NULL = '%s should not be None.'
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = 'Cannot serialize the specified value (%s) to an entity. Please use an EntityProperty (which can specify custom types), int, str, bool, or datetime'

METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'

class WindowsAzureData(object):
    ''' This is the base of data class. It is only used to check whether it is instance or not. '''
    pass
@@ -80,8 +82,11 @@ class WindowsAzureMissingResourceError(WindowsAzureError):
        self.message = message

class Feed:
    def __init__(self, type):
        self.type = type
    pass

class HeaderDict(dict):
    def __getitem__(self, index):
        return super(HeaderDict, self).__getitem__(index.lower())
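A quick illustrative sketch of what the new HeaderDict gives callers (assuming it is imported from the azure package, where this change defines it): the key is lower-cased on lookup, so values stored under lower-case header names can be read back with any casing.

```Python
# Illustration only: HeaderDict lower-cases the key on lookup.
from azure import HeaderDict

headers = HeaderDict()
headers['x-ms-meta-category'] = 'blue'          # stored lower-case
assert headers['X-Ms-Meta-Category'] == 'blue'  # mixed-case lookup still works
```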
def _get_readable_id(id_name):
    """simplified an id to be more friendly for us people"""
@@ -97,6 +102,9 @@ def _get_entry_properties(xmlstr, include_id):
    properties = {}

    for entry in _get_child_nodes(xmldoc, 'entry'):
        etag = entry.getAttributeNS(METADATA_NS, 'etag')
        if etag:
            properties['etag'] = etag
        for updated in _get_child_nodes(entry, 'updated'):
            properties['updated'] = updated.firstChild.nodeValue
        for name in _get_children_from_path(entry, 'author', 'name'):
@@ -109,6 +117,14 @@ def _get_entry_properties(xmlstr, include_id):

    return properties

def _get_first_child_node_value(parent_node, node_name):
    xml_attrs = _get_child_nodes(parent_node, node_name)
    if xml_attrs:
        xml_attr = xml_attrs[0]
        if xml_attr.firstChild:
            value = xml_attr.firstChild.nodeValue
            return value

def _get_child_nodes(node, tagName):
    return [childNode for childNode in node.getElementsByTagName(tagName)
            if childNode.parentNode == node]
@@ -142,7 +158,7 @@ def _create_entry(entry_body):
        updated_str += '+00:00'

    entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom">
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" >
<title /><updated>{updated}</updated><author><name /></author><id />
<content type="application/xml">
{body}</content></entry>'''
@@ -242,9 +258,23 @@ def _clone_node_with_namespaces(node_to_clone, original_doc):
    return clone

def _convert_response_to_feeds(response, convert_func):
    feeds = []
    if response is None:
        return None

    feeds = _list_of(Feed)

    x_ms_continuation = HeaderDict()
    for name, value in response.headers:
        if 'x-ms-continuation' in name:
            x_ms_continuation[name[len('x-ms-continuation')+1:]] = value
    if x_ms_continuation:
        setattr(feeds, 'x_ms_continuation', x_ms_continuation)

    xmldoc = minidom.parseString(response.body)
    for xml_entry in _get_children_from_path(xmldoc, 'feed', 'entry'):
    xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry')
    if not xml_entries:
        xml_entries = _get_children_from_path(xmldoc, 'entry') #in some cases, response contains only entry but no feed
    for xml_entry in xml_entries:
        new_node = _clone_node_with_namespaces(xml_entry, xmldoc)
        feeds.append(convert_func(new_node.toxml()))
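To make the continuation handling above concrete, a small sketch with made-up header names: the `x-ms-continuation-` prefix is stripped and the remainder becomes the key of the HeaderDict that gets attached to the returned feed list.

```Python
# Illustration only: assumed continuation headers from a listing response.
from azure import HeaderDict

headers = [('x-ms-continuation-nextpartitionkey', 'pk1'),
           ('x-ms-continuation-nextrowkey', 'rk1'),
           ('content-type', 'application/atom+xml')]
x_ms_continuation = HeaderDict()
for name, value in headers:
    if 'x-ms-continuation' in name:
        x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value
print x_ms_continuation['NextPartitionKey']   # -> 'pk1' (case-insensitive lookup)
```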
@@ -254,16 +284,19 @@ def _validate_not_none(param_name, param):
    if param is None:
        raise TypeError(_ERROR_VALUE_SHOULD_NOT_BE_NULL % (param_name))

def _html_encode(html):
    ch_map = (('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;'), ('"', '&quot;'), ('\'', '&apos;'))
    for name, value in ch_map:
        html = html.replace(name, value)
    return html

def _fill_list_of(xmldoc, element_type):
    xmlelements = _get_child_nodes(xmldoc, element_type.__name__)
    return [_parse_response_body(xmlelement.toxml(), element_type) for xmlelement in xmlelements]

def _fill_dict(xmldoc, element_name):
    xmlelements = _get_child_nodes(xmldoc, element_name)
    if xmlelements:
        return_obj = {}
        for child in xmlelements[0].childNodes:
            if child.firstChild:
                return_obj[child.nodeName] = child.firstChild.nodeValue
        return return_obj

def _fill_instance_child(xmldoc, element_name, return_type):
    '''Converts a child of the current dom element to the specified type. The child name
    '''
@@ -272,7 +305,10 @@ def _fill_instance_child(xmldoc, element_name, return_type):
    if not xmlelements:
        return None

    return _fill_instance_element(xmlelements[0], return_type)
    return_obj = return_type()
    _fill_data_to_return_object(xmlelements[0], return_obj)

    return return_obj

def _fill_instance_element(element, return_type):
    """Converts a DOM element into the specified object"""
@@ -367,6 +403,19 @@ def _parse_response(response, return_type):
    '''
    return _parse_response_body(response.body, return_type)

def _fill_data_to_return_object(node, return_obj):
    for name, value in vars(return_obj).iteritems():
        if isinstance(value, _list_of):
            setattr(return_obj, name, _fill_list_of(node, value.list_type))
        elif isinstance(value, WindowsAzureData):
            setattr(return_obj, name, _fill_instance_child(node, name, value.__class__))
        elif isinstance(value, dict):
            setattr(return_obj, name, _fill_dict(node, _get_serialization_name(name)))
        else:
            value = _fill_data_minidom(node, name, value)
            if value is not None:
                setattr(return_obj, name, value)

def _parse_response_body(respbody, return_type):
    '''
    parse the xml and fill all the data into a class of return_type
@@ -374,15 +423,7 @@ def _parse_response_body(respbody, return_type):
    doc = minidom.parseString(respbody)
    return_obj = return_type()
    for node in _get_child_nodes(doc, return_type.__name__):
        for name, value in vars(return_obj).iteritems():
            if isinstance(value, _list_of):
                setattr(return_obj, name, _fill_list_of(node, value.list_type))
            elif isinstance(value, WindowsAzureData):
                setattr(return_obj, name, _fill_instance_child(node, name, value.__class__))
            else:
                value = _fill_data_minidom(node, name, value)
                if value is not None:
                    setattr(return_obj, name, value)
        _fill_data_to_return_object(node, return_obj)

    return return_obj

@@ -446,11 +487,12 @@ def _dont_fail_not_exist(error):

def _parse_response_for_dict(response):
    ''' Extracts name-values from response header. Filter out the standard http headers.'''

    if response is None:
        return None
    http_headers = ['server', 'date', 'location', 'host',
                    'via', 'proxy-connection', 'x-ms-version', 'connection',
                    'content-length']
    return_dict = {}
                    'via', 'proxy-connection', 'connection']
    return_dict = HeaderDict()
    if response.headers:
        for name, value in response.headers:
            if not name.lower() in http_headers:
@@ -461,6 +503,8 @@ def _parse_response_for_dict(response):

def _parse_response_for_dict_prefix(response, prefix):
    ''' Extracts name-values for names starting with prefix from response header. Filter out the standard http headers.'''

    if response is None:
        return None
    return_dict = {}
    orig_dict = _parse_response_for_dict(response)
    if orig_dict:
@@ -475,6 +519,8 @@ def _parse_response_for_dict_prefix(response, prefix):

def _parse_response_for_dict_filter(response, filter):
    ''' Extracts name-values for names in filter from response header. Filter out the standard http headers.'''
    if response is None:
        return None
    return_dict = {}
    orig_dict = _parse_response_for_dict(response)
    if orig_dict:

@@ -13,6 +13,7 @@
# limitations under the License.
#--------------------------------------------------------------------------

HTTP_RESPONSE_NO_CONTENT = 204

class HTTPError(Exception):
    ''' HTTP Exception when response status code >= 300 '''

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from ctypes import c_void_p, c_long, c_ulong, c_longlong, c_ulonglong, c_short, c_ushort, c_wchar_p, c_byte
from ctypes import c_void_p, c_long, c_ulong, c_longlong, c_ulonglong, c_short, c_ushort, c_wchar_p, c_byte, c_size_t
from ctypes import byref, Structure, Union, POINTER, WINFUNCTYPE, HRESULT, oledll, WinDLL, cast, create_string_buffer
import ctypes
import urllib2
@@ -41,11 +41,23 @@ _ole32 = oledll.ole32
_oleaut32 = WinDLL('oleaut32')
_CLSIDFromString = _ole32.CLSIDFromString
_CoInitialize = _ole32.CoInitialize
_CoInitialize.argtypes = [c_void_p]

_CoCreateInstance = _ole32.CoCreateInstance

_SysAllocString = _oleaut32.SysAllocString
_SysAllocString.restype = c_void_p
_SysAllocString.argtypes = [c_wchar_p]

_SysFreeString = _oleaut32.SysFreeString
_SysFreeString.argtypes = [c_void_p]

_SafeArrayDestroy = _oleaut32.SafeArrayDestroy
_SafeArrayDestroy.argtypes = [c_void_p]

_CoTaskMemAlloc = _ole32.CoTaskMemAlloc
_CoTaskMemAlloc.restype = c_void_p
_CoTaskMemAlloc.argtypes = [c_size_t]
#------------------------------------------------------------------------------

class BSTR(c_wchar_p):
@@ -215,17 +227,7 @@ class _WinHttpRequest(c_void_p):
        status_text = bstr_status_text.value
        _SysFreeString(bstr_status_text)
        return status_text

    def response_text(self):
        ''' Gets response body as text. '''

        bstr_resptext = c_void_p()
        _WinHttpRequest._ResponseText(self, byref(bstr_resptext))
        bstr_resptext = ctypes.cast(bstr_resptext, c_wchar_p)
        resptext = bstr_resptext.value
        _SysFreeString(bstr_resptext)
        return resptext

    def response_body(self):
        '''
        Gets response body as a SAFEARRAY and converts the SAFEARRAY to str. If it is an xml
@@ -283,7 +285,7 @@ class _HTTPConnection:
        self.protocol = protocol
        clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}')
        iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}')
        _CoInitialize(0)
        _CoInitialize(None)
        _CoCreateInstance(byref(clsid), 0, 1, byref(iid), byref(self._httprequest))

    def putrequest(self, method, uri):
@@ -330,7 +332,7 @@ class _HTTPConnection:
        for resp_header in fixed_headers:
            if ':' in resp_header:
                pos = resp_header.find(':')
                headers.append((resp_header[:pos], resp_header[pos+1:].strip()))
                headers.append((resp_header[:pos].lower(), resp_header[pos+1:].strip()))

        body = self._httprequest.response_body()
        length = len(body)

@@ -23,10 +23,10 @@ from datetime import datetime

from azure.http import HTTPError
from azure import (WindowsAzureError, WindowsAzureData,
                   _create_entry, _get_entry_properties, _html_encode,
                   _create_entry, _get_entry_properties, xml_escape,
                   _get_child_nodes, WindowsAzureMissingResourceError,
                   WindowsAzureConflictError, _get_serialization_name,
                   _get_children_from_path)
                   _get_children_from_path, _get_first_child_node_value)
import azure

#default rule name for subscription
@@ -47,52 +47,90 @@ XML_SCHEMA_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
class Queue(WindowsAzureData):
    ''' Queue class corresponding to Queue Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780773'''

    def __init__(self):
        self.lock_duration = None
        self.max_size_in_megabytes = None
        self.duplicate_detection = None
        self.requires_duplicate_detection = None
        self.requires_session = None
        self.default_message_time_to_live = None
        self.enable_dead_lettering_on_message_expiration = None
        self.duplicate_detection_history_time_window = None
        self.max_delivery_count = None
        self.enable_batched_operations = None
        self.size_in_bytes = None
        self.message_count = None
    def __init__(self,
                 lock_duration=None,
                 max_size_in_megabytes=None,
                 requires_duplicate_detection=None,
                 requires_session=None,
                 default_message_time_to_live=None,
                 dead_lettering_on_message_expiration=None,
                 duplicate_detection_history_time_window=None,
                 max_delivery_count=None,
                 enable_batched_operations=None,
                 size_in_bytes=None,
                 message_count=None):

        self.lock_duration = lock_duration
        self.max_size_in_megabytes = max_size_in_megabytes
        self.requires_duplicate_detection = requires_duplicate_detection
        self.requires_session = requires_session
        self.default_message_time_to_live = default_message_time_to_live
        self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration
        self.duplicate_detection_history_time_window = duplicate_detection_history_time_window
        self.max_delivery_count = max_delivery_count
        self.enable_batched_operations = enable_batched_operations
        self.size_in_bytes = size_in_bytes
        self.message_count = message_count

class Topic(WindowsAzureData):
    ''' Topic class corresponding to Topic Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780749. '''

    def __init__(self):
        self.default_message_time_to_live = None
        self.max_size_in_mega_bytes = None
        self.requires_duplicate_detection = None
        self.duplicate_detection_history_time_window = None
        self.enable_batched_operations = None
        self.size_in_bytes = None
    def __init__(self,
                 default_message_time_to_live=None,
                 max_size_in_megabytes=None,
                 requires_duplicate_detection=None,
                 duplicate_detection_history_time_window=None,
                 enable_batched_operations=None,
                 size_in_bytes=None):

        self.default_message_time_to_live = default_message_time_to_live
        self.max_size_in_megabytes = max_size_in_megabytes
        self.requires_duplicate_detection = requires_duplicate_detection
        self.duplicate_detection_history_time_window = duplicate_detection_history_time_window
        self.enable_batched_operations = enable_batched_operations
        self.size_in_bytes = size_in_bytes

    @property
    def max_size_in_mega_bytes(self):
        import warnings
        warnings.warn('This attribute has been changed to max_size_in_megabytes.')
        return self.max_size_in_megabytes

    @max_size_in_mega_bytes.setter
    def max_size_in_mega_bytes(self, value):
        self.max_size_in_megabytes = value
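A short usage sketch of the compatibility property above (illustrative; assumes Topic is imported from azure.servicebus): reads and writes through the old name are forwarded to max_size_in_megabytes, with a warning on read.

```Python
from azure.servicebus import Topic

topic = Topic(max_size_in_megabytes=1024)
print topic.max_size_in_mega_bytes    # -> 1024, and emits the deprecation warning
topic.max_size_in_mega_bytes = 2048   # setter forwards to the new attribute
print topic.max_size_in_megabytes     # -> 2048
```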
class Subscription(WindowsAzureData):
    ''' Subscription class corresponding to Subscription Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780763. '''

    def __init__(self):
        self.lock_duration = None
        self.requires_session = None
        self.default_message_time_to_live = None
        self.dead_lettering_on_message_expiration = None
        self.dead_lettering_on_filter_evaluation_exceptions = None
        self.enable_batched_operations = None
        self.max_delivery_count = None
        self.message_count = None
    def __init__(self,
                 lock_duration=None,
                 requires_session=None,
                 default_message_time_to_live=None,
                 dead_lettering_on_message_expiration=None,
                 dead_lettering_on_filter_evaluation_exceptions=None,
                 enable_batched_operations=None,
                 max_delivery_count=None,
                 message_count=None):

        self.lock_duration = lock_duration
        self.requires_session = requires_session
        self.default_message_time_to_live = default_message_time_to_live
        self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration
        self.dead_lettering_on_filter_evaluation_exceptions = dead_lettering_on_filter_evaluation_exceptions
        self.enable_batched_operations = enable_batched_operations
        self.max_delivery_count = max_delivery_count
        self.message_count = message_count

class Rule(WindowsAzureData):
    ''' Rule class corresponding to Rule Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780753. '''

    def __init__(self):
        self.filter_type = ''
        self.filter_expression = ''
        self.action_type = ''
        self.action_expression = ''
    def __init__(self, filter_type=None, filter_expression=None, action_type=None, action_expression=None):
        self.filter_type = filter_type
        self.filter_expression = filter_expression
        self.action_type = action_type
        self.action_expression = action_type

class Message(WindowsAzureData):
    ''' Message class that used in send message/get mesage apis. '''
@@ -156,7 +194,7 @@ class Message(WindowsAzureData):
            elif isinstance(value, datetime):
                request.headers.append((name, '"' + value.strftime('%a, %d %b %Y %H:%M:%S GMT') + '"'))
            else:
                request.headers.append((name, str(value)))
                request.headers.append((name, str(value).lower()))

        # Adds content-type
        request.headers.append(('Content-Type', self.type))
@@ -268,11 +306,23 @@ def _create_message(response, service_instance):
            message_location = value
        elif name.lower() not in ['content-type', 'brokerproperties', 'transfer-encoding', 'server', 'location', 'date']:
            if '"' in value:
                custom_properties[name] = value[1:-1]
            else:
                custom_properties[name] = value
                value = value[1:-1]
                try:
                    custom_properties[name] = datetime.strptime(value, '%a, %d %b %Y %H:%M:%S GMT')
                except ValueError:
                    custom_properties[name] = value
            else: #only int, float or boolean
                if value.lower() == 'true':
                    custom_properties[name] = True
                elif value.lower() == 'false':
                    custom_properties[name] = False
                elif str(int(float(value))) == value: #int('3.1') doesn't work so need to get float('3.14') first
                    custom_properties[name] = int(value)
                else:
                    custom_properties[name] = float(value)

    if message_type == None:
        message = Message(respbody, service_instance, message_location, custom_properties, broker_properties)
        message = Message(respbody, service_instance, message_location, custom_properties, 'application/atom+xml;type=entry;charset=utf-8', broker_properties)
    else:
        message = Message(respbody, service_instance, message_location, custom_properties, message_type, broker_properties)
    return message
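To make the new custom-property coercion concrete, a small illustrative snippet with made-up values: quoted values are unquoted and tried as RFC 1123 dates, while unquoted values are parsed as bool, int, or float.

```Python
from datetime import datetime

# A quoted value that parses as a date becomes a datetime...
datetime.strptime('Wed, 02 Oct 2013 08:00:00 GMT', '%a, %d %b %Y %H:%M:%S GMT')
# -> datetime.datetime(2013, 10, 2, 8, 0)

# ...while unquoted values come back as typed Python values instead of raw strings.
int('3'), float('3.14'), 'true'.lower() == 'true'
# -> (3, 3.14, True)
```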
@@ -332,18 +382,6 @@ def _parse_bool(value):
        return True
    return False


_QUEUE_CONVERSION = {
    'MaxSizeInMegaBytes': int,
    'RequiresGroupedReceives': _parse_bool,
    'SupportsDuplicateDetection': _parse_bool,
    'SizeinBytes': int,
    'MessageCount': int,
    'EnableBatchedOperations': _parse_bool,
    'RequiresSession': _parse_bool,
    'LockDuration': int,
}

def _convert_xml_to_queue(xmlstr):
    ''' Converts xml response to queue object.

@@ -363,18 +401,51 @@ def _convert_xml_to_queue(xmlstr):

    invalid_queue = True
    #get node for each attribute in Queue class, if nothing found then the response is not valid xml for Queue.
    for queue_desc in _get_children_from_path(xmldoc, 'entry', 'content', 'QueueDescription'):
        for attr_name, attr_value in vars(queue).iteritems():
            xml_attrs = _get_child_nodes(queue_desc, _get_serialization_name(attr_name))
            if xml_attrs:
                xml_attr = xml_attrs[0]
                if xml_attr.firstChild:
                    value = xml_attr.firstChild.nodeValue
                    conversion = _QUEUE_CONVERSION.get(attr_name)
                    if conversion is not None:
                        value = conversion(value)
                    setattr(queue, attr_name, value)
                    invalid_queue = False
    for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'QueueDescription'):
        node_value = _get_first_child_node_value(desc, 'LockDuration')
        if node_value is not None:
            queue.lock_duration = node_value
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes')
        if node_value is not None:
            queue.max_size_in_megabytes = int(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'RequiresDuplicateDetection')
        if node_value is not None:
            queue.requires_duplicate_detection = _parse_bool(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'RequiresSession')
        if node_value is not None:
            queue.requires_session = _parse_bool(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'DefaultMessageTimeToLive')
        if node_value is not None:
            queue.default_message_time_to_live = node_value
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'DeadLetteringOnMessageExpiration')
        if node_value is not None:
            queue.dead_lettering_on_message_expiration = _parse_bool(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'DuplicateDetectionHistoryTimeWindow')
        if node_value is not None:
            queue.duplicate_detection_history_time_window = node_value
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations')
        if node_value is not None:
            queue.enable_batched_operations = _parse_bool(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount')
        if node_value is not None:
            queue.max_delivery_count = int(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'MessageCount')
        if node_value is not None:
            queue.message_count = int(node_value)
            invalid_queue = False
        node_value = _get_first_child_node_value(desc, 'SizeInBytes')
        if node_value is not None:
            queue.size_in_bytes = int(node_value)
            invalid_queue = False

    if invalid_queue:
        raise WindowsAzureError(azure._ERROR_QUEUE_NOT_FOUND)
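A rough sketch of the behaviour the rewritten parser above aims for (the XML is abbreviated and hand-written, not a verbatim service payload, and the private import is for illustration only): numeric and boolean fields of the queue description now come back as typed Python values.

```Python
from azure.servicebus import _convert_xml_to_queue   # private helper, shown only for illustration

xmlstr = '''<entry xmlns="http://www.w3.org/2005/Atom">
  <content type="application/xml">
    <QueueDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
      <MaxSizeInMegabytes>1024</MaxSizeInMegabytes>
      <RequiresSession>false</RequiresSession>
      <MessageCount>7</MessageCount>
    </QueueDescription>
  </content>
</entry>'''

queue = _convert_xml_to_queue(xmlstr)
print queue.max_size_in_megabytes   # -> 1024 (int)
print queue.requires_session        # -> False (bool)
print queue.message_count           # -> 7 (int)
```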
||||
|
@ -388,12 +459,6 @@ def _convert_xml_to_queue(xmlstr):
|
|||
def _convert_response_to_topic(response):
|
||||
return _convert_xml_to_topic(response.body)
|
||||
|
||||
_TOPIC_CONVERSION = {
|
||||
'MaxSizeInMegaBytes': int,
|
||||
'RequiresDuplicateDetection': _parse_bool,
|
||||
'DeadLetteringOnFilterEvaluationExceptions': _parse_bool
|
||||
}
|
||||
|
||||
def _convert_xml_to_topic(xmlstr):
|
||||
'''Converts xml response to topic
|
||||
|
||||
|
@ -402,7 +467,7 @@ def _convert_xml_to_topic(xmlstr):
|
|||
<content type='application/xml'>
|
||||
<TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
|
||||
<DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive>
|
||||
<MaxSizeInMegaBytes>1024</MaxSizeInMegaBytes>
|
||||
<MaxSizeInMegabytes>1024</MaxSizeInMegabytes>
|
||||
<RequiresDuplicateDetection>false</RequiresDuplicateDetection>
|
||||
<DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow>
|
||||
<DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions>
|
||||
|
@ -414,20 +479,34 @@ def _convert_xml_to_topic(xmlstr):
|
|||
topic = Topic()
|
||||
|
||||
invalid_topic = True
|
||||
|
||||
#get node for each attribute in Topic class, if nothing found then the response is not valid xml for Topic.
|
||||
for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'TopicDescription'):
|
||||
invalid_topic = True
|
||||
for attr_name, attr_value in vars(topic).iteritems():
|
||||
xml_attrs = _get_child_nodes(desc, _get_serialization_name(attr_name))
|
||||
if xml_attrs:
|
||||
xml_attr = xml_attrs[0]
|
||||
if xml_attr.firstChild:
|
||||
value = xml_attr.firstChild.nodeValue
|
||||
conversion = _TOPIC_CONVERSION.get(attr_name)
|
||||
if conversion is not None:
|
||||
value = conversion(value)
|
||||
setattr(topic, attr_name, value)
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'DefaultMessageTimeToLive')
|
||||
if node_value is not None:
|
||||
topic.default_message_time_to_live = node_value
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes')
|
||||
if node_value is not None:
|
||||
topic.max_size_in_megabytes = int(node_value)
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'RequiresDuplicateDetection')
|
||||
if node_value is not None:
|
||||
topic.requires_duplicate_detection = _parse_bool(node_value)
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'DuplicateDetectionHistoryTimeWindow')
|
||||
if node_value is not None:
|
||||
topic.duplicate_detection_history_time_window = node_value
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations')
|
||||
if node_value is not None:
|
||||
topic.enable_batched_operations = _parse_bool(node_value)
|
||||
invalid_topic = False
|
||||
node_value = _get_first_child_node_value(desc, 'SizeInBytes')
|
||||
if node_value is not None:
|
||||
topic.size_in_bytes = int(node_value)
|
||||
invalid_topic = False
|
||||
|
||||
if invalid_topic:
|
||||
raise WindowsAzureError(azure._ERROR_TOPIC_NOT_FOUND)
|
||||
|
@ -440,15 +519,6 @@ def _convert_xml_to_topic(xmlstr):
|
|||
def _convert_response_to_subscription(response):
|
||||
return _convert_xml_to_subscription(response.body)
|
||||
|
||||
_SUBSCRIPTION_CONVERSION = {
|
||||
'RequiresSession' : _parse_bool,
|
||||
'DeadLetteringOnMessageExpiration': _parse_bool,
|
||||
'DefaultMessageTimeToLive': int,
|
||||
'EnableBatchedOperations': _parse_bool,
|
||||
'MaxDeliveryCount': int,
|
||||
'MessageCount': int,
|
||||
}
|
||||
|
||||
def _convert_xml_to_subscription(xmlstr):
|
||||
'''Converts xml response to subscription
|
||||
|
||||
|
@ -467,18 +537,31 @@ def _convert_xml_to_subscription(xmlstr):
|
|||
xmldoc = minidom.parseString(xmlstr)
|
||||
subscription = Subscription()
|
||||
|
||||
for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'subscriptiondescription'):
|
||||
for attr_name, attr_value in vars(subscription).iteritems():
|
||||
tag_name = attr_name.replace('_', '')
|
||||
xml_attrs = _get_child_nodes(desc, tag_name)
|
||||
if xml_attrs:
|
||||
xml_attr = xml_attrs[0]
|
||||
if xml_attr.firstChild:
|
||||
value = xml_attr.firstChild.nodeValue
|
||||
conversion = _SUBSCRIPTION_CONVERSION.get(attr_name)
|
||||
if conversion is not None:
|
||||
value = conversion(value)
|
||||
setattr(subscription, attr_name, value)
|
||||
for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'SubscriptionDescription'):
|
||||
node_value = _get_first_child_node_value(desc, 'LockDuration')
|
||||
if node_value is not None:
|
||||
subscription.lock_duration = node_value
|
||||
node_value = _get_first_child_node_value(desc, 'RequiresSession')
|
||||
if node_value is not None:
|
||||
subscription.requires_session = _parse_bool(node_value)
|
||||
node_value = _get_first_child_node_value(desc, 'DefaultMessageTimeToLive')
|
||||
if node_value is not None:
|
||||
subscription.default_message_time_to_live = node_value
|
||||
node_value = _get_first_child_node_value(desc, 'DeadLetteringOnFilterEvaluationExceptions')
|
||||
if node_value is not None:
|
||||
subscription.dead_lettering_on_filter_evaluation_exceptions = _parse_bool(node_value)
|
||||
node_value = _get_first_child_node_value(desc, 'DeadLetteringOnMessageExpiration')
|
||||
if node_value is not None:
|
||||
subscription.dead_lettering_on_message_expiration = _parse_bool(node_value)
|
||||
node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations')
|
||||
if node_value is not None:
|
||||
subscription.enable_batched_operations = _parse_bool(node_value)
|
||||
node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount')
|
||||
if node_value is not None:
|
||||
subscription.max_delivery_count = int(node_value)
|
||||
node_value = _get_first_child_node_value(desc, 'MessageCount')
|
||||
if node_value is not None:
|
||||
subscription.message_count = int(node_value)
|
||||
|
||||
for name, value in _get_entry_properties(xmlstr, True).iteritems():
|
||||
setattr(subscription, name, value)
|
||||
|
@ -496,21 +579,21 @@ def convert_subscription_to_xml(subscription):
|
|||
subscription_body = '<SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
|
||||
if subscription:
|
||||
if subscription.lock_duration is not None:
|
||||
subscription_body += ''.join(['<LockDuration>', subscription.lock_duration, '</LockDuration>'])
|
||||
subscription_body += ''.join(['<LockDuration>', str(subscription.lock_duration), '</LockDuration>'])
|
||||
if subscription.requires_session is not None:
|
||||
subscription_body += ''.join(['<RequiresSession>', subscription.requires_session, '</RequiresSession>'])
|
||||
subscription_body += ''.join(['<RequiresSession>', str(subscription.requires_session).lower(), '</RequiresSession>'])
|
||||
if subscription.default_message_time_to_live is not None:
|
||||
subscription_body += ''.join(['<DefaultMessageTimeToLive>', subscription.default_message_time_to_live, '</DefaultMessageTimeToLive>'])
|
||||
subscription_body += ''.join(['<DefaultMessageTimeToLive>', str(subscription.default_message_time_to_live), '</DefaultMessageTimeToLive>'])
|
||||
if subscription.dead_lettering_on_message_expiration is not None:
|
||||
subscription_body += ''.join(['<DeadLetteringOnMessageExpiration>', subscription.dead_lettering_on_message_expiration, '</DeadLetteringOnMessageExpiration>'])
|
||||
subscription_body += ''.join(['<DeadLetteringOnMessageExpiration>', str(subscription.dead_lettering_on_message_expiration).lower(), '</DeadLetteringOnMessageExpiration>'])
|
||||
if subscription.dead_lettering_on_filter_evaluation_exceptions is not None:
|
||||
subscription_body += ''.join(['<DeadLetteringOnFilterEvaluationExceptions>', subscription.dead_lettering_on_filter_evaluation_exceptions, '</DeadLetteringOnFilterEvaluationExceptions>'])
|
||||
subscription_body += ''.join(['<DeadLetteringOnFilterEvaluationExceptions>', str(subscription.dead_lettering_on_filter_evaluation_exceptions).lower(), '</DeadLetteringOnFilterEvaluationExceptions>'])
|
||||
if subscription.enable_batched_operations is not None:
|
||||
subscription_body += ''.join(['<EnableBatchedOperations>', subscription.enable_batched_operations, '</EnableBatchedOperations>'])
|
||||
subscription_body += ''.join(['<EnableBatchedOperations>', str(subscription.enable_batched_operations).lower(), '</EnableBatchedOperations>'])
|
||||
if subscription.max_delivery_count is not None:
|
||||
subscription_body += ''.join(['<MaxDeliveryCount>', subscription.max_delivery_count, '</MaxDeliveryCount>'])
|
||||
subscription_body += ''.join(['<MaxDeliveryCount>', str(subscription.max_delivery_count), '</MaxDeliveryCount>'])
|
||||
if subscription.message_count is not None:
|
||||
subscription_body += ''.join(['<MessageCount>', subscription.message_count, '</MessageCount>'])
|
||||
subscription_body += ''.join(['<MessageCount>', str(subscription.message_count), '</MessageCount>'])
|
||||
|
||||
subscription_body += '</SubscriptionDescription>'
|
||||
return _create_entry(subscription_body)
|
||||
|
@ -525,17 +608,18 @@ def convert_rule_to_xml(rule):
|
|||
rule_body = '<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
|
||||
if rule:
|
||||
if rule.filter_type:
|
||||
rule_body += ''.join(['<Filter i:type="', _html_encode(rule.filter_type), '">'])
|
||||
rule_body += ''.join(['<Filter i:type="', xml_escape(rule.filter_type), '">'])
|
||||
if rule.filter_type == 'CorrelationFilter':
|
||||
rule_body += ''.join(['<CorrelationId>', _html_encode(rule.filter_expression), '</CorrelationId>'])
|
||||
rule_body += ''.join(['<CorrelationId>', xml_escape(rule.filter_expression), '</CorrelationId>'])
|
||||
else:
|
||||
rule_body += ''.join(['<SqlExpression>', _html_encode(rule.filter_expression), '</SqlExpression>'])
|
||||
rule_body += ''.join(['<SqlExpression>', xml_escape(rule.filter_expression), '</SqlExpression>'])
|
||||
rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
|
||||
rule_body += '</Filter>'
|
||||
if rule.action_type:
|
||||
rule_body += ''.join(['<Action i:type="', _html_encode(rule.action_type), '">'])
|
||||
if rule.action_type == 'SqlFilterAction':
|
||||
rule_body += ''.join(['<SqlExpression>', _html_encode(rule.action_expression), '</SqlExpression>'])
|
||||
rule_body += ''.join(['<Action i:type="', xml_escape(rule.action_type), '">'])
|
||||
if rule.action_type == 'SqlRuleAction':
|
||||
rule_body += ''.join(['<SqlExpression>', xml_escape(rule.action_expression), '</SqlExpression>'])
|
||||
rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
|
||||
rule_body += '</Action>'
|
||||
rule_body += '</RuleDescription>'
|
||||
|
||||
|
@ -553,16 +637,16 @@ def convert_topic_to_xml(topic):
|
|||
if topic:
|
||||
if topic.default_message_time_to_live is not None:
|
||||
topic_body += ''.join(['<DefaultMessageTimeToLive>', str(topic.default_message_time_to_live), '</DefaultMessageTimeToLive>'])
|
||||
if topic.max_size_in_mega_bytes is not None:
|
||||
if topic.max_size_in_megabytes is not None:
|
||||
topic_body += ''.join(['<MaxSizeInMegabytes>', str(topic.max_size_in_megabytes), '</MaxSizeInMegabytes>'])
|
||||
if topic.requires_duplicate_detection is not None:
|
||||
topic_body += ''.join(['<RequiresDuplicateDetection>', str(topic.requires_duplicate_detection), '</RequiresDuplicateDetection>'])
|
||||
topic_body += ''.join(['<RequiresDuplicateDetection>', str(topic.requires_duplicate_detection).lower(), '</RequiresDuplicateDetection>'])
|
||||
if topic.duplicate_detection_history_time_window is not None:
|
||||
topic_body += ''.join(['<DuplicateDetectionHistoryTimeWindow>', str(topic.duplicate_detection_history_time_window), '</DuplicateDetectionHistoryTimeWindow>'])
|
||||
if topic.enable_batched_operations is not None:
|
||||
topic_body += ''.join(['<EnableBatchedOperations>', str(topic.enable_batched_operations), '</EnableBatchedOperations>'])
|
||||
topic_body += ''.join(['<EnableBatchedOperations>', str(topic.enable_batched_operations).lower(), '</EnableBatchedOperations>'])
|
||||
if topic.size_in_bytes is not None:
|
||||
topic_body += ''.join(['<SizeinBytes>', str(topic.size_in_bytes), '</SizeinBytes>'])
|
||||
topic_body += ''.join(['<SizeInBytes>', str(topic.size_in_bytes), '</SizeInBytes>'])
|
||||
topic_body += '</TopicDescription>'
|
||||
|
||||
return _create_entry(topic_body)
|
||||
|
@ -581,21 +665,21 @@ def convert_queue_to_xml(queue):
|
|||
if queue.max_size_in_megabytes is not None:
|
||||
queue_body += ''.join(['<MaxSizeInMegabytes>', str(queue.max_size_in_megabytes), '</MaxSizeInMegabytes>'])
|
||||
if queue.requires_duplicate_detection is not None:
|
||||
queue_body += ''.join(['<RequiresDuplicateDetection>', str(queue.requires_duplicate_detection), '</RequiresDuplicateDetection>'])
|
||||
queue_body += ''.join(['<RequiresDuplicateDetection>', str(queue.requires_duplicate_detection).lower(), '</RequiresDuplicateDetection>'])
|
||||
if queue.requires_session is not None:
|
||||
queue_body += ''.join(['<RequiresSession>', str(queue.requires_session), '</RequiresSession>'])
|
||||
queue_body += ''.join(['<RequiresSession>', str(queue.requires_session).lower(), '</RequiresSession>'])
|
||||
if queue.default_message_time_to_live is not None:
|
||||
queue_body += ''.join(['<DefaultMessageTimeToLive>', str(queue.default_message_time_to_live), '</DefaultMessageTimeToLive>'])
|
||||
if queue.enable_dead_lettering_on_message_expiration is not None:
|
||||
queue_body += ''.join(['<EnableDeadLetteringOnMessageExpiration>', str(queue.enable_dead_lettering_on_message_expiration), '</EnableDeadLetteringOnMessageExpiration>'])
|
||||
if queue.dead_lettering_on_message_expiration is not None:
|
||||
queue_body += ''.join(['<DeadLetteringOnMessageExpiration>', str(queue.dead_lettering_on_message_expiration).lower(), '</DeadLetteringOnMessageExpiration>'])
|
||||
if queue.duplicate_detection_history_time_window is not None:
|
||||
queue_body += ''.join(['<DuplicateDetectionHistoryTimeWindow>', str(queue.duplicate_detection_history_time_window), '</DuplicateDetectionHistoryTimeWindow>'])
|
||||
if queue.max_delivery_count is not None:
|
||||
queue_body += ''.join(['<MaxDeliveryCount>', str(queue.max_delivery_count), '</MaxDeliveryCount>'])
|
||||
if queue.enable_batched_operations is not None:
|
||||
queue_body += ''.join(['<EnableBatchedOperations>', str(queue.enable_batched_operations), '</EnableBatchedOperations>'])
|
||||
queue_body += ''.join(['<EnableBatchedOperations>', str(queue.enable_batched_operations).lower(), '</EnableBatchedOperations>'])
|
||||
if queue.size_in_bytes is not None:
|
||||
queue_body += ''.join(['<SizeinBytes>', str(queue.size_in_bytes), '</SizeinBytes>'])
|
||||
queue_body += ''.join(['<SizeInBytes>', str(queue.size_in_bytes), '</SizeInBytes>'])
|
||||
if queue.message_count is not None:
|
||||
queue_body += ''.join(['<MessageCount>', str(queue.message_count), '</MessageCount>'])
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ import os
|
|||
import urllib2
|
||||
|
||||
from azure.http.httpclient import _HTTPClient
|
||||
from azure.http import HTTPError
|
||||
from azure.http import HTTPError, HTTP_RESPONSE_NO_CONTENT
|
||||
from azure.servicebus import (_update_service_bus_header, _create_message,
|
||||
convert_topic_to_xml, _convert_response_to_topic,
|
||||
convert_queue_to_xml, _convert_response_to_queue,
|
||||
|
@ -27,11 +27,11 @@ from azure.servicebus import (_update_service_bus_header, _create_message,
|
|||
_convert_xml_to_subscription, _convert_xml_to_rule,
|
||||
_service_bus_error_handler, AZURE_SERVICEBUS_NAMESPACE,
|
||||
AZURE_SERVICEBUS_ACCESS_KEY, AZURE_SERVICEBUS_ISSUER)
|
||||
from azure.http import HTTPRequest
|
||||
from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT
|
||||
from azure import (_validate_not_none, Feed,
|
||||
_convert_response_to_feeds, _str_or_none, _int_or_none,
|
||||
_get_request_body, _update_request_uri_query,
|
||||
_dont_fail_on_exist, _dont_fail_not_exist,
|
||||
_dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError,
|
||||
WindowsAzureError, _parse_response, _convert_class_to_xml,
|
||||
_parse_response_for_dict, _parse_response_for_dict_prefix,
|
||||
_parse_response_for_dict_filter,
|
||||
|
@ -699,7 +699,5 @@ class ServiceBusService:
|
|||
except HTTPError as e:
|
||||
return _service_bus_error_handler(e)
|
||||
|
||||
if not resp:
|
||||
return None
|
||||
return resp
|
||||
|
||||
|
|
|
@ -21,8 +21,8 @@ from xml.dom import minidom
|
|||
import types
|
||||
from datetime import datetime
|
||||
|
||||
from azure import (_create_entry,
|
||||
_get_entry_properties, _html_encode, WindowsAzureError,
|
||||
from azure import (_create_entry, METADATA_NS, _parse_response_for_dict,
|
||||
_get_entry_properties, WindowsAzureError,
|
||||
_get_child_nodes, _get_child_nodesNS,
|
||||
WindowsAzureConflictError,
|
||||
WindowsAzureMissingResourceError, _list_of,
|
||||
|
@ -51,10 +51,13 @@ class ContainerEnumResults(EnumResultsBase):
|
|||
def __init__(self):
|
||||
EnumResultsBase.__init__(self)
|
||||
self.containers = _list_of(Container)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.containers)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.containers)
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self.containers[index]
|
||||
|
||||
|
@ -65,7 +68,7 @@ class Container(WindowsAzureData):
|
|||
self.name = ''
|
||||
self.url = ''
|
||||
self.properties = Properties()
|
||||
self.metadata = Metadata()
|
||||
self.metadata = {}
|
||||
|
||||
class Properties(WindowsAzureData):
|
||||
''' Blob container's properties class. '''
|
||||
|
@ -74,29 +77,20 @@ class Properties(WindowsAzureData):
|
|||
self.last_modified = ''
|
||||
self.etag = ''
|
||||
|
||||
class Metadata(WindowsAzureData):
|
||||
''' Metadata class. '''
|
||||
|
||||
def __init__(self):
|
||||
self.metadata_name = ''
|
||||
|
||||
class RetentionPolicy(WindowsAzureData):
|
||||
''' RetentionPolicy in service properties. '''
|
||||
|
||||
def __init__(self):
|
||||
self.enabled = False
|
||||
self.__dict__['days'] = None
|
||||
|
||||
def get_days(self):
|
||||
|
||||
def get_days(self):
|
||||
#convert days to int value
|
||||
return int(self.__dict__['days'])
|
||||
|
||||
def set_days(self, value):
|
||||
''' set default days if days is set to empty. '''
|
||||
if value == '':
|
||||
self.__dict__['days'] = 10
|
||||
else:
|
||||
self.__dict__['days'] = value
|
||||
self.__dict__['days'] = value
|
||||
|
||||
days = property(fget=get_days, fset=set_days)
|
||||
|
||||
|
@ -143,10 +137,18 @@ class SignedIdentifier(WindowsAzureData):
|
|||
|
||||
class SignedIdentifiers(WindowsAzureData):
|
||||
''' SignedIdentifier list. '''
|
||||
|
||||
def __init__(self):
|
||||
self.signed_identifiers = _list_of(SignedIdentifier)
|
||||
self.signed_identifiers = _list_of(SignedIdentifier)
|
||||
|
||||
def __iter__(self):
|
||||
return self.signed_identifiers
|
||||
return iter(self.signed_identifiers)
|
||||
|
||||
def __len__(self):
|
||||
        return len(self.signed_identifiers)

    def __getitem__(self, index):
        return self.signed_identifiers[index]

class BlobEnumResults(EnumResultsBase):
    ''' Blob list.'''

@@ -154,13 +156,24 @@ class BlobEnumResults(EnumResultsBase):
    def __init__(self):
        EnumResultsBase.__init__(self)
        self.blobs = _list_of(Blob)

    def __iter__(self):
        return iter(self.blobs)

    def __len__(self):
        return len(self.blobs)

    def __getitem__(self, index):
        return self.blobs[index]

class BlobResult(str):

    def __new__(cls, blob, properties):
        return str.__new__(cls, blob)

    def __init__(self, blob, properties):
        self.properties = properties

class Blob(WindowsAzureData):
    ''' Blob class. '''

@@ -169,7 +182,7 @@ class Blob(WindowsAzureData):
        self.snapshot = ''
        self.url = ''
        self.properties = BlobProperties()
-       self.metadata = Metadata()
+       self.metadata = {}
        self.blob_prefix = BlobPrefix()

class BlobProperties(WindowsAzureData):

@@ -202,20 +215,14 @@ class BlobBlock(WindowsAzureData):

class BlobBlockList(WindowsAzureData):
    ''' BlobBlockList class '''

    def __init__(self):
        self.committed_blocks = []
        self.uncommitted_blocks = []

-class BlockList(WindowsAzureData):
-    ''' BlockList used to submit block list. '''
-
-    def __init__(self):
-        self.committed = []
-        self.uncommitted = []
-        self.latest = []

class PageRange(WindowsAzureData):
    ''' Page Range for page blob. '''

    def __init__(self):
        self.start = 0
        self.end = 0

@@ -225,8 +232,15 @@ class PageList:

    def __init__(self):
        self.page_ranges = _list_of(PageRange)

    def __iter__(self):
-       return self.page_ranges
+       return iter(self.page_ranges)

    def __len__(self):
        return len(self.page_ranges)

    def __getitem__(self, index):
        return self.page_ranges[index]

class QueueEnumResults(EnumResultsBase):
    ''' Queue list'''

@@ -234,10 +248,13 @@ class QueueEnumResults(EnumResultsBase):
    def __init__(self):
        EnumResultsBase.__init__(self)
        self.queues = _list_of(Queue)

    def __iter__(self):
        return iter(self.queues)

    def __len__(self):
        return len(self.queues)

    def __getitem__(self, index):
        return self.queues[index]

@@ -247,17 +264,20 @@ class Queue(WindowsAzureData):
    def __init__(self):
        self.name = ''
        self.url = ''
-       self.metadata = Metadata()
+       self.metadata = {}

class QueueMessagesList(WindowsAzureData):
    ''' Queue message list. '''

    def __init__(self):
        self.queue_messages = _list_of(QueueMessage)

    def __iter__(self):
        return iter(self.queue_messages)

    def __len__(self):
        return len(self.queue_messages)

    def __getitem__(self, index):
        return self.queue_messages[index]

@@ -273,17 +293,6 @@ class QueueMessage(WindowsAzureData):
        self.dequeue_count = ''
        self.message_text = ''

-class TableEnumResult(EnumResultsBase):
-    def __init__():
-        EnumResultsBase.__init__(self)
-        self.tables = _list_of(Table)
-    def __iter__(self):
-        return iter(self.tables)
-    def __len__(self):
-        return len(self.tables)
-    def __getitem__(self, index):
-        return self.tables[index]

class Entity(WindowsAzureData):
    ''' Entity class. The attributes of entity will be created dynamically. '''
    pass
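Note (not part of the diff): with `__iter__`, `__len__` and `__getitem__` on the enumeration results, and `metadata` now a plain dict, listing calls can be consumed like ordinary sequences. A minimal sketch, assuming an existing container and placeholder credentials:

```Python
from azure.storage import BlobService

blob_service = BlobService('myaccount', 'mykey')   # placeholder credentials
blobs = blob_service.list_blobs('taskcontainer')

print(len(blobs))        # __len__ on BlobEnumResults
print(blobs[0].name)     # __getitem__
for blob in blobs:       # __iter__
    print(blob.name, blob.metadata)   # metadata is a plain dict after this change
```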
@@ -430,15 +439,18 @@ def _sign_storage_table_request(request, account_name, account_key):
    auth_string = 'SharedKey ' + account_name + ':' + base64.b64encode(signed_hmac_sha256.digest())
    return auth_string


def _to_python_bool(value):
    if value.lower() == 'true':
        return True
    return False

def _to_entity_int(data):
-   return 'Edm.Int32', str(data)
    int_max = (2 << 30) - 1
    import sys
    if data > (int_max) or data < (int_max + 1)*(-1):
        return 'Edm.Int64', str(data)
    else:
        return 'Edm.Int32', str(data)

def _to_entity_bool(value):
    if value:

@@ -469,7 +481,10 @@ def _from_entity_int(value):
    return int(value)

def _from_entity_datetime(value):
-   return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
+   if value.endswith('Z'):
+       return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
+   else:
+       return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S')

_ENTITY_TO_PYTHON_CONVERSIONS = {
    'Edm.Int32': _from_entity_int,

@@ -542,9 +557,12 @@ def convert_entity_to_xml(source):

        #form the property node
        properties_str += ''.join(['<d:', name])
-       if mtype:
-           properties_str += ''.join([' m:type="', mtype, '"'])
-       properties_str += ''.join(['>', xml_escape(value), '</d:', name, '>'])
+       if value == '':
+           properties_str += ' m:null="true" />'
+       else:
+           if mtype:
+               properties_str += ''.join([' m:type="', mtype, '"'])
+           properties_str += ''.join(['>', xml_escape(value), '</d:', name, '>'])

    #generate the entity_body
    entity_body = entity_body.format(properties=properties_str)

@@ -576,6 +594,10 @@ def convert_block_list_to_xml(block_id_list):

    return xml+'</BlockList>'

+def _create_blob_result(response):
+   blob_properties = _parse_response_for_dict(response)
+   return BlobResult(response.body, blob_properties)

def convert_response_to_block_list(response):
    '''
    Converts xml response to block list class.

@@ -601,8 +623,9 @@ def _remove_prefix(name):
        return name[colon + 1:]
    return name

+METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'

def _convert_response_to_entity(response):
    if response is None:
        return response
    return _convert_xml_to_entity(response.body)

def _convert_xml_to_entity(xmlstr):

@@ -644,7 +667,6 @@ def _convert_xml_to_entity(xmlstr):
        return None

    entity = Entity()

    #extract each property node and get the type from attribute and node value
    for xml_property in xml_properties[0].childNodes:
        if xml_property.firstChild:

@@ -662,19 +684,24 @@ def _convert_xml_to_entity(xmlstr):
            #if not isnull and no type info, then it is a string and we just need the str type to hold the property.
            if not isnull and not mtype:
                setattr(entity, name, value)
            elif isnull == 'true':
                if mtype:
                    property = EntityProperty(mtype, None)
                else:
                    property = EntityProperty('Edm.String', None)
            else: #need an object to hold the property
                conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype)
                if conv is not None:
                    property = conv(value)
                else:
-                   property = EntityProperty()
-                   setattr(property, 'value', value)
-               if isnull:
-                   property.isnull = str(isnull)
-               if mtype:
-                   property.type = str(mtype)
+                   property = EntityProperty(mtype, value)
                setattr(entity, name, property)

    #extract id, updated and name value from feed entry and set them of rule.
    for name, value in _get_entry_properties(xmlstr, True).iteritems():
        if name in ['etag']:
            setattr(entity, name, value)

    return entity

def _convert_xml_to_table(xmlstr):
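Note (illustrative, not part of the diff): the entity conversion helpers above pick the EDM type from the value and now tolerate timestamps with or without a trailing 'Z'. A standalone re-statement of that logic:

```Python
from datetime import datetime

INT32_MAX = (2 << 30) - 1   # 2**31 - 1, same bound as _to_entity_int

def entity_int_type(data):
    # values outside the 32-bit range are tagged as Edm.Int64
    if data > INT32_MAX or data < -(INT32_MAX + 1):
        return 'Edm.Int64', str(data)
    return 'Edm.Int32', str(data)

def parse_entity_datetime(value):
    fmt = '%Y-%m-%dT%H:%M:%SZ' if value.endswith('Z') else '%Y-%m-%dT%H:%M:%S'
    return datetime.strptime(value, fmt)

print(entity_int_type(42))                            # ('Edm.Int32', '42')
print(entity_int_type(5000000000))                    # ('Edm.Int64', '5000000000')
print(parse_entity_datetime('2011-10-02T18:48:05Z'))  # both timestamp forms now parse
```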
@@ -18,13 +18,13 @@ import urllib2

from azure.storage import *
from azure.storage.storageclient import _StorageClient
-from azure.storage import (_update_storage_blob_header,
+from azure.storage import (_update_storage_blob_header, _create_blob_result,
        convert_block_list_to_xml, convert_response_to_block_list)
-from azure.http import HTTPRequest
+from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT
from azure import (_validate_not_none, Feed,
        _convert_response_to_feeds, _str_or_none, _int_or_none,
        _get_request_body, _update_request_uri_query,
-       _dont_fail_on_exist, _dont_fail_not_exist,
+       _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError,
        WindowsAzureError, _parse_response, _convert_class_to_xml,
        _parse_response_for_dict, _parse_response_for_dict_prefix,
        _parse_response_for_dict_filter,

@@ -49,7 +49,8 @@ class BlobService(_StorageClient):
        with the next list operation.
        maxresults: Optional. Specifies the maximum number of containers to return.
        include: Optional. Include this parameter to specify that the container's metadata be
-           returned as part of the response body.
+           returned as part of the response body. set this parameter to string 'metadata' to
+           get container's metadata.
        '''
        request = HTTPRequest()
        request.method = 'GET'

@@ -312,7 +313,7 @@ class BlobService(_StorageClient):
        request.headers = _update_storage_blob_header(request, self.account_name, self.account_key)
        response = self._perform_request(request)

-   def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, content_encoding=None, content_language=None, content_m_d5=None, cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_blob_cache_control=None, x_ms_meta_name_values=None, x_ms_lease_id=None, x_ms_blob_content_length=None, x_ms_blob_sequence_number=None):
+   def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, content_encoding=None, content_language=None, content_md5=None, cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_blob_cache_control=None, x_ms_meta_name_values=None, x_ms_lease_id=None, x_ms_blob_content_length=None, x_ms_blob_sequence_number=None):
        '''
        Creates a new block blob or page blob, or updates the content of an existing block blob.

@@ -335,7 +336,7 @@ class BlobService(_StorageClient):
            ('x-ms-blob-type', _str_or_none(x_ms_blob_type)),
            ('Content-Encoding', _str_or_none(content_encoding)),
            ('Content-Language', _str_or_none(content_language)),
-           ('Content-MD5', _str_or_none(content_m_d5)),
+           ('Content-MD5', _str_or_none(content_md5)),
            ('Cache-Control', _str_or_none(cache_control)),
            ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)),
            ('x-ms-blob-content-encoding', _str_or_none(x_ms_blob_content_encoding)),

@@ -376,7 +377,7 @@ class BlobService(_StorageClient):
        request.headers = _update_storage_blob_header(request, self.account_name, self.account_key)
        response = self._perform_request(request)

-       return response.body
+       return _create_blob_result(response)

    def get_blob_metadata(self, container_name, blob_name, snapshot=None, x_ms_lease_id=None):
        '''

@@ -481,6 +482,8 @@ class BlobService(_StorageClient):
        request.headers = _update_storage_blob_header(request, self.account_name, self.account_key)
        response = self._perform_request(request)

+       return _parse_response_for_dict_filter(response, filter=['x-ms-snapshot', 'etag', 'last-modified'])

    def copy_blob(self, container_name, blob_name, x_ms_copy_source, x_ms_meta_name_values=None, x_ms_source_if_modified_since=None, x_ms_source_if_unmodified_since=None, x_ms_source_if_match=None, x_ms_source_if_none_match=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, x_ms_lease_id=None, x_ms_source_lease_id=None):
        '''
        Copies a blob to a destination within the storage account.

@@ -559,7 +562,7 @@ class BlobService(_StorageClient):
        request.headers = _update_storage_blob_header(request, self.account_name, self.account_key)
        response = self._perform_request(request)

-   def put_block(self, container_name, blob_name, block, blockid, content_m_d5=None, x_ms_lease_id=None):
+   def put_block(self, container_name, blob_name, block, blockid, content_md5=None, x_ms_lease_id=None):
        '''
        Creates a new block to be committed as part of a blob.

@@ -580,7 +583,7 @@ class BlobService(_StorageClient):
        request.host = _get_blob_host(self.account_name, self.use_local_storage)
        request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=block'
        request.headers = [
-           ('Content-MD5', _str_or_none(content_m_d5)),
+           ('Content-MD5', _str_or_none(content_md5)),
            ('x-ms-lease-id', _str_or_none(x_ms_lease_id))
            ]
        request.query = [('blockid', base64.b64encode(_str_or_none(blockid)))]

@@ -589,7 +592,7 @@ class BlobService(_StorageClient):
        request.headers = _update_storage_blob_header(request, self.account_name, self.account_key)
        response = self._perform_request(request)

-   def put_block_list(self, container_name, blob_name, block_list, content_m_d5=None, x_ms_blob_cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_meta_name_values=None, x_ms_lease_id=None):
+   def put_block_list(self, container_name, blob_name, block_list, content_md5=None, x_ms_blob_cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_meta_name_values=None, x_ms_lease_id=None):
        '''
        Writes a blob by specifying the list of block IDs that make up the blob. In order to
        be written as part of a blob, a block must have been successfully written to the server

@@ -624,7 +627,7 @@ class BlobService(_StorageClient):
        request.host = _get_blob_host(self.account_name, self.use_local_storage)
        request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=blocklist'
        request.headers = [
-           ('Content-MD5', _str_or_none(content_m_d5)),
+           ('Content-MD5', _str_or_none(content_md5)),
            ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)),
            ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)),
            ('x-ms-blob-content-encoding', _str_or_none(x_ms_blob_content_encoding)),

@@ -666,7 +669,7 @@ class BlobService(_StorageClient):

        return convert_response_to_block_list(response)

-   def put_page(self, container_name, blob_name, page, x_ms_range, x_ms_page_write, timeout=None, content_m_d5=None, x_ms_lease_id=None, x_ms_if_sequence_number_lte=None, x_ms_if_sequence_number_lt=None, x_ms_if_sequence_number_eq=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None):
+   def put_page(self, container_name, blob_name, page, x_ms_range, x_ms_page_write, timeout=None, content_md5=None, x_ms_lease_id=None, x_ms_if_sequence_number_lte=None, x_ms_if_sequence_number_lt=None, x_ms_if_sequence_number_eq=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None):
        '''
        Writes a range of pages to a page blob.

@@ -698,7 +701,7 @@ class BlobService(_StorageClient):
        request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=page'
        request.headers = [
            ('x-ms-range', _str_or_none(x_ms_range)),
-           ('Content-MD5', _str_or_none(content_m_d5)),
+           ('Content-MD5', _str_or_none(content_md5)),
            ('x-ms-page-write', _str_or_none(x_ms_page_write)),
            ('x-ms-lease-id', _str_or_none(x_ms_lease_id)),
            ('x-ms-if-sequence-number-lte', _str_or_none(x_ms_if_sequence_number_lte)),
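Note (usage sketch, placeholder credentials): the renamed `content_md5` parameters and the new `BlobResult` return type change how downloads are consumed; the result still behaves like a `str`, but it also carries the response headers:

```Python
from azure.storage import BlobService

blob_service = BlobService('myaccount', 'mykey')   # placeholder credentials
blob_service.create_container('taskcontainer')
blob_service.put_blob('taskcontainer', 'task1', 'hello world', 'BlockBlob')

blob = blob_service.get_blob('taskcontainer', 'task1')
print(blob)              # 'hello world' -- BlobResult subclasses str
print(blob.properties)   # dict built from the response headers
```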
@@ -19,11 +19,11 @@ import urllib2
from azure.storage import *
from azure.storage.storageclient import _StorageClient
from azure.storage import (_update_storage_queue_header)
-from azure.http import HTTPRequest
+from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT
from azure import (_validate_not_none, Feed,
        _convert_response_to_feeds, _str_or_none, _int_or_none,
        _get_request_body, _update_request_uri_query,
-       _dont_fail_on_exist, _dont_fail_not_exist,
+       _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError,
        WindowsAzureError, _parse_response, _convert_class_to_xml,
        _parse_response_for_dict, _parse_response_for_dict_prefix,
        _parse_response_for_dict_filter,

@@ -96,13 +96,17 @@ class QueueService(_StorageClient):
        request.headers = _update_storage_queue_header(request, self.account_name, self.account_key)
        if not fail_on_exist:
            try:
-               self._perform_request(request)
+               response = self._perform_request(request)
+               if response.status == HTTP_RESPONSE_NO_CONTENT:
+                   return False
                return True
            except WindowsAzureError as e:
                _dont_fail_on_exist(e)
                return False
        else:
-           self._perform_request(request)
+           response = self._perform_request(request)
+           if response.status == HTTP_RESPONSE_NO_CONTENT:
+               raise WindowsAzureConflictError(azure._ERROR_CONFLICT)
            return True

    def delete_queue(self, queue_name, fail_not_exist=False):
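Note (usage sketch, placeholder credentials): create_queue now distinguishes "created" from "already existed" via the 204 No Content response, instead of reporting success in both cases:

```Python
from azure.storage import QueueService
from azure import WindowsAzureConflictError

queue_service = QueueService('myaccount', 'mykey')

print(queue_service.create_queue('taskqueue'))   # True  - queue was created
print(queue_service.create_queue('taskqueue'))   # False - it already existed

try:
    queue_service.create_queue('taskqueue', fail_on_exist=True)
except WindowsAzureConflictError:
    print('queue already exists')
```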
@@ -18,6 +18,7 @@ import hmac
import hashlib
+import os

from azure.storage import _storage_error_handler, X_MS_VERSION
from azure.http.httpclient import _HTTPClient
from azure.http import HTTPError

@@ -38,8 +39,15 @@ class _StorageClient(object):
    '''

    def __init__(self, account_name=None, account_key=None, protocol='http'):
-       self.account_name = account_name
-       self.account_key = account_key
+       if account_name is not None:
+           self.account_name = account_name.encode('ascii', 'ignore')
+       else:
+           self.account_name = None
+       if account_key is not None:
+           self.account_key = account_key.encode('ascii', 'ignore')
+       else:
+           self.account_key = None
+
        self.requestid = None
        self.protocol = protocol

@@ -60,7 +68,7 @@ class _StorageClient(object):
        #get the account and key from environment variables if the app is not run
        #in azure emulator or use default development storage account and key if
        #app is run in emulator.
-       if not account_name or not account_key:
+       if not self.account_name or not self.account_key:
            if self.is_emulated:
                self.account_name = DEV_ACCOUNT_NAME
                self.account_key = DEV_ACCOUNT_KEY

@@ -70,15 +78,12 @@ class _StorageClient(object):
                    self.account_name = os.environ[AZURE_STORAGE_ACCOUNT]
                if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
                    self.account_key = os.environ[AZURE_STORAGE_ACCESS_KEY]
-           else:
-               self.account_name = account_name
-               self.account_key = account_key

        if not self.account_name or not self.account_key:
            raise WindowsAzureError(azure._ERROR_STORAGE_MISSING_INFO)

        self.x_ms_version = X_MS_VERSION
-       self._httpclient = _HTTPClient(service_instance=self, account_key=account_key, account_name=account_name, x_ms_version=self.x_ms_version, protocol=protocol)
+       self._httpclient = _HTTPClient(service_instance=self, account_key=self.account_key, account_name=self.account_name, x_ms_version=self.x_ms_version, protocol=protocol)
        self._batchclient = None
        self._filter = self._perform_request_worker

@@ -111,6 +116,4 @@ class _StorageClient(object):
        except HTTPError as e:
            _storage_error_handler(e)

-       if not resp:
-           return None
        return resp
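Note (illustrative): with the reworked `_StorageClient.__init__`, credentials can also come from the environment when none are passed explicitly. The constant names suggest environment variables of the same name; treat the exact variable names below as assumptions:

```Python
import os
from azure.storage import BlobService

# Assumed variable names, mirroring the AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_ACCESS_KEY constants
os.environ['AZURE_STORAGE_ACCOUNT'] = 'myaccount'
os.environ['AZURE_STORAGE_ACCESS_KEY'] = 'mykey'

# No explicit credentials: the client falls back to the environment
# (or to the development account when the emulator flag is set).
blob_service = BlobService()
```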
@@ -23,11 +23,11 @@ from azure.storage import (_update_storage_table_header,
        convert_entity_to_xml, _convert_response_to_entity,
        _convert_xml_to_entity, _sign_storage_table_request)
from azure.http.batchclient import _BatchClient
-from azure.http import HTTPRequest
+from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT
from azure import (_validate_not_none, Feed,
        _convert_response_to_feeds, _str_or_none, _int_or_none,
        _get_request_body, _update_request_uri_query,
-       _dont_fail_on_exist, _dont_fail_not_exist,
+       _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError,
        WindowsAzureError, _parse_response, _convert_class_to_xml,
        _parse_response_for_dict, _parse_response_for_dict_prefix,
        _parse_response_for_dict_filter,

@@ -90,7 +90,7 @@ class TableService(_StorageClient):

        return _parse_response_for_dict(response)

-   def query_tables(self, table_name = None, top=None):
+   def query_tables(self, table_name = None, top=None, next_table_name=None):
        '''
        Returns a list of tables under the specified account.

@@ -105,7 +105,10 @@ class TableService(_StorageClient):
        else:
            uri_part_table_name = ""
        request.path = '/Tables' + uri_part_table_name + ''
-       request.query = [('$top', _int_or_none(top))]
+       request.query = [
+           ('$top', _int_or_none(top)),
+           ('NextTableName', _str_or_none(next_table_name))
+           ]
        request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage)
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

@@ -116,7 +119,9 @@ class TableService(_StorageClient):
        '''
        Creates a new table in the storage account.

-       table: name of the table to create.
+       table: name of the table to create. Table name may contain only alphanumeric characters
+           and cannot begin with a numeric character. It is case-insensitive and must be from
+           3 to 63 characters long.
        fail_on_exist: specify whether throw exception when table exists.
        '''
        _validate_not_none('table', table)

@@ -184,7 +189,7 @@ class TableService(_StorageClient):

        return _convert_response_to_entity(response)

-   def query_entities(self, table_name, filter=None, select=None, top=None):
+   def query_entities(self, table_name, filter=None, select=None, top=None, next_partition_key=None, next_row_key=None):
        '''
        Get entities in a table; includes the $filter and $select options.

@@ -201,7 +206,9 @@ class TableService(_StorageClient):
        request.query = [
            ('$filter', _str_or_none(filter)),
            ('$select', _str_or_none(select)),
-           ('$top', _int_or_none(top))
+           ('$top', _int_or_none(top)),
+           ('NextPartitionKey', _str_or_none(next_partition_key)),
+           ('NextRowKey', _str_or_none(next_row_key))
            ]
        request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage)
        request.headers = _update_storage_table_header(request)

@@ -229,6 +236,8 @@ class TableService(_StorageClient):
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

+       return _convert_response_to_entity(response)

    def update_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'):
        '''
        Updates an existing entity in a table. The Update Entity operation replaces the entire

@@ -257,6 +266,8 @@ class TableService(_StorageClient):
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

+       return _parse_response_for_dict_filter(response, filter=['etag'])

    def merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'):
        '''
        Updates an existing entity by updating the entity's properties. This operation does

@@ -285,6 +296,8 @@ class TableService(_StorageClient):
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

+       return _parse_response_for_dict_filter(response, filter=['etag'])

    def delete_entity(self, table_name, partition_key, row_key, content_type='application/atom+xml', if_match='*'):
        '''
        Deletes an existing entity in a table.

@@ -338,7 +351,9 @@ class TableService(_StorageClient):
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

+       return _parse_response_for_dict_filter(response, filter=['etag'])

-   def insert_or_merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'):
+   def insert_or_merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml'):
        '''
        Merges an existing entity or inserts a new entity if it does not exist in the table.
        Because this operation can insert or update an entity, it is also known as an "upsert"

@@ -358,15 +373,14 @@ class TableService(_StorageClient):
        request.method = 'MERGE'
        request.host = _get_table_host(self.account_name, self.use_local_storage)
        request.path = '/' + str(table_name) + '(PartitionKey=\'' + str(partition_key) + '\',RowKey=\'' + str(row_key) + '\')'
-       request.headers = [
-           ('Content-Type', _str_or_none(content_type)),
-           ('If-Match', _str_or_none(if_match))
-           ]
+       request.headers = [('Content-Type', _str_or_none(content_type))]
        request.body = _get_request_body(convert_entity_to_xml(entity))
        request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage)
        request.headers = _update_storage_table_header(request)
        response = self._perform_request(request)

+       return _parse_response_for_dict_filter(response, filter=['etag'])

    def _perform_request_worker(self, request):
        auth = _sign_storage_table_request(request,
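Note (illustrative): `query_tables` and `query_entities` now accept server continuation tokens. How the tokens are returned to the caller is not shown in this diff, so the placeholder strings below are assumptions:

```Python
from azure.storage import TableService

table_service = TableService('myaccount', 'mykey')   # placeholder credentials

first_page = table_service.query_entities('tasktable', top=1000)
# Assumption: the NextPartitionKey/NextRowKey continuation values come back with
# the first response (for example in its headers); pass them to fetch the next page.
second_page = table_service.query_entities(
    'tasktable', top=1000,
    next_partition_key='<NextPartitionKey from the first response>',
    next_row_key='<NextRowKey from the first response>')
```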
@@ -21,7 +21,8 @@ marker: Optional. A string value that identifies the portion of the list to be r
with the next list operation.
maxresults: Optional. Specifies the maximum number of containers to return.
include: Optional. Include this parameter to specify that the container's metadata be
-   returned as part of the response body.
+   returned as part of the response body. set this parameter to string 'metadata' to
+   get container's metadata.
[return]
ContainerEnumResults
[url]

@@ -243,7 +244,7 @@ container_name: the name of container to get the blob
blob_name: the name of blob
x_ms_range: Optional. Return only the bytes of the blob in the specified range.
[return]
-str
+BlobResult
[url]
GET http://<account-name>.blob.core.windows.net/<container-name>/<blob-name>
[query]

@@ -320,6 +321,8 @@ x_ms_lease_id: Optional. If this header is specified, the operation will be perf
1. The blob's lease is currently active
2. The lease ID specified in the request matches that of the blob.
[return]
dict
filter=['x-ms-snapshot', 'etag', 'last-modified']
[url]
PUT http://<account-name>.blob.core.windows.net/<container-name>/<blob-name>?comp=snapshot
[query]
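Note (usage sketch): the snapshot operation's return type is now documented as a dict filtered to the headers listed above. The method name `snapshot_blob` is inferred from the `comp=snapshot` URL and is an assumption here:

```Python
from azure.storage import BlobService

blob_service = BlobService('myaccount', 'mykey')               # placeholder credentials
result = blob_service.snapshot_blob('taskcontainer', 'task1')  # assumed method name

print(result['x-ms-snapshot'])    # identifies the snapshot that was created
print(result['etag'], result['last-modified'])
```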
@@ -40,8 +40,8 @@ def to_legalname(name):
        if ch.isupper():
            legalname += '_'
        legalname += ch
-   legalname = legalname.replace('__', '_').replace('_m_d5', '_md5')
-   return legalname.lower()
+   legalname = legalname.replace('__', '_').lower().replace('_m_d5', '_md5')
+   return legalname

def normalize_xml(xmlstr):
    if xmlstr:

@@ -99,7 +99,7 @@ def output_import(output_file, class_name):
    output_str += 'from azure.storage import *\n'
    output_str += 'from azure.storage.storageclient import _StorageClient\n'
    if 'Blob' in class_name:
-       output_str += 'from azure.storage import (_update_storage_blob_header,\n'
+       output_str += 'from azure.storage import (_update_storage_blob_header, _create_blob_result,\n'
        output_str += indent*8 + 'convert_block_list_to_xml, convert_response_to_block_list) \n'
    elif 'Queue' in class_name:
        output_str += 'from azure.storage import (_update_storage_queue_header)\n'

@@ -115,7 +115,7 @@ def output_import(output_file, class_name):
    output_str += 'from azure import (_validate_not_none, Feed,\n'
    output_str += indent*8 + '_convert_response_to_feeds, _str_or_none, _int_or_none,\n'
    output_str += indent*8 + '_get_request_body, _update_request_uri_query, \n'
-   output_str += indent*8 + '_dont_fail_on_exist, _dont_fail_not_exist, \n'
+   output_str += indent*8 + '_dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError, \n'
    output_str += indent*8 + 'WindowsAzureError, _parse_response, _convert_class_to_xml, \n'
    output_str += indent*8 + '_parse_response_for_dict, _parse_response_for_dict_prefix, \n'
    output_str += indent*8 + '_parse_response_for_dict_filter, \n'

@@ -260,7 +260,7 @@ def output_list(list_name, request_list, validate_conversions):
    return output_list_str


-def output_method_body(return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param):
+def output_method_body(method_name, return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param):
    indent = ' '
    output_body = ''.join([indent*2, 'request = HTTPRequest()\n'])

@@ -341,16 +341,32 @@ def output_method_body(return_type, method_params, uri_param, req_protocol, req_

    for name, value in method_params:
        if 'fail_on_exist' in name:
-           output_body += indent*2 + 'if not ' + name + ':\n'
-           output_body += indent*3 + 'try:\n'
-           output_body += ''.join([indent*4, 'self._perform_request(request)\n'])
-           output_body += ''.join([indent*4, 'return True\n'])
-           output_body += indent*3 + 'except WindowsAzureError as e:\n'
-           output_body += indent*4 + '_dont_fail_on_exist(e)\n'
-           output_body += indent*4 + 'return False\n'
-           output_body += indent*2 + 'else:\n'
-           output_body += ''.join([indent*3, 'self._perform_request(request)\n'])
-           output_body += ''.join([indent*3, 'return True\n\n'])
+           if method_name == 'create_queue' and 'queue.core' in req_host: #QueueService create_queue
+               output_body += indent*2 + 'if not ' + name + ':\n'
+               output_body += indent*3 + 'try:\n'
+               output_body += ''.join([indent*4, 'response = self._perform_request(request)\n'])
+               output_body += ''.join([indent*4, 'if response.status == 204:\n'])
+               output_body += ''.join([indent*5, 'return False\n'])
+               output_body += ''.join([indent*4, 'return True\n'])
+               output_body += indent*3 + 'except WindowsAzureError as e:\n'
+               output_body += indent*4 + '_dont_fail_on_exist(e)\n'
+               output_body += indent*4 + 'return False\n'
+               output_body += indent*2 + 'else:\n'
+               output_body += ''.join([indent*3, 'response = self._perform_request(request)\n'])
+               output_body += ''.join([indent*3, 'if response.status == 204:\n'])
+               output_body += ''.join([indent*4, 'raise WindowsAzureConflictError(azure._ERROR_CONFLICT)\n'])
+               output_body += ''.join([indent*3, 'return True\n\n'])
+           else:
+               output_body += indent*2 + 'if not ' + name + ':\n'
+               output_body += indent*3 + 'try:\n'
+               output_body += ''.join([indent*4, 'self._perform_request(request)\n'])
+               output_body += ''.join([indent*4, 'return True\n'])
+               output_body += indent*3 + 'except WindowsAzureError as e:\n'
+               output_body += indent*4 + '_dont_fail_on_exist(e)\n'
+               output_body += indent*4 + 'return False\n'
+               output_body += indent*2 + 'else:\n'
+               output_body += ''.join([indent*3, 'self._perform_request(request)\n'])
+               output_body += ''.join([indent*3, 'return True\n\n'])
            break
        elif 'fail_not_exist' in name:
            output_body += indent*2 + 'if not ' + name + ':\n'

@@ -383,13 +399,15 @@ def output_method_body(return_type, method_params, uri_param, req_protocol, req_
        elif return_type == 'PageList':
            output_body += indent*2 + 'return _parse_simple_list(response, PageList, PageRange, "page_ranges")'
        else:
-           if return_type == 'Message':
+           if return_type == 'BlobResult':
+               output_body += indent*2 + 'return _create_blob_result(response)\n\n'
+           elif return_type == 'Message':
                output_body += indent*2 + 'return _create_message(response, self)\n\n'
            elif return_type == 'str':
                output_body += indent*2 + 'return response.body\n\n'
            elif return_type == 'BlobBlockList':
                output_body += indent*2 + 'return convert_response_to_block_list(response)\n\n'
            elif 'Feed' in return_type:
                for name in ['table', 'entity', 'topic', 'subscription', 'queue', 'rule']:
                    if name +'\'),' in return_type:
                        convert_func = '_convert_xml_to_' + name

@@ -412,7 +430,7 @@ def output_method(output_file, method_name, method_params, method_comment, retur
    output_str += output_method_def(method_name, method_params, uri_param, req_param, req_query, req_header)
    output_str += output_method_comments(method_comment, req_param, req_query, req_header)
    output_str += output_method_validates(uri_param, req_param, req_query, req_header)
-   output_str += output_method_body(return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param)
+   output_str += output_method_body(method_name, return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param)
    output_file.write(output_str)


@@ -686,20 +704,32 @@ if __name__ == '__main__':
    auto_codegen('queue_input.txt', '../azure/storage/queueservice.py')
    auto_codegen('servicebus_input.txt', '../azure/servicebus/servicebusservice.py')

-   def add_license(license_file_name, output_file_name):
-       license_file = open(license_file_name, 'r')
+   def add_license(license_str, output_file_name):
        output_file = open(output_file_name, 'r')
        content = output_file.read()
-       license_txt = license_file.read()
-       license_file.close()
        output_file.close()
        output_file = open(output_file_name, 'w')
-       output_file.write(license_txt)
+       output_file.write(license_str)
        output_file.write(content)
        output_file.close()

+   license_str = '''#-------------------------------------------------------------------------
+# Copyright 2011 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+'''

-   add_license('license.txt', '../azure/storage/blobservice.py')
-   add_license('license.txt', '../azure/storage/tableservice.py')
-   add_license('license.txt', '../azure/storage/queueservice.py')
-   add_license('license.txt', '../azure/servicebus/servicebusservice.py')
+   add_license(license_str, '../azure/storage/blobservice.py')
+   add_license(license_str, '../azure/storage/tableservice.py')
+   add_license(license_str, '../azure/storage/queueservice.py')
+   add_license(license_str, '../azure/servicebus/servicebusservice.py')
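Note (worked example of the `to_legalname` fix): lower-casing has to happen before the `_m_d5` cleanup, otherwise `ContentMD5` still comes out as `content_m_d5`. Standalone re-implementation for illustration; the leading underscore is stripped here only for readability:

```Python
def to_legalname(name):
    legalname = ''
    for ch in name:
        if ch.isupper():
            legalname += '_'
        legalname += ch
    # fixed order: collapse doubles, lower-case, then fold '_m_d5' into '_md5'
    legalname = legalname.replace('__', '_').lower().replace('_m_d5', '_md5')
    return legalname.lstrip('_')

print(to_legalname('ContentMD5'))    # content_md5 (was content_m_d5 before the fix)
print(to_legalname('CacheControl'))  # cache_control
```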
@@ -470,8 +470,6 @@ def _perform_request(self, request):
        except HTTPError as e:
            return _service_bus_error_handler(e)

-       if not resp:
-           return None
        return resp

[end]
@@ -46,13 +46,16 @@ top: the maximum number of tables to return
GET http://<account-name>.table.core.windows.net/Tables<?table_name:('[table_name]')>
[query]
$top=
+NextTableName=

[method]
create_table
[comment]
Creates a new table in the storage account.

-table: name of the table to create.
+table: name of the table to create. Table name may contain only alphanumeric characters
+    and cannot begin with a numeric character. It is case-insensitive and must be from
+    3 to 63 characters long.
fail_on_exist: specify whether throw exception when table exists.
[params]
fail_on_exist=False

@@ -105,6 +108,8 @@ GET http://<account-name>.table.core.windows.net/<table-name>()
$filter=
$select=
$top=
+NextPartitionKey=
+NextRowKey=

[method]
insert_entity

@@ -113,6 +118,7 @@ Inserts a new entity into a table.

entity: Required. The entity object to insert. Could be a dict format or entity object.
[return]
+Feed('entity')
[url]
POST http://<account-name>.table.core.windows.net/<table-name>
[requestheader]

@@ -130,6 +136,8 @@ entity: Required. The entity object to insert. Could be a dict format or entity
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
[return]
+dict
+filter=['etag']
[url]
PUT http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\')
[requestheader]

@@ -148,6 +156,8 @@ entity: Required. The entity object to insert. Can be a dict format or entity ob
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
[return]
+dict
+filter=['etag']
[url]
MERGE http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\')
[requestheader]

@@ -183,6 +193,8 @@ entity: Required. The entity object to insert. Could be a dict format or entity
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
[return]
+dict
+filter=['etag']
[url]
PUT http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\')
[requestheader]

@@ -201,11 +213,12 @@ entity: Required. The entity object to insert. Could be a dict format or entity
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
[return]
+dict
+filter=['etag']
[url]
MERGE http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\')
[requestheader]
Content-Type=application/atom+xml;required:application/atom+xml|#this is required and has to be set to application/atom+xml
-If-Match=*
[requestbody]
feed:entity;required:feed
@@ -6,7 +6,7 @@
    <ProjectGuid>{c0742a2d-4862-40e4-8a28-036eecdbc614}</ProjectGuid>
    <ProjectHome>
    </ProjectHome>
-   <StartupFile>azuretest\test_tableservice.py</StartupFile>
+   <StartupFile>azuretest\test_servicebusservice.py</StartupFile>
    <WorkingDirectory>.</WorkingDirectory>
    <OutputPath>.</OutputPath>
    <Name>azuretest</Name>

@@ -19,8 +19,13 @@
    <ClusterRunEnvironment>localhost/1/Core/</ClusterRunEnvironment>
    <ClusterTargetPlatform>X86</ClusterTargetPlatform>
    <IsWindowsApplication>False</IsWindowsApplication>
-   <InterpreterId>2af0f10d-7135-4994-9156-5d01c9c11b7e</InterpreterId>
+   <InterpreterId>9a7a9026-48c1-4688-9d5d-e5699d47d074</InterpreterId>
    <InterpreterVersion>2.7</InterpreterVersion>
+   <SearchPath>C:\Users\a-huvalo\Documents\Visual Studio 2010\Projects\PTVS\Open_Source\Incubation\windowsazure\src\</SearchPath>
+   <SccProjectName>$/TCWCS/Python/Main/Open_Source/Incubation/windowsazure/test</SccProjectName>
+   <SccProvider>{4CA58AB2-18FA-4F8D-95D4-32DDF27D184C}</SccProvider>
+   <SccAuxPath>http://tcvstf:8080/tfs/tc</SccAuxPath>
+   <SccLocalPath>.</SccLocalPath>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
    <DebugSymbols>true</DebugSymbols>

@@ -34,14 +39,23 @@
    <Folder Include="azuretest" />
  </ItemGroup>
  <ItemGroup>
+   <Compile Include="azuretest\clean.py" />
+   <Compile Include="azuretest\doctest_blobservice.py" />
+   <Compile Include="azuretest\doctest_queueservice.py" />
+   <Compile Include="azuretest\doctest_servicebusservicequeue.py" />
+   <Compile Include="azuretest\doctest_servicebusservicetopic.py" />
+   <Compile Include="azuretest\doctest_tableservice.py" />
    <Compile Include="azuretest\test_blobservice.py" />
+   <Compile Include="azuretest\test_cloudstorageaccount.py" />
    <Compile Include="azuretest\test_queueservice.py" />
+   <Compile Include="azuretest\test_sharedaccesssignature.py" />
    <Compile Include="azuretest\test_tableservice.py" />
+   <Compile Include="azuretest\test_servicebusservice.py" />
    <Compile Include="azuretest\util.py" />
    <Compile Include="azuretest\__init__.py" />
  </ItemGroup>
  <ItemGroup>
    <Content Include="run.bash" />
    <Content Include="run.bat" />
  </ItemGroup>
  <Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />
@@ -0,0 +1,78 @@
#-------------------------------------------------------------------------
# Copyright 2011 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------

from azure import *
from azure.storage import *
from azure.servicebus import *
from azuretest.util import *

print('WARNING!!!')
print('')
print('This program cleans the storage account and the service namespace specified')
print('by the unit test credentials file (windowsazurecredentials.json) located in')
print('your home directory.')
print('')
print('You should not run this program while tests are running as this will')
print('interfere with the tests.')
print('')
print('The following will be deleted from the storage account:')
print(' - All containers')
print(' - All tables')
print(' - All queues')
print('')
print('The following will be deleted from the service namespace:')
print(' - All queues')
print(' - All topics')
print('')
print('Enter YES to proceed, or anything else to cancel')
print('')

input = raw_input('>')
if input == 'YES':
    print('Cleaning storage account...')

    bc = BlobService(credentials.getStorageServicesName(),
                     credentials.getStorageServicesKey())

    ts = TableService(credentials.getStorageServicesName(),
                      credentials.getStorageServicesKey())

    qs = QueueService(credentials.getStorageServicesName(),
                      credentials.getStorageServicesKey())

    for container in bc.list_containers():
        bc.delete_container(container.name)

    for table in ts.query_tables():
        ts.delete_table(table.name)

    for queue in qs.list_queues():
        qs.delete_queue(queue.name)

    print('Cleaning service namespace...')

    sbs = ServiceBusService(credentials.getServiceBusNamespace(),
                            credentials.getServiceBusKey(),
                            'owner')

    for queue in sbs.list_queues():
        sbs.delete_queue(queue.name)

    for topic in sbs.list_topics():
        sbs.delete_topic(topic.name)

    print('Done.')
else:
    print('Canceled.')
@ -0,0 +1,65 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
How to: Create a Container
|
||||
--------------------------
|
||||
>>> from azure.storage import *
|
||||
>>> blob_service = BlobService(name, key)
|
||||
>>> blob_service.create_container('mycontainer')
|
||||
True
|
||||
|
||||
>>> blob_service.create_container('mycontainer2', x_ms_blob_public_access='container')
|
||||
True
|
||||
|
||||
>>> blob_service.set_container_acl('mycontainer', x_ms_blob_public_access='container')
|
||||
|
||||
How to: Upload a Blob into a Container
|
||||
--------------------------------------
|
||||
>>> myblob = 'hello blob'
|
||||
>>> blob_service.put_blob('mycontainer', 'myblob', myblob, x_ms_blob_type='BlockBlob')
|
||||
|
||||
How to: List the Blobs in a Container
|
||||
-------------------------------------
|
||||
>>> blobs = blob_service.list_blobs('mycontainer')
|
||||
>>> for blob in blobs:
|
||||
... print(blob.name)
|
||||
myblob
|
||||
|
||||
How to: Download Blobs
|
||||
----------------------
|
||||
>>> blob = blob_service.get_blob('mycontainer', 'myblob')
|
||||
>>> print(blob)
|
||||
hello blob
|
||||
|
||||
How to: Delete a Blob
|
||||
---------------------
|
||||
>>> blob_service.delete_blob('mycontainer', 'myblob')
|
||||
|
||||
>>> blob_service.delete_container('mycontainer')
|
||||
True
|
||||
|
||||
>>> blob_service.delete_container('mycontainer2')
|
||||
True
|
||||
|
||||
"""
|
||||
from azuretest.util import *
|
||||
|
||||
name = credentials.getStorageServicesName()
|
||||
key = credentials.getStorageServicesKey()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
|
@ -0,0 +1,81 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
How To: Create a Queue
|
||||
----------------------
|
||||
>>> from azure.storage import *
|
||||
>>> queue_service = QueueService(name, key)
|
||||
>>> queue_service.create_queue('taskqueue')
|
||||
True
|
||||
|
||||
How To: Insert a Message into a Queue
|
||||
-------------------------------------
|
||||
>>> queue_service.put_message('taskqueue', 'Hello World')
|
||||
|
||||
How To: Peek at the Next Message
|
||||
--------------------------------
|
||||
>>> messages = queue_service.peek_messages('taskqueue')
|
||||
>>> for message in messages:
|
||||
... print(message.message_text)
|
||||
...
|
||||
Hello World
|
||||
|
||||
How To: Dequeue the Next Message
|
||||
--------------------------------
|
||||
>>> messages = queue_service.get_messages('taskqueue')
|
||||
>>> for message in messages:
|
||||
... print(message.message_text)
|
||||
... queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
|
||||
Hello World
|
||||
|
||||
How To: Change the Contents of a Queued Message
|
||||
-----------------------------------------------
|
||||
>>> queue_service.put_message('taskqueue', 'Hello World')
|
||||
>>> messages = queue_service.get_messages('taskqueue')
|
||||
>>> for message in messages:
|
||||
... res = queue_service.update_message('taskqueue', message.message_id, 'Hello World Again', message.pop_receipt, 0)
|
||||
|
||||
How To: Additional Options for Dequeuing Messages
|
||||
-------------------------------------------------
|
||||
>>> queue_service.put_message('taskqueue', 'Hello World')
|
||||
>>> messages = queue_service.get_messages('taskqueue', numofmessages=16, visibilitytimeout=5*60)
|
||||
>>> for message in messages:
|
||||
... print(message.message_text)
|
||||
... queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
|
||||
Hello World Again
|
||||
Hello World
|
||||
|
||||
How To: Get the Queue Length
|
||||
----------------------------
|
||||
>>> queue_metadata = queue_service.get_queue_metadata('taskqueue')
|
||||
>>> count = queue_metadata['x-ms-approximate-messages-count']
|
||||
>>> count
|
||||
u'0'
|
||||
|
||||
How To: Delete a Queue
|
||||
----------------------
|
||||
>>> queue_service.delete_queue('taskqueue')
|
||||
True
|
||||
|
||||
"""
|
||||
from azuretest.util import *
|
||||
|
||||
name = credentials.getStorageServicesName()
|
||||
key = credentials.getStorageServicesKey()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
|
@ -0,0 +1,64 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
How To: Create a Queue
|
||||
----------------------
|
||||
>>> from azure.servicebus import *
|
||||
>>> bus_service = ServiceBusService(ns, key, 'owner')
|
||||
>>> bus_service.create_queue('taskqueue')
|
||||
True
|
||||
|
||||
>>> queue_options = Queue()
|
||||
>>> queue_options.max_size_in_megabytes = '5120'
|
||||
>>> queue_options.default_message_time_to_live = 'PT1M'
|
||||
>>> bus_service.create_queue('taskqueue2', queue_options)
|
||||
True
|
||||
|
||||
How to Send Messages to a Queue
|
||||
-------------------------------
|
||||
>>> msg = Message('Test Message')
|
||||
>>> bus_service.send_queue_message('taskqueue', msg)
|
||||
|
||||
How to Receive Messages from a Queue
|
||||
------------------------------------
|
||||
>>> msg = bus_service.receive_queue_message('taskqueue')
|
||||
>>> print(msg.body)
|
||||
Test Message
|
||||
|
||||
>>> msg = Message('Test Message')
|
||||
>>> bus_service.send_queue_message('taskqueue', msg)
|
||||
|
||||
>>> msg = bus_service.receive_queue_message('taskqueue', peek_lock=True)
|
||||
>>> print(msg.body)
|
||||
Test Message
|
||||
>>> msg.delete()
|
||||
|
||||
|
||||
>>> bus_service.delete_queue('taskqueue')
|
||||
True
|
||||
|
||||
>>> bus_service.delete_queue('taskqueue2')
|
||||
True
|
||||
|
||||
"""
|
||||
from azuretest.util import *
|
||||
|
||||
ns = credentials.getServiceBusNamespace()
|
||||
key = credentials.getServiceBusKey()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
|
@ -0,0 +1,95 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
How to Create a Topic
|
||||
---------------------
|
||||
>>> from azure.servicebus import *
|
||||
>>> bus_service = ServiceBusService(ns, key, 'owner')
|
||||
>>> bus_service.create_topic('mytopic')
|
||||
True
|
||||
|
||||
>>> topic_options = Topic()
|
||||
>>> topic_options.max_size_in_megabytes = '5120'
|
||||
>>> topic_options.default_message_time_to_live = 'PT1M'
|
||||
>>> bus_service.create_topic('mytopic2', topic_options)
|
||||
True
|
||||
|
||||
How to Create Subscriptions
|
||||
---------------------------
|
||||
>>> bus_service.create_subscription('mytopic', 'AllMessages')
|
||||
True
|
||||
|
||||
>>> bus_service.create_subscription('mytopic', 'HighMessages')
|
||||
True
|
||||
|
||||
>>> rule = Rule()
|
||||
>>> rule.filter_type = 'SqlFilter'
|
||||
>>> rule.filter_expression = 'messagenumber > 3'
|
||||
>>> bus_service.create_rule('mytopic', 'HighMessages', 'HighMessageFilter', rule)
|
||||
True
|
||||
|
||||
>>> bus_service.delete_rule('mytopic', 'HighMessages', DEFAULT_RULE_NAME)
|
||||
True
|
||||
|
||||
>>> bus_service.create_subscription('mytopic', 'LowMessages')
|
||||
True
|
||||
|
||||
>>> rule = Rule()
|
||||
>>> rule.filter_type = 'SqlFilter'
|
||||
>>> rule.filter_expression = 'messagenumber <= 3'
|
||||
>>> bus_service.create_rule('mytopic', 'LowMessages', 'LowMessageFilter', rule)
|
||||
True
|
||||
|
||||
>>> bus_service.delete_rule('mytopic', 'LowMessages', DEFAULT_RULE_NAME)
|
||||
True
|
||||
|
||||
How to Send Messages to a Topic
|
||||
-------------------------------
|
||||
>>> for i in range(5):
|
||||
... msg = Message('Msg ' + str(i), custom_properties={'messagenumber':i})
|
||||
... bus_service.send_topic_message('mytopic', msg)
|
||||
|
||||
How to Receive Messages from a Subscription
|
||||
-------------------------------------------
|
||||
>>> msg = bus_service.receive_subscription_message('mytopic', 'LowMessages')
|
||||
>>> print(msg.body)
|
||||
Msg 0
|
||||
|
||||
>>> msg = bus_service.receive_subscription_message('mytopic', 'LowMessages', peek_lock=True)
|
||||
>>> print(msg.body)
|
||||
Msg 1
|
||||
>>> msg.delete()
|
||||
|
||||
How to Delete Topics and Subscriptions
|
||||
--------------------------------------
|
||||
>>> bus_service.delete_subscription('mytopic', 'HighMessages')
|
||||
True
|
||||
|
||||
>>> bus_service.delete_queue('mytopic')
|
||||
True
|
||||
|
||||
>>> bus_service.delete_queue('mytopic2')
|
||||
True
|
||||
|
||||
"""
|
||||
from azuretest.util import *
|
||||
|
||||
ns = credentials.getServiceBusNamespace()
|
||||
key = credentials.getServiceBusKey()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
|
@ -0,0 +1,116 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
How To: Create a Table
|
||||
----------------------
|
||||
>>> from azure.storage import *
|
||||
>>> table_service = TableService(name, key)
|
||||
>>> table_service.create_table('tasktable')
|
||||
True
|
||||
|
||||
How to Add an Entity to a Table
|
||||
-------------------------------
|
||||
>>> task = {'PartitionKey': 'tasksSeattle', 'RowKey': '1', 'description' : 'Take out the trash', 'priority' : 200}
|
||||
>>> table_service.insert_entity('tasktable', task)
|
||||
|
||||
>>> task = Entity()
|
||||
>>> task.PartitionKey = 'tasksSeattle'
|
||||
>>> task.RowKey = '2'
|
||||
>>> task.description = 'Wash the car'
|
||||
>>> task.priority = 100
|
||||
>>> table_service.insert_entity('tasktable', task)
|
||||
|
||||
How to Update an Entity
|
||||
-----------------------
|
||||
>>> task = {'description' : 'Take out the garbage', 'priority' : 250}
|
||||
>>> table_service.update_entity('tasktable', 'tasksSeattle', '1', task)
|
||||
|
||||
>>> task = {'description' : 'Take out the garbage again', 'priority' : 250}
|
||||
>>> table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '1', task)
|
||||
|
||||
>>> task = {'description' : 'Buy detergent', 'priority' : 300}
|
||||
>>> table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '3', task)
|
||||
|
||||
|
||||
How to Change a Group of Entities
|
||||
---------------------------------
|
||||
>>> task10 = {'PartitionKey': 'tasksSeattle', 'RowKey': '10', 'description' : 'Go grocery shopping', 'priority' : 400}
|
||||
>>> task11 = {'PartitionKey': 'tasksSeattle', 'RowKey': '11', 'description' : 'Clean the bathroom', 'priority' : 100}
|
||||
>>> table_service.begin_batch()
|
||||
>>> table_service.insert_entity('tasktable', task10)
|
||||
>>> table_service.insert_entity('tasktable', task11)
|
||||
>>> table_service.commit_batch()
|
||||
|
||||
How to Query for an Entity
|
||||
--------------------------
|
||||
>>> task = table_service.get_entity('tasktable', 'tasksSeattle', '1')
|
||||
>>> print(task.description)
|
||||
Take out the garbage again
|
||||
>>> print(task.priority)
|
||||
250
|
||||
|
||||
>>> task = table_service.get_entity('tasktable', 'tasksSeattle', '10')
|
||||
>>> print(task.description)
|
||||
Go grocery shopping
|
||||
>>> print(task.priority)
|
||||
400
|
||||
|
||||
How to Query a Set of Entities
|
||||
------------------------------
|
||||
>>> tasks = table_service.query_entities('tasktable', "PartitionKey eq 'tasksSeattle'")
|
||||
>>> for task in tasks:
|
||||
... print(task.description)
|
||||
... print(task.priority)
|
||||
Take out the garbage again
|
||||
250
|
||||
Go grocery shopping
|
||||
400
|
||||
Clean the bathroom
|
||||
100
|
||||
Wash the car
|
||||
100
|
||||
Buy detergent
|
||||
300
|
||||
|
||||
How to Query a Subset of Entity Properties
|
||||
------------------------------------------
|
||||
>>> tasks = table_service.query_entities('tasktable', "PartitionKey eq 'tasksSeattle'", 'description')
|
||||
>>> for task in tasks:
|
||||
... print(task.description)
|
||||
Take out the garbage again
|
||||
Go grocery shopping
|
||||
Clean the bathroom
|
||||
Wash the car
|
||||
Buy detergent
|
||||
|
||||
How to Delete an Entity
|
||||
-----------------------
|
||||
>>> table_service.delete_entity('tasktable', 'tasksSeattle', '1')
|
||||
|
||||
How to Delete a Table
|
||||
---------------------
|
||||
>>> table_service.delete_table('tasktable')
|
||||
True
|
||||
|
||||
"""
|
||||
from azuretest.util import *
|
||||
|
||||
name = credentials.getStorageServicesName()
|
||||
key = credentials.getStorageServicesKey()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
|
@@ -14,28 +14,27 @@
|
|||
#--------------------------------------------------------------------------
|
||||
|
||||
from azure.storage.blobservice import *
|
||||
from azure.storage import Metrics, BlockList
|
||||
from azure.storage import Metrics
|
||||
from azure.storage.storageclient import AZURE_STORAGE_ACCESS_KEY, AZURE_STORAGE_ACCOUNT, EMULATED, DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY
|
||||
from azure import WindowsAzureError
|
||||
from azuretest.util import *
|
||||
from azure.http import HTTPRequest, HTTPResponse
|
||||
|
||||
import unittest
|
||||
import time
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
class BlobServiceTest(unittest.TestCase):
|
||||
class BlobServiceTest(AzureTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.bc = BlobService(account_name=credentials.getStorageServicesName(),
|
||||
account_key=credentials.getStorageServicesKey())
|
||||
|
||||
# TODO: it may be overkill to use the machine name from
|
||||
# getUniqueTestRunID, current time may be unique enough
|
||||
__uid = getUniqueTestRunID()
|
||||
|
||||
container_base_name = u'mytestcontainer%s' % (__uid)
|
||||
|
||||
self.container_name = getUniqueNameBasedOnCurrentTime(container_base_name)
|
||||
self.additional_container_names = []
|
||||
|
||||
def tearDown(self):
|
||||
self.cleanup()
|
||||
|
@@ -46,26 +45,12 @@ class BlobServiceTest(unittest.TestCase):
|
|||
self.bc.delete_container(self.container_name)
|
||||
except: pass
|
||||
|
||||
for name in self.additional_container_names:
|
||||
try:
|
||||
self.bc.delete_container(name)
|
||||
except: pass
|
||||
|
||||
#--Helpers-----------------------------------------------------------------
|
||||
|
||||
# TODO: move this function out of here so other tests can use them
|
||||
# TODO: find out how to import/use safe_repr instead of repr
|
||||
def assertNamedItemInContainer(self, container, item_name, msg=None):
|
||||
for item in container:
|
||||
if item.name == item_name:
|
||||
return
|
||||
|
||||
standardMsg = '%s not found in %s' % (repr(item_name), repr(container))
|
||||
self.fail(self._formatMessage(msg, standardMsg))
|
||||
|
||||
# TODO: move this function out of here so other tests can use them
|
||||
# TODO: find out how to import/use safe_repr instead of repr
|
||||
def assertNamedItemNotInContainer(self, container, item_name, msg=None):
|
||||
for item in container:
|
||||
if item.name == item_name:
|
||||
standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container))
|
||||
self.fail(self._formatMessage(msg, standardMsg))
|
||||
|
||||
def _create_container(self, container_name):
|
||||
self.bc.create_container(container_name, None, None, True)
|
||||
|
||||
|
@@ -79,6 +64,92 @@ class BlobServiceTest(unittest.TestCase):
|
|||
resp = self.bc.put_blob(self.container_name, blob_name, '', 'PageBlob', x_ms_blob_content_length=str(content_length))
|
||||
self.assertIsNone(resp)
|
||||
|
||||
#--Test cases for blob service --------------------------------------------
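# BlobService() with no explicit credentials falls back to the AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_ACCESS_KEY environment variables, or to the development storage account when EMULATED is set to 'true'; the tests below exercise each of these paths.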
|
||||
def test_create_blob_service_missing_arguments(self):
|
||||
# Arrange
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCOUNT):
|
||||
del os.environ[AZURE_STORAGE_ACCOUNT]
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
|
||||
del os.environ[AZURE_STORAGE_ACCESS_KEY]
|
||||
if os.environ.has_key(EMULATED):
|
||||
del os.environ[EMULATED]
|
||||
|
||||
# Act
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
bs = BlobService()
|
||||
|
||||
# Assert
|
||||
|
||||
def test_create_blob_service_env_variables(self):
|
||||
# Arrange
|
||||
os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName()
|
||||
os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey()
|
||||
|
||||
# Act
|
||||
bs = BlobService()
|
||||
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCOUNT):
|
||||
del os.environ[AZURE_STORAGE_ACCOUNT]
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
|
||||
del os.environ[AZURE_STORAGE_ACCESS_KEY]
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(bs)
|
||||
self.assertEquals(bs.account_name, credentials.getStorageServicesName())
|
||||
self.assertEquals(bs.account_key, credentials.getStorageServicesKey())
|
||||
self.assertEquals(bs.is_emulated, False)
|
||||
|
||||
def test_create_blob_service_emulated_true(self):
|
||||
# Arrange
|
||||
os.environ[EMULATED] = 'true'
|
||||
|
||||
# Act
|
||||
bs = BlobService()
|
||||
|
||||
if os.environ.has_key(EMULATED):
|
||||
del os.environ[EMULATED]
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(bs)
|
||||
self.assertEquals(bs.account_name, DEV_ACCOUNT_NAME)
|
||||
self.assertEquals(bs.account_key, DEV_ACCOUNT_KEY)
|
||||
self.assertEquals(bs.is_emulated, True)
|
||||
|
||||
def test_create_blob_service_emulated_false(self):
|
||||
# Arrange
|
||||
os.environ[EMULATED] = 'false'
|
||||
|
||||
# Act
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
bs = BlobService()
|
||||
|
||||
if os.environ.has_key(EMULATED):
|
||||
del os.environ[EMULATED]
|
||||
|
||||
# Assert
|
||||
|
||||
def test_create_blob_service_emulated_false_env_variables(self):
|
||||
# Arrange
|
||||
os.environ[EMULATED] = 'false'
|
||||
os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName()
|
||||
os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey()
|
||||
|
||||
# Act
|
||||
bs = BlobService()
|
||||
|
||||
if os.environ.has_key(EMULATED):
|
||||
del os.environ[EMULATED]
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCOUNT):
|
||||
del os.environ[AZURE_STORAGE_ACCOUNT]
|
||||
if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
|
||||
del os.environ[AZURE_STORAGE_ACCESS_KEY]
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(bs)
|
||||
self.assertEquals(bs.account_name, credentials.getStorageServicesName())
|
||||
self.assertEquals(bs.account_key, credentials.getStorageServicesKey())
|
||||
self.assertEquals(bs.is_emulated, False)
|
||||
|
||||
#--Test cases for containers -----------------------------------------
|
||||
def test_create_container_no_options(self):
|
||||
# Arrange
|
||||
|
@@ -98,6 +169,17 @@ class BlobServiceTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_container_with_already_existing_container(self):
|
||||
# Arrange
|
||||
|
||||
# Act
|
||||
created1 = self.bc.create_container(self.container_name)
|
||||
created2 = self.bc.create_container(self.container_name)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created1)
|
||||
self.assertFalse(created2)
|
||||
|
||||
def test_create_container_with_already_existing_container_fail_on_exist(self):
|
||||
# Arrange
|
||||
|
||||
|
@@ -155,8 +237,63 @@ class BlobServiceTest(unittest.TestCase):
|
|||
|
||||
# Assert
|
||||
self.assertIsNotNone(containers)
|
||||
self.assertGreaterEqual(len(containers), 1)
|
||||
self.assertIsNotNone(containers[0])
|
||||
self.assertNamedItemInContainer(containers, self.container_name)
|
||||
|
||||
def test_list_containers_with_prefix(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
|
||||
# Act
|
||||
containers = self.bc.list_containers(self.container_name)
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(containers)
|
||||
self.assertEqual(len(containers), 1)
|
||||
self.assertIsNotNone(containers[0])
|
||||
self.assertEqual(containers[0].name, self.container_name)
|
||||
self.assertIsNone(containers[0].metadata)
|
||||
|
||||
def test_list_containers_with_include_metadata(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
resp = self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'})
|
||||
|
||||
# Act
|
||||
containers = self.bc.list_containers(self.container_name, None, None, 'metadata')
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(containers)
|
||||
self.assertGreaterEqual(len(containers), 1)
|
||||
self.assertIsNotNone(containers[0])
|
||||
self.assertNamedItemInContainer(containers, self.container_name)
|
||||
self.assertEqual(containers[0].metadata['hello'], 'world')
|
||||
self.assertEqual(containers[0].metadata['bar'], '43')
|
||||
|
||||
def test_list_containers_with_maxresults_and_marker(self):
|
||||
# Arrange
|
||||
self.additional_container_names = [self.container_name + 'a',
|
||||
self.container_name + 'b',
|
||||
self.container_name + 'c',
|
||||
self.container_name + 'd']
|
||||
for name in self.additional_container_names:
|
||||
self.bc.create_container(name)
|
||||
|
||||
# Act
|
||||
containers1 = self.bc.list_containers(self.container_name, None, 2)
|
||||
containers2 = self.bc.list_containers(self.container_name, containers1.next_marker, 2)
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(containers1)
|
||||
self.assertEqual(len(containers1), 2)
|
||||
self.assertNamedItemInContainer(containers1, self.container_name + 'a')
|
||||
self.assertNamedItemInContainer(containers1, self.container_name + 'b')
|
||||
self.assertIsNotNone(containers2)
|
||||
self.assertEqual(len(containers2), 2)
|
||||
self.assertNamedItemInContainer(containers2, self.container_name + 'c')
|
||||
self.assertNamedItemInContainer(containers2, self.container_name + 'd')
|
||||
|
||||
def test_set_container_metadata(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
|
@@ -183,12 +320,19 @@ class BlobServiceTest(unittest.TestCase):
|
|||
def test_get_container_metadata(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
self.bc.set_container_acl(self.container_name, None, 'container')
|
||||
self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
|
||||
|
||||
# Act
|
||||
md = self.bc.get_container_metadata(self.container_name)
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(md)
|
||||
self.assertEquals(md['x-ms-meta-hello'], 'world')
|
||||
self.assertEquals(md['x-ms-meta-foo'], '42')
|
||||
# TODO:
|
||||
# get_container_properties returns container lease information whereas get_container_metadata doesn't
|
||||
# we should lease the container in the arrange section and verify that we do not receive that info
|
||||
|
||||
def test_get_container_metadata_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
@@ -202,12 +346,19 @@ class BlobServiceTest(unittest.TestCase):
|
|||
def test_get_container_properties(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
self.bc.set_container_acl(self.container_name, None, 'container')
|
||||
self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
|
||||
|
||||
# Act
|
||||
props = self.bc.get_container_properties(self.container_name)
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(props)
|
||||
self.assertEquals(props['x-ms-meta-hello'], 'world')
|
||||
self.assertEquals(props['x-ms-meta-foo'], '42')
|
||||
# TODO:
|
||||
# get_container_properties returns container lease information whereas get_container_metadata doesn't
|
||||
# we should lease the container in the arrange section and verify that we receive that info
|
||||
|
||||
def test_get_container_properties_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
@@ -229,6 +380,20 @@ class BlobServiceTest(unittest.TestCase):
|
|||
self.assertIsNotNone(acl)
|
||||
self.assertEqual(len(acl.signed_identifiers), 0)
|
||||
|
||||
def test_get_container_acl_iter(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
|
||||
# Act
|
||||
acl = self.bc.get_container_acl(self.container_name)
|
||||
for signed_identifier in acl:
|
||||
pass
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(acl)
|
||||
self.assertEqual(len(acl.signed_identifiers), 0)
|
||||
self.assertEqual(len(acl), 0)
|
||||
|
||||
def test_get_container_acl_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
||||
|
@@ -274,6 +439,45 @@ class BlobServiceTest(unittest.TestCase):
|
|||
acl = self.bc.get_container_acl(self.container_name)
|
||||
self.assertIsNotNone(acl)
|
||||
|
||||
def test_set_container_acl_with_empty_signed_identifiers(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
|
||||
# Act
|
||||
identifiers = SignedIdentifiers()
|
||||
|
||||
resp = self.bc.set_container_acl(self.container_name, identifiers)
|
||||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
acl = self.bc.get_container_acl(self.container_name)
|
||||
self.assertIsNotNone(acl)
|
||||
self.assertEqual(len(acl.signed_identifiers), 0)
|
||||
|
||||
def test_set_container_acl_with_signed_identifiers(self):
|
||||
# Arrange
|
||||
self.bc.create_container(self.container_name)
|
||||
|
||||
# Act
|
||||
si = SignedIdentifier()
|
||||
si.id = 'testid'
|
||||
si.access_policy.start = '2011-10-11'
|
||||
si.access_policy.expiry = '2011-10-12'
|
||||
si.access_policy.permission = 'r'
|
||||
identifiers = SignedIdentifiers()
|
||||
identifiers.signed_identifiers.append(si)
|
||||
|
||||
resp = self.bc.set_container_acl(self.container_name, identifiers)
|
||||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
acl = self.bc.get_container_acl(self.container_name)
|
||||
self.assertIsNotNone(acl)
|
||||
self.assertEqual(len(acl.signed_identifiers), 1)
|
||||
self.assertEqual(len(acl), 1)
|
||||
self.assertEqual(acl.signed_identifiers[0].id, 'testid')
|
||||
self.assertEqual(acl[0].id, 'testid')
|
||||
|
||||
def test_set_container_acl_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
||||
|
@@ -379,8 +583,8 @@ class BlobServiceTest(unittest.TestCase):
|
|||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
|
||||
resp = self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name)
|
||||
|
@@ -389,8 +593,170 @@ class BlobServiceTest(unittest.TestCase):
|
|||
|
||||
# Assert
|
||||
self.assertIsNotNone(blobs)
|
||||
self.assertGreaterEqual(len(blobs), 2)
|
||||
self.assertIsNotNone(blobs[0])
|
||||
self.assertNamedItemInContainer(blobs, 'blob1')
|
||||
self.assertNamedItemInContainer(blobs, 'blob2')
|
||||
self.assertEqual(blobs[0].properties.content_length, 11)
|
||||
self.assertEqual(blobs[1].properties.content_type, 'application/octet-stream Charset=UTF-8')
|
||||
|
||||
def test_list_blobs_with_prefix(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'bloba2', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, 'bloba')
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(blobs)
|
||||
self.assertEqual(len(blobs), 2)
|
||||
self.assertNamedItemInContainer(blobs, 'bloba1')
|
||||
self.assertNamedItemInContainer(blobs, 'bloba2')
|
||||
|
||||
def test_list_blobs_with_maxresults(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'bloba2', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'bloba3', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, None, None, 2)
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(blobs)
|
||||
self.assertEqual(len(blobs), 2)
|
||||
self.assertNamedItemInContainer(blobs, 'bloba1')
|
||||
self.assertNamedItemInContainer(blobs, 'bloba2')
|
||||
|
||||
def test_list_blobs_with_maxresults_and_marker(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'bloba2', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'bloba3', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob')
|
||||
|
||||
# Act
|
||||
blobs1 = self.bc.list_blobs(self.container_name, None, None, 2)
|
||||
blobs2 = self.bc.list_blobs(self.container_name, None, blobs1.next_marker, 2)
|
||||
|
||||
# Assert
|
||||
self.assertEqual(len(blobs1), 2)
|
||||
self.assertEqual(len(blobs2), 2)
|
||||
self.assertNamedItemInContainer(blobs1, 'bloba1')
|
||||
self.assertNamedItemInContainer(blobs1, 'bloba2')
|
||||
self.assertNamedItemInContainer(blobs2, 'bloba3')
|
||||
self.assertNamedItemInContainer(blobs2, 'blobb1')
|
||||
|
||||
def test_list_blobs_with_include_snapshots(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
|
||||
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob')
|
||||
self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, include='snapshots')
|
||||
|
||||
# Assert
|
||||
self.assertEqual(len(blobs), 3)
|
||||
self.assertEqual(blobs[0].name, 'blob1')
|
||||
self.assertNotEqual(blobs[0].snapshot, '')
|
||||
self.assertEqual(blobs[1].name, 'blob1')
|
||||
self.assertEqual(blobs[1].snapshot, '')
|
||||
self.assertEqual(blobs[2].name, 'blob2')
|
||||
self.assertEqual(blobs[2].snapshot, '')
|
||||
|
||||
def test_list_blobs_with_include_metadata(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'})
|
||||
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
|
||||
self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, include='metadata')
|
||||
|
||||
# Assert
|
||||
self.assertEqual(len(blobs), 2)
|
||||
self.assertEqual(blobs[0].name, 'blob1')
|
||||
self.assertEqual(blobs[0].metadata['foo'], '1')
|
||||
self.assertEqual(blobs[0].metadata['bar'], 'bob')
|
||||
self.assertEqual(blobs[1].name, 'blob2')
|
||||
self.assertEqual(blobs[1].metadata['foo'], '2')
|
||||
self.assertEqual(blobs[1].metadata['bar'], 'car')
|
||||
|
||||
def test_list_blobs_with_include_uncommittedblobs(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_block(self.container_name, 'blob1', 'AAA', '1')
|
||||
self.bc.put_block(self.container_name, 'blob1', 'BBB', '2')
|
||||
self.bc.put_block(self.container_name, 'blob1', 'CCC', '3')
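# The blocks above are staged with put_block but never committed, so 'blob1' only appears when uncommitted blobs are included in the listing.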
|
||||
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, include='uncommittedblobs')
|
||||
|
||||
# Assert
|
||||
self.assertEqual(len(blobs), 2)
|
||||
self.assertEqual(blobs[0].name, 'blob1')
|
||||
self.assertEqual(blobs[1].name, 'blob2')
|
||||
|
||||
#def test_list_blobs_with_include_copy(self):
|
||||
# # Arrange
|
||||
# self._create_container(self.container_name)
|
||||
# data = 'hello world'
|
||||
# self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'status':'original'})
|
||||
# sourceblob = '/%s/%s/%s' % (credentials.getStorageServicesName(),
|
||||
# self.container_name,
|
||||
# 'blob1')
|
||||
# self.bc.copy_blob(self.container_name, 'blob1copy', sourceblob, {'status':'copy'})
|
||||
|
||||
# # Act
|
||||
# blobs = self.bc.list_blobs(self.container_name, include='copy')
|
||||
|
||||
# # Assert
|
||||
# self.assertEqual(len(blobs), 2)
|
||||
# self.assertEqual(blobs[0].name, 'blob1')
|
||||
# self.assertEqual(blobs[1].name, 'blob2')
|
||||
# #TODO: check for metadata related to copy blob
|
||||
|
||||
def test_list_blobs_with_include_multiple(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
data = 'hello world'
|
||||
self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'})
|
||||
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
|
||||
self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
|
||||
# Act
|
||||
blobs = self.bc.list_blobs(self.container_name, include='snapshots,metadata')
|
||||
|
||||
# Assert
|
||||
self.assertEqual(len(blobs), 3)
|
||||
self.assertEqual(blobs[0].name, 'blob1')
|
||||
self.assertNotEqual(blobs[0].snapshot, '')
|
||||
self.assertEqual(blobs[0].metadata['foo'], '1')
|
||||
self.assertEqual(blobs[0].metadata['bar'], 'bob')
|
||||
self.assertEqual(blobs[1].name, 'blob1')
|
||||
self.assertEqual(blobs[1].snapshot, '')
|
||||
self.assertEqual(blobs[1].metadata['foo'], '1')
|
||||
self.assertEqual(blobs[1].metadata['bar'], 'bob')
|
||||
self.assertEqual(blobs[2].name, 'blob2')
|
||||
self.assertEqual(blobs[2].snapshot, '')
|
||||
self.assertEqual(blobs[2].metadata['foo'], '2')
|
||||
self.assertEqual(blobs[2].metadata['bar'], 'car')
|
||||
|
||||
def test_put_blob_block_blob(self):
|
||||
# Arrange
|
||||
|
@@ -413,6 +779,35 @@ class BlobServiceTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
|
||||
def test_put_blob_with_lease_id(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
|
||||
lease_id = lease['x-ms-lease-id']
|
||||
|
||||
# Act
|
||||
data = 'hello world again'
|
||||
resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_lease_id=lease_id)
|
||||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id)
|
||||
self.assertEqual(blob, 'hello world again')
|
||||
|
||||
def test_put_blob_with_metadata(self):
|
||||
# Arrange
|
||||
self._create_container(self.container_name)
|
||||
|
||||
# Act
|
||||
data = 'hello world'
|
||||
resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'hello':'world','foo':'42'})
|
||||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
md = self.bc.get_blob_metadata(self.container_name, 'blob1')
|
||||
self.assertEquals(md['x-ms-meta-hello'], 'world')
|
||||
self.assertEquals(md['x-ms-meta-foo'], '42')
|
||||
|
||||
def test_get_blob_with_existing_blob(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
|
@@ -421,7 +816,84 @@ class BlobServiceTest(unittest.TestCase):
|
|||
blob = self.bc.get_blob(self.container_name, 'blob1')
|
||||
|
||||
# Assert
|
||||
self.assertEqual(type(blob), str)
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello world')
|
||||
|
||||
def test_get_blob_with_snapshot(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
snapshot = self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
|
||||
# Act
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot'])
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello world')
|
||||
|
||||
def test_get_blob_with_snapshot_previous(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
snapshot = self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
self.bc.put_blob(self.container_name, 'blob1', 'hello world again', 'BlockBlob')
|
||||
|
||||
# Act
|
||||
blob_previous = self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot'])
|
||||
blob_latest = self.bc.get_blob(self.container_name, 'blob1')
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob_previous, BlobResult)
|
||||
self.assertIsInstance(blob_latest, BlobResult)
|
||||
self.assertEquals(blob_previous, 'hello world')
|
||||
self.assertEquals(blob_latest, 'hello world again')
|
||||
|
||||
def test_get_blob_with_range(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
|
||||
# Act
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5')
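# The x_ms_range value is an inclusive byte range, so bytes 0-5 return the first six characters: 'hello '.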
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello ')
|
||||
|
||||
def test_get_blob_with_range_and_get_content_md5(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
|
||||
# Act
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5', x_ms_range_get_content_md5='true')
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello ')
|
||||
self.assertEquals(blob.properties['content-md5'], '+BSJN3e8wilf/wXwDlCNpg==')
|
||||
|
||||
def test_get_blob_with_lease(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
|
||||
lease_id = lease['x-ms-lease-id']
|
||||
|
||||
# Act
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id)
|
||||
self.bc.lease_blob(self.container_name, 'blob1', 'release', lease_id)
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello world')
|
||||
|
||||
def test_get_blob_on_leased_blob_without_lease_id(self):
|
||||
# Arrange
|
||||
self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
|
||||
self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
|
||||
|
||||
# Act
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1') # get_blob is allowed without lease id
|
||||
|
||||
# Assert
|
||||
self.assertIsInstance(blob, BlobResult)
|
||||
self.assertEquals(blob, 'hello world')
|
||||
|
||||
def test_get_blob_with_non_existing_container(self):
|
||||
|
@@ -453,7 +925,7 @@ class BlobServiceTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
props = self.bc.get_blob_properties(self.container_name, 'blob1')
|
||||
self.assertEquals(props['Content-Language'], 'spanish')
|
||||
self.assertEquals(props['content-language'], 'spanish')
|
||||
|
||||
def test_set_blob_properties_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
@@ -485,6 +957,7 @@ class BlobServiceTest(unittest.TestCase):
|
|||
self.assertIsNotNone(props)
|
||||
self.assertEquals(props['x-ms-blob-type'], 'BlockBlob')
|
||||
self.assertEquals(props['x-ms-lease-status'], 'unlocked')
|
||||
self.assertEquals(props['content-length'], '11')
|
||||
|
||||
def test_get_blob_properties_with_non_existing_container(self):
|
||||
# Arrange
|
||||
|
@@ -571,7 +1044,8 @@ class BlobServiceTest(unittest.TestCase):
|
|||
resp = self.bc.snapshot_blob(self.container_name, 'blob1')
|
||||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
self.assertIsNotNone(resp)
|
||||
self.assertIsNotNone(resp['x-ms-snapshot'])
|
||||
|
||||
def test_lease_blob_acquire_and_release(self):
|
||||
# Arrange
|
||||
|
@@ -625,6 +1099,8 @@ class BlobServiceTest(unittest.TestCase):
|
|||
|
||||
# Assert
|
||||
self.assertIsNone(resp)
|
||||
blob = self.bc.get_blob(self.container_name, 'blob1')
|
||||
self.assertEqual(blob, 'AAABBBCCC')
|
||||
|
||||
def test_get_block_list_no_blocks(self):
|
||||
# Arrange
|
||||
|
@@ -724,6 +1200,23 @@ class BlobServiceTest(unittest.TestCase):
|
|||
self.assertEquals(ranges.page_ranges[1].start, 1024)
|
||||
self.assertEquals(ranges.page_ranges[1].end, 1535)
|
||||
|
||||
def test_get_page_ranges_iter(self):
|
||||
# Arrange
|
||||
self._create_container_and_page_blob(self.container_name, 'blob1', 2048)
|
||||
data = 'abcdefghijklmnop' * 32
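# 16 characters * 32 repetitions = 512 bytes, so each put_page call below fills exactly one 512-byte page.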
|
||||
resp1 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=0-511', 'update')
|
||||
resp2 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=1024-1535', 'update')
|
||||
|
||||
# Act
|
||||
ranges = self.bc.get_page_ranges(self.container_name, 'blob1')
|
||||
for range in ranges:
|
||||
pass
|
||||
|
||||
# Assert
|
||||
self.assertEquals(len(ranges), 2)
|
||||
self.assertIsInstance(ranges[0], PageRange)
|
||||
self.assertIsInstance(ranges[1], PageRange)
|
||||
|
||||
def test_with_filter(self):
|
||||
# Single filter
|
||||
called = []
|
||||
|
|
|
@@ -0,0 +1,77 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
from azure.storage import *
|
||||
from azuretest.util import *
|
||||
|
||||
import unittest
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
class CloudStorageAccountTest(AzureTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.account = CloudStorageAccount(account_name=credentials.getStorageServicesName(),
|
||||
account_key=credentials.getStorageServicesKey())
|
||||
|
||||
#--Test cases --------------------------------------------------------
|
||||
def test_create_blob_service(self):
|
||||
# Arrange
|
||||
|
||||
# Act
|
||||
service = self.account.create_blob_service()
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(service)
|
||||
self.assertIsInstance(service, BlobService)
|
||||
self.assertEqual(service.account_name, credentials.getStorageServicesName())
|
||||
self.assertEqual(service.account_key, credentials.getStorageServicesKey())
|
||||
|
||||
def test_create_blob_service_empty_credentials(self):
|
||||
# Arrange
|
||||
|
||||
# Act
|
||||
bad_account = CloudStorageAccount('', '')
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
service = bad_account.create_blob_service()
|
||||
|
||||
# Assert
|
||||
|
||||
def test_create_table_service(self):
|
||||
# Arrange
|
||||
|
||||
# Act
|
||||
service = self.account.create_table_service()
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(service)
|
||||
self.assertIsInstance(service, TableService)
|
||||
self.assertEqual(service.account_name, credentials.getStorageServicesName())
|
||||
self.assertEqual(service.account_key, credentials.getStorageServicesKey())
|
||||
|
||||
def test_create_queue_service(self):
|
||||
# Arrange
|
||||
|
||||
# Act
|
||||
service = self.account.create_queue_service()
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(service)
|
||||
self.assertIsInstance(service, QueueService)
|
||||
self.assertEqual(service.account_name, credentials.getStorageServicesName())
|
||||
self.assertEqual(service.account_key, credentials.getStorageServicesKey())
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
|
@@ -19,27 +19,25 @@ from azure.storage.queueservice import *
|
|||
from azuretest.util import *
|
||||
|
||||
import unittest
|
||||
import time
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
TEST_QUEUE_PREFIX = 'mytestqueue'
|
||||
#------------------------------------------------------------------------------
|
||||
class QueueServiceTest(unittest.TestCase):
|
||||
class QueueServiceTest(AzureTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.queue_client = QueueService(account_name=credentials.getStorageServicesName(),
|
||||
account_key=credentials.getStorageServicesKey())
|
||||
# TODO: it may be overkill to use the machine name from
|
||||
# getUniqueTestRunID, current time may be unique enough
|
||||
|
||||
__uid = getUniqueTestRunID()
|
||||
|
||||
queue_base_name = u'%s' % (__uid)
|
||||
self.test_queues = []
|
||||
self.creatable_queues = []
|
||||
for i in range(10):
|
||||
self.test_queues.append(TEST_QUEUE_PREFIX + getUniqueNameBasedOnCurrentTime(queue_base_name))
|
||||
self.test_queues.append(TEST_QUEUE_PREFIX + str(i) + getUniqueNameBasedOnCurrentTime(queue_base_name))
|
||||
for i in range(4):
|
||||
self.creatable_queues.append('mycreatablequeue' + getUniqueNameBasedOnCurrentTime(queue_base_name))
|
||||
self.creatable_queues.append('mycreatablequeue' + str(i) + getUniqueNameBasedOnCurrentTime(queue_base_name))
|
||||
for queue_name in self.test_queues:
|
||||
self.queue_client.create_queue(queue_name)
|
||||
|
||||
|
@@ -107,6 +105,24 @@ class QueueServiceTest(unittest.TestCase):
|
|||
self.assertIsNotNone(result)
|
||||
self.assertEqual(result['x-ms-approximate-messages-count'], '0')
|
||||
|
||||
def test_create_queue_already_exist(self):
|
||||
#Action
|
||||
created1 = self.queue_client.create_queue(self.creatable_queues[0])
|
||||
created2 = self.queue_client.create_queue(self.creatable_queues[0])
|
||||
|
||||
#Asserts
|
||||
self.assertTrue(created1)
|
||||
self.assertFalse(created2)
|
||||
|
||||
def test_create_queue_fail_on_exist(self):
|
||||
#Action
|
||||
created = self.queue_client.create_queue(self.creatable_queues[0], None, True)
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
self.queue_client.create_queue(self.creatable_queues[0], None, True)
|
||||
|
||||
#Asserts
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_queue_with_options(self):
|
||||
#Action
|
||||
self.queue_client.create_queue(self.creatable_queues[1], x_ms_meta_name_values = {'foo':'test', 'bar':'blah'})
|
||||
|
@@ -118,9 +134,34 @@ class QueueServiceTest(unittest.TestCase):
|
|||
self.assertEqual('test', result['x-ms-meta-foo'])
|
||||
self.assertEqual('blah', result['x-ms-meta-bar'])
|
||||
|
||||
def test_delete_queue_not_exist(self):
|
||||
#Action
|
||||
deleted = self.queue_client.delete_queue(self.creatable_queues[0])
|
||||
|
||||
#Asserts
|
||||
self.assertFalse(deleted)
|
||||
|
||||
def test_delete_queue_fail_not_exist_not_exist(self):
|
||||
#Action
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
self.queue_client.delete_queue(self.creatable_queues[0], True)
|
||||
|
||||
#Asserts
|
||||
|
||||
def test_delete_queue_fail_not_exist_already_exist(self):
|
||||
#Action
|
||||
created = self.queue_client.create_queue(self.creatable_queues[0])
|
||||
deleted = self.queue_client.delete_queue(self.creatable_queues[0], True)
|
||||
|
||||
#Asserts
|
||||
self.assertTrue(created)
|
||||
self.assertTrue(deleted)
|
||||
|
||||
def test_list_queues(self):
|
||||
#Action
|
||||
queues = self.queue_client.list_queues()
|
||||
for queue in queues:
|
||||
pass
|
||||
|
||||
#Asserts
|
||||
self.assertIsNotNone(queues)
|
||||
|
@@ -172,7 +213,7 @@ class QueueServiceTest(unittest.TestCase):
|
|||
self.queue_client.put_message(self.test_queues[0], 'message3')
|
||||
self.queue_client.put_message(self.test_queues[0], 'message4')
|
||||
|
||||
def test_get_messges(self):
|
||||
def test_get_messages(self):
|
||||
#Action
|
||||
self.queue_client.put_message(self.test_queues[1], 'message1')
|
||||
self.queue_client.put_message(self.test_queues[1], 'message2')
|
||||
|
|
|
@@ -1,15 +1,17 @@
|
|||
#------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# This source code is subject to terms and conditions of the Apache License,
|
||||
# Version 2.0. A copy of the license can be found in the License.html file at
|
||||
# the root of this distribution. If you cannot locate the Apache License,
|
||||
# Version 2.0, please send an email to vspython@microsoft.com. By using this
|
||||
# source code in any fashion, you are agreeing to be bound by the terms of the
|
||||
# Apache License, Version 2.0.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# You must not remove this notice, or any other, from this software.
|
||||
#------------------------------------------------------------------------------
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
from azure import *
|
||||
from azure.servicebus import *
|
||||
|
@@ -18,14 +20,12 @@ from azuretest.util import *
|
|||
import unittest
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
class ServiceBusTest(unittest.TestCase):
|
||||
class ServiceBusTest(AzureTestCase):
|
||||
def setUp(self):
|
||||
self.sbs = ServiceBusService(credentials.getServiceBusNamespace(),
|
||||
credentials.getServiceBusKey(),
|
||||
'owner')
|
||||
|
||||
# TODO: it may be overkill to use the machine name from
|
||||
# getUniqueTestRunID, current time may be unique enough
|
||||
__uid = getUniqueTestRunID()
|
||||
|
||||
queue_base_name = u'mytestqueue%s' % (__uid)
|
||||
|
@@ -48,25 +48,6 @@ class ServiceBusTest(unittest.TestCase):
|
|||
except: pass
|
||||
|
||||
#--Helpers-----------------------------------------------------------------
|
||||
|
||||
# TODO: move this function out of here so other tests can use them
|
||||
# TODO: find out how to import/use safe_repr instead of repr
|
||||
def assertNamedItemInContainer(self, container, item_name, msg=None):
|
||||
for item in container:
|
||||
if item.name == item_name:
|
||||
return
|
||||
|
||||
standardMsg = '%s not found in %s' % (repr(item_name), repr(container))
|
||||
self.fail(self._formatMessage(msg, standardMsg))
|
||||
|
||||
# TODO: move this function out of here so other tests can use them
|
||||
# TODO: find out how to import/use safe_repr instead of repr
|
||||
def assertNamedItemNotInContainer(self, container, item_name, msg=None):
|
||||
for item in container:
|
||||
if item.name == item_name:
|
||||
standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container))
|
||||
self.fail(self._formatMessage(msg, standardMsg))
|
||||
|
||||
def _create_queue(self, queue_name):
|
||||
self.sbs.create_queue(queue_name, None, True)
|
||||
|
||||
|
@@ -84,6 +65,44 @@ class ServiceBusTest(unittest.TestCase):
|
|||
def _create_subscription(self, topic_name, subscription_name):
|
||||
self.sbs.create_subscription(topic_name, subscription_name, None, True)
|
||||
|
||||
#--Test cases for service bus service -------------------------------------
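# ServiceBusService() with no explicit credentials falls back to the AZURE_SERVICEBUS_NAMESPACE / AZURE_SERVICEBUS_ACCESS_KEY / AZURE_SERVICEBUS_ISSUER environment variables; the tests below clear and set them to verify both paths.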
|
||||
def test_create_service_bus_missing_arguments(self):
|
||||
# Arrange
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_NAMESPACE):
|
||||
del os.environ[AZURE_SERVICEBUS_NAMESPACE]
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_ACCESS_KEY):
|
||||
del os.environ[AZURE_SERVICEBUS_ACCESS_KEY]
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_ISSUER):
|
||||
del os.environ[AZURE_SERVICEBUS_ISSUER]
|
||||
|
||||
# Act
|
||||
with self.assertRaises(WindowsAzureError):
|
||||
sbs = ServiceBusService()
|
||||
|
||||
# Assert
|
||||
|
||||
def test_create_service_bus_env_variables(self):
|
||||
# Arrange
|
||||
os.environ[AZURE_SERVICEBUS_NAMESPACE] = credentials.getServiceBusNamespace()
|
||||
os.environ[AZURE_SERVICEBUS_ACCESS_KEY] = credentials.getServiceBusKey()
|
||||
os.environ[AZURE_SERVICEBUS_ISSUER] = 'owner'
|
||||
|
||||
# Act
|
||||
sbs = ServiceBusService()
|
||||
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_NAMESPACE):
|
||||
del os.environ[AZURE_SERVICEBUS_NAMESPACE]
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_ACCESS_KEY):
|
||||
del os.environ[AZURE_SERVICEBUS_ACCESS_KEY]
|
||||
if os.environ.has_key(AZURE_SERVICEBUS_ISSUER):
|
||||
del os.environ[AZURE_SERVICEBUS_ISSUER]
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(sbs)
|
||||
self.assertEquals(sbs.service_namespace, credentials.getServiceBusNamespace())
|
||||
self.assertEquals(sbs.account_key, credentials.getServiceBusKey())
|
||||
self.assertEquals(sbs.issuer, 'owner')
|
||||
|
||||
#--Test cases for queues --------------------------------------------------
|
||||
def test_create_queue_no_options(self):
|
||||
# Arrange
|
||||
|
@@ -108,12 +127,33 @@ class ServiceBusTest(unittest.TestCase):
|
|||
|
||||
# Act
|
||||
queue_options = Queue()
|
||||
queue_options.max_size_in_megabytes = 5120
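# The time-based settings below are ISO 8601 durations: 'PT1M' is one minute, 'PT5M' is five minutes.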
|
||||
queue_options.default_message_time_to_live = 'PT1M'
|
||||
queue_options.duplicate_detection_history_time_window = 'PT5M'
|
||||
queue_options.enable_batched_operations = False
|
||||
queue_options.dead_lettering_on_message_expiration = False
|
||||
queue_options.lock_duration = 'PT1M'
|
||||
queue_options.max_delivery_count = 15
|
||||
queue_options.max_size_in_megabytes = 5120
|
||||
queue_options.message_count = 0
|
||||
queue_options.requires_duplicate_detection = False
|
||||
queue_options.requires_session = False
|
||||
queue_options.size_in_bytes = 0
|
||||
created = self.sbs.create_queue(self.queue_name, queue_options)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
queue = self.sbs.get_queue(self.queue_name)
|
||||
self.assertEquals('PT1M', queue.default_message_time_to_live)
|
||||
self.assertEquals('PT5M', queue.duplicate_detection_history_time_window)
|
||||
self.assertEquals(False, queue.enable_batched_operations)
|
||||
self.assertEquals(False, queue.dead_lettering_on_message_expiration)
|
||||
self.assertEquals('PT1M', queue.lock_duration)
|
||||
self.assertEquals(15, queue.max_delivery_count)
|
||||
self.assertEquals(5120, queue.max_size_in_megabytes)
|
||||
self.assertEquals(0, queue.message_count)
|
||||
self.assertEquals(False, queue.requires_duplicate_detection)
|
||||
self.assertEquals(False, queue.requires_session)
|
||||
self.assertEquals(0, queue.size_in_bytes)
|
||||
|
||||
def test_create_queue_with_already_existing_queue(self):
|
||||
# Arrange
|
||||
|
@@ -319,7 +359,14 @@ class ServiceBusTest(unittest.TestCase):
|
|||
self._create_queue(self.queue_name)
|
||||
|
||||
# Act
|
||||
sent_msg = Message('message with properties', custom_properties={'hello':'world', 'foo':42})
|
||||
props = {'hello':'world',
|
||||
'foo':42,
|
||||
'active':True,
|
||||
'deceased':False,
|
||||
'large':8555111000,
|
||||
'floating':3.14,
|
||||
'dob':datetime(2011, 12, 14)}
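# Custom properties are expected to round-trip with their Python types (str, int, bool, float, datetime), as the assertions below verify.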
|
||||
sent_msg = Message('message with properties', custom_properties=props)
|
||||
self.sbs.send_queue_message(self.queue_name, sent_msg)
|
||||
received_msg = self.sbs.receive_queue_message(self.queue_name, True, 5)
|
||||
received_msg.delete()
|
||||
|
@@ -327,7 +374,12 @@ class ServiceBusTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertIsNotNone(received_msg)
|
||||
self.assertEquals(received_msg.custom_properties['hello'], 'world')
|
||||
self.assertEquals(received_msg.custom_properties['foo'], '42') # TODO: note that the integer became a string
|
||||
self.assertEquals(received_msg.custom_properties['foo'], 42)
|
||||
self.assertEquals(received_msg.custom_properties['active'], True)
|
||||
self.assertEquals(received_msg.custom_properties['deceased'], False)
|
||||
self.assertEquals(received_msg.custom_properties['large'], 8555111000)
|
||||
self.assertEquals(received_msg.custom_properties['floating'], 3.14)
|
||||
self.assertEquals(received_msg.custom_properties['dob'], datetime(2011, 12, 14))
|
||||
|
||||
#--Test cases for topics/subscriptions ------------------------------------
|
||||
def test_create_topic_no_options(self):
|
||||
|
@@ -353,12 +405,24 @@ class ServiceBusTest(unittest.TestCase):
|
|||
|
||||
# Act
|
||||
topic_options = Topic()
|
||||
topic_options.max_size_in_megabytes = '5120'
|
||||
topic_options.default_message_time_to_live = 'PT1M'
|
||||
topic_options.duplicate_detection_history_time_window = 'PT5M'
|
||||
topic_options.enable_batched_operations = False
|
||||
topic_options.max_size_in_megabytes = 5120
|
||||
topic_options.requires_duplicate_detection = False
|
||||
topic_options.size_in_bytes = 0
|
||||
#TODO: MaximumNumberOfSubscriptions is not supported?
|
||||
created = self.sbs.create_topic(self.topic_name, topic_options)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
topic = self.sbs.get_topic(self.topic_name)
|
||||
self.assertEquals('PT1M', topic.default_message_time_to_live)
|
||||
self.assertEquals('PT5M', topic.duplicate_detection_history_time_window)
|
||||
self.assertEquals(False, topic.enable_batched_operations)
|
||||
self.assertEquals(5120, topic.max_size_in_megabytes)
|
||||
self.assertEquals(False, topic.requires_duplicate_detection)
|
||||
self.assertEquals(0, topic.size_in_bytes)
|
||||
|
||||
def test_create_topic_with_already_existing_topic(self):
|
||||
# Arrange
|
||||
|
@@ -382,6 +446,23 @@ class ServiceBusTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_topic_backwards_compatibility_warning(self):
|
||||
# Arrange
|
||||
topic_options = Topic()
|
||||
topic_options.max_size_in_megabytes = 5120
|
||||
|
||||
# Act
|
||||
val = topic_options.max_size_in_mega_bytes
|
||||
|
||||
# Assert
|
||||
self.assertEqual(val, 5120)
|
||||
|
||||
# Act
|
||||
topic_options.max_size_in_mega_bytes = 1024
|
||||
|
||||
# Assert
|
||||
self.assertEqual(topic_options.max_size_in_megabytes, 1024)
|
||||
|
||||
def test_get_topic_with_existing_topic(self):
|
||||
# Arrange
|
||||
self._create_topic(self.topic_name)
|
||||
|
@@ -467,6 +548,35 @@ class ServiceBusTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_subscription_with_options(self):
|
||||
# Arrange
|
||||
self._create_topic(self.topic_name)
|
||||
|
||||
# Act
|
||||
subscription_options = Subscription()
|
||||
subscription_options.dead_lettering_on_filter_evaluation_exceptions = False
|
||||
subscription_options.dead_lettering_on_message_expiration = False
|
||||
subscription_options.default_message_time_to_live = 'PT15M'
|
||||
subscription_options.enable_batched_operations = False
|
||||
subscription_options.lock_duration = 'PT1M'
|
||||
subscription_options.max_delivery_count = 15
|
||||
#message_count is read-only
|
||||
subscription_options.message_count = 0
|
||||
subscription_options.requires_session = False
|
||||
created = self.sbs.create_subscription(self.topic_name, 'MySubscription', subscription_options)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
subscription = self.sbs.get_subscription(self.topic_name, 'MySubscription')
|
||||
self.assertEquals(False, subscription.dead_lettering_on_filter_evaluation_exceptions)
|
||||
self.assertEquals(False, subscription.dead_lettering_on_message_expiration)
|
||||
self.assertEquals('PT15M', subscription.default_message_time_to_live)
|
||||
self.assertEquals(False, subscription.enable_batched_operations)
|
||||
self.assertEquals('PT1M', subscription.lock_duration)
|
||||
#self.assertEquals(15, subscription.max_delivery_count) #no idea why max_delivery_count is always 10
|
||||
self.assertEquals(0, subscription.message_count)
|
||||
self.assertEquals(False, subscription.requires_session)
|
||||
|
||||
def test_create_subscription_fail_on_exist(self):
|
||||
# Arrange
|
||||
self._create_topic(self.topic_name)
|
||||
|
@@ -630,7 +740,7 @@ class ServiceBusTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options(self):
|
||||
def test_create_rule_with_options_sql_filter(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
|
@@ -643,6 +753,71 @@ class ServiceBusTest(unittest.TestCase):
|
|||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options_true_filter(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
# Act
|
||||
rule1 = Rule()
|
||||
rule1.filter_type = 'TrueFilter'
|
||||
rule1.filter_expression = '1=1'
|
||||
created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options_false_filter(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
# Act
|
||||
rule1 = Rule()
|
||||
rule1.filter_type = 'FalseFilter'
|
||||
rule1.filter_expression = '1=0'
|
||||
created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options_correlation_filter(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
# Act
|
||||
rule1 = Rule()
|
||||
rule1.filter_type = 'CorrelationFilter'
|
||||
rule1.filter_expression = 'myid'
|
||||
created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options_empty_rule_action(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
# Act
|
||||
rule1 = Rule()
|
||||
rule1.action_type = 'EmptyRuleAction'
|
||||
rule1.action_expression = ''
|
||||
created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_create_rule_with_options_sql_rule_action(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
||||
# Act
|
||||
rule1 = Rule()
|
||||
rule1.action_type = 'SqlRuleAction'
|
||||
rule1.action_expression = "SET foo = 5"
|
||||
created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1)
|
||||
|
||||
# Assert
|
||||
self.assertTrue(created)
|
||||
|
||||
def test_list_rules(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
@@ -675,6 +850,27 @@ class ServiceBusTest(unittest.TestCase):
|
|||
|
||||
# Assert
|
||||
|
||||
def test_get_rule_with_existing_rule_with_options(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
sent_rule = Rule()
|
||||
sent_rule.filter_type = 'SqlFilter'
|
||||
sent_rule.filter_expression = 'foo > 40'
|
||||
sent_rule.action_type = 'SqlRuleAction'
|
||||
sent_rule.action_expression = 'SET foo = 5'
|
||||
self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', sent_rule)
|
||||
|
||||
# Act
|
||||
received_rule = self.sbs.get_rule(self.topic_name, 'MySubscription', 'MyRule1')
|
||||
|
||||
# Assert
|
||||
self.assertIsNotNone(received_rule)
|
||||
self.assertEquals(received_rule.name, 'MyRule1')
|
||||
self.assertEquals(received_rule.filter_type, sent_rule.filter_type)
|
||||
self.assertEquals(received_rule.filter_expression, sent_rule.filter_expression)
|
||||
self.assertEquals(received_rule.action_type, sent_rule.action_type)
|
||||
self.assertEquals(received_rule.action_expression, sent_rule.action_expression)
|
||||
|
||||
def test_delete_rule_with_existing_rule(self):
|
||||
# Arrange
|
||||
self._create_topic_and_subscription(self.topic_name, 'MySubscription')
|
||||
|
|
|
@@ -0,0 +1,137 @@
|
|||
#-------------------------------------------------------------------------
|
||||
# Copyright 2011 Microsoft Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
from azure import DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY
|
||||
from azure.storage.sharedaccesssignature import (SharedAccessSignature,
|
||||
SharedAccessPolicy,
|
||||
Permission,
|
||||
WebResource)
|
||||
from azure.storage import AccessPolicy
|
||||
from azuretest.util import AzureTestCase
|
||||
|
||||
import unittest
|
||||
|
||||
#------------------------------------------------------------------------------
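# Abbreviated query-string parameter names used by Shared Access Signatures ('st', 'se', 'sr', 'sp', 'si', 'sig'); the 'sig' value is a Base64-encoded HMAC-SHA256 signature over the access policy fields.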
|
||||
SIGNED_START = 'st'
|
||||
SIGNED_EXPIRY = 'se'
|
||||
SIGNED_RESOURCE = 'sr'
|
||||
SIGNED_PERMISSION = 'sp'
|
||||
SIGNED_IDENTIFIER = 'si'
|
||||
SIGNED_SIGNATURE = 'sig'
|
||||
RESOURCE_BLOB = 'blob'
|
||||
RESOURCE_CONTAINER = 'container'
|
||||
SIGNED_RESOURCE_TYPE = 'resource'
|
||||
SHARED_ACCESS_PERMISSION = 'permission'
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
class SharedAccessSignatureTest(AzureTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.sas = SharedAccessSignature(account_name=DEV_ACCOUNT_NAME,
|
||||
account_key=DEV_ACCOUNT_KEY)
|
||||
def tearDown(self):
|
||||
return super(SharedAccessSignatureTest, self).tearDown()
|
||||
|
||||
def test_generate_signature_container(self):
|
||||
accss_plcy = AccessPolicy()
|
||||
accss_plcy.start = '2011-10-11'
|
||||
accss_plcy.expiry = '2011-10-12'
|
||||
accss_plcy.permission = 'r'
|
||||
signed_identifier = 'YWJjZGVmZw=='
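# 'YWJjZGVmZw==' is the Base64 encoding of 'abcdefg'.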
|
||||
sap = SharedAccessPolicy(accss_plcy, signed_identifier)
|
||||
signature = self.sas._generate_signature('images',
|
||||
RESOURCE_CONTAINER,
|
||||
sap)
|
||||
self.assertEqual(signature,
|
||||
'VdlALM4TYEYYNf94Bvt3dn48TsA01wk45ltwP3zeKp4=')
|
||||
|
||||
def test_generate_signature_blob(self):
|
||||
accss_plcy = AccessPolicy()
|
||||
accss_plcy.start = '2011-10-11T11:03:40Z'
|
||||
accss_plcy.expiry = '2011-10-12T11:53:40Z'
|
||||
accss_plcy.permission = 'r'
|
||||
sap = SharedAccessPolicy(accss_plcy)
|
||||
|
||||
signature = self.sas._generate_signature('images/pic1.png',
|
||||
RESOURCE_BLOB,
|
||||
sap)
|
||||
self.assertEqual(signature,
|
||||
'7NIEip+VOrQ5ZV80pORPK1MOsJc62wwCNcbMvE+lQ0s=')
|
||||
|
||||
def test_blob_signed_query_string(self):
|
||||
accss_plcy = AccessPolicy()
|
||||
accss_plcy.start = '2011-10-11'
|
||||
accss_plcy.expiry = '2011-10-12'
|
||||
accss_plcy.permission = 'w'
|
||||
sap = SharedAccessPolicy(accss_plcy)
|
||||
qry_str = self.sas.generate_signed_query_string('images/pic1.png',
|
||||
RESOURCE_BLOB,
|
||||
sap)
|
||||
self.assertEqual(qry_str[SIGNED_START], '2011-10-11')
|
||||
self.assertEqual(qry_str[SIGNED_EXPIRY], '2011-10-12')
|
||||
self.assertEqual(qry_str[SIGNED_RESOURCE], RESOURCE_BLOB)
|
||||
self.assertEqual(qry_str[SIGNED_PERMISSION], 'w')
|
||||
self.assertEqual(qry_str[SIGNED_SIGNATURE],
|
||||
'k8uyTrn3pgLXuhwgZhxeAH6mZ/es9k2vqHPJEuIH4CE=')
|
||||
|
||||
def test_container_signed_query_string(self):
|
||||
accss_plcy = AccessPolicy()
|
||||
accss_plcy.start = '2011-10-11'
|
||||
accss_plcy.expiry = '2011-10-12'
|
||||
accss_plcy.permission = 'r'
|
||||
signed_identifier = 'YWJjZGVmZw=='
|
||||
sap = SharedAccessPolicy(accss_plcy, signed_identifier)
|
||||
qry_str = self.sas.generate_signed_query_string('images',
|
||||
RESOURCE_CONTAINER,
|
||||
sap)
|
||||
self.assertEqual(qry_str[SIGNED_START], '2011-10-11')
|
||||
self.assertEqual(qry_str[SIGNED_EXPIRY], '2011-10-12')
|
||||
self.assertEqual(qry_str[SIGNED_RESOURCE], RESOURCE_CONTAINER)
|
||||
self.assertEqual(qry_str[SIGNED_PERMISSION], 'r')
|
||||
self.assertEqual(qry_str[SIGNED_IDENTIFIER], 'YWJjZGVmZw==')
|
||||
self.assertEqual(qry_str[SIGNED_SIGNATURE],
|
||||
'VdlALM4TYEYYNf94Bvt3dn48TsA01wk45ltwP3zeKp4=')
|
||||
|
||||
def test_sign_request(self):
|
||||
accss_plcy = AccessPolicy()
|
||||
accss_plcy.start = '2011-10-11'
|
||||
accss_plcy.expiry = '2011-10-12'
|
||||
accss_plcy.permission = 'r'
|
||||
sap = SharedAccessPolicy(accss_plcy)
|
||||
qry_str = self.sas.generate_signed_query_string('images/pic1.png',
|
||||
RESOURCE_BLOB,
|
||||
sap)
|
||||
|
||||
permission = Permission()
|
||||
permission.path = '/images/pic1.png'
|
||||
permission.query_string = qry_str
|
||||
self.sas.permission_set = [permission]
|
||||
|
||||
web_rsrc = WebResource()
|
||||
web_rsrc.properties[SIGNED_RESOURCE_TYPE] = RESOURCE_BLOB
|
||||
web_rsrc.properties[SHARED_ACCESS_PERMISSION] = 'r'
|
||||
web_rsrc.path = '/images/pic1.png?comp=metadata'
|
||||
web_rsrc.request_url = '/images/pic1.png?comp=metadata'
|
||||
|
||||
web_rsrc = self.sas.sign_request(web_rsrc)
|
||||
|
||||
self.assertEqual(web_rsrc.request_url,
|
||||
'/images/pic1.png?comp=metadata&' +
|
||||
self.sas._convert_query_string(qry_str))
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
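As a rough sketch of how the signatures exercised above are meant to be used: the name/value pairs returned by **generate\_signed\_query\_string** form the shared access token appended to the resource URL. Everything below is illustrative only; the blob host name pattern and the reuse of the internal **\_convert\_query\_string** helper (the one exercised in test_sign_request) are assumptions, and account_name / account_key stand in for real storage credentials:

```Python
from azure.storage import AccessPolicy
from azure.storage.sharedaccesssignature import (SharedAccessSignature,
                                                 SharedAccessPolicy)

# Grant read-only access to a single blob for one day.
policy = AccessPolicy()
policy.start = '2011-10-11'
policy.expiry = '2011-10-12'
policy.permission = 'r'

# account_name / account_key are placeholders for real storage credentials.
sas = SharedAccessSignature(account_name, account_key)
qry_str = sas.generate_signed_query_string('images/pic1.png', 'blob',
                                           SharedAccessPolicy(policy))

# qry_str maps the short parameter names ('st', 'se', 'sr', 'sp', 'sig') to
# their values; flattened into a query string they form the token that is
# appended to the blob URL (the host name below is an assumed example).
token = sas._convert_query_string(qry_str)
url = ('http://' + account_name + '.blob.core.windows.net'
       '/images/pic1.png?' + token)
```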
The diff for one file is not shown because of its large size.
@@ -14,6 +14,7 @@
import json
import os
import time
import unittest
from exceptions import EnvironmentError

STATUS_OK = 200
@@ -60,9 +61,6 @@ class Credentials(object):
    def getStorageServicesName(self):
        return self.ns[u'storageservicesname']

    def getHostServiceID(self):
        return self.ns[u'hostserviceid']

credentials = Credentials()

def getUniqueTestRunID():
@@ -83,7 +81,8 @@ def getUniqueTestRunID():
    for bad in ["-", "_", " ", "."]:
        ret_val = ret_val.replace(bad, "")
    ret_val = ret_val.lower().strip()
    return ret_val
    # Only return the first 20 characters so the length of queue and table
    # names stays under 64. The result may not be unique, but that doesn't
    # really matter for the tests.
    return ret_val[:20]

def getUniqueNameBasedOnCurrentTime(base_name):
    '''
@@ -96,3 +95,18 @@ def getUniqueNameBasedOnCurrentTime(base_name):
        cur_time = cur_time.replace(bad, "")
    cur_time = cur_time.lower().strip()
    return base_name + cur_time

class AzureTestCase(unittest.TestCase):
    def assertNamedItemInContainer(self, container, item_name, msg=None):
        for item in container:
            if item.name == item_name:
                return

        standardMsg = '%s not found in %s' % (repr(item_name), repr(container))
        self.fail(self._formatMessage(msg, standardMsg))

    def assertNamedItemNotInContainer(self, container, item_name, msg=None):
        for item in container:
            if item.name == item_name:
                standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container))
                self.fail(self._formatMessage(msg, standardMsg))
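The two assertion helpers above are what the storage tests use to check listing results, since the listed items expose a .name attribute. An illustrative test (not part of this commit) might combine them with **list\_containers** roughly like this; account_name and account_key stand in for real test credentials:

```Python
from azure.storage import BlobService
from azuretest.util import AzureTestCase, getUniqueNameBasedOnCurrentTime

class ContainerListingTest(AzureTestCase):
    def test_created_container_is_listed(self):
        # account_name / account_key are placeholders for real credentials.
        blob_service = BlobService(account_name, account_key)
        container_name = getUniqueNameBasedOnCurrentTime('taskcontainer')
        blob_service.create_container(container_name)

        # list_containers returns items with a .name attribute, which is
        # exactly what assertNamedItemInContainer inspects.
        containers = blob_service.list_containers()
        self.assertNamedItemInContainer(containers, container_name)

        blob_service.delete_container(container_name)
        self.assertNamedItemNotInContainer(blob_service.list_containers(),
                                           container_name)
```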