diff --git libcloud/common/azure.py libcloud/common/azure.py
new file mode 100644
index 0000000..1441a07
--- /dev/null
+++ libcloud/common/azure.py
@@ -0,0 +1,186 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import time
+import base64
+import hmac
+
+from hashlib import sha256
+
+from libcloud.utils.py3 import PY3
+from libcloud.utils.py3 import httplib
+from libcloud.utils.py3 import b
+from libcloud.utils.xml import fixxpath, findtext
+from xml.etree          import ElementTree
+
+from libcloud.common.types import InvalidCredsError
+from libcloud.common.types import LibcloudError, MalformedResponseError
+from libcloud.common.base import ConnectionUserAndKey, RawResponse
+from libcloud.common.base import XmlResponse
+
+# Azure API version
+API_VERSION = '2012-02-12'
+
+# The time format for headers in Azure requests
+AZURE_TIME_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
+
+
+class AzureResponse(XmlResponse):
+
+    valid_response_codes = [httplib.NOT_FOUND, httplib.CONFLICT,
+                            httplib.BAD_REQUEST]
+
+    def success(self):
+        i = int(self.status)
+        return i >= 200 and i <= 299 or i in self.valid_response_codes
+
+    def parse_error(self, msg=None):
+        error_msg = 'Unknown error'
+
+        try:
+            # Azure does give some meaningful errors, but is inconsistent
+            # Some APIs respond with an XML error. Others just dump HTML
+            body = self.parse_body()
+
+            if type(body) == ElementTree.Element:
+                code = body.findtext(fixxpath(xpath='Code'))
+                message = body.findtext(fixxpath(xpath='Message'))
+                message = message.split('\n')[0]
+                error_msg = '%s: %s' % (code, message)
+
+        except MalformedResponseError:
+            pass
+
+        if msg:
+            error_msg = '%s - %s' % (msg, error_msg)
+
+        if self.status in [httplib.UNAUTHORIZED, httplib.FORBIDDEN]:
+            raise InvalidCredsError(error_msg)
+
+        raise LibcloudError('%s Status code: %d.' % (error_msg, self.status),
+                            driver=self)
+
+
+class AzureRawResponse(RawResponse):
+    pass
+
+
+class AzureConnection(ConnectionUserAndKey):
+    """
+    Represents a single connection to Azure
+    """
+
+    responseCls = AzureResponse
+    rawResponseCls = AzureRawResponse
+
+    def add_default_params(self, params):
+        return params
+
+    def pre_connect_hook(self, params, headers):
+        headers = copy.deepcopy(headers)
+
+        # We have to add a date header in GMT
+        headers['x-ms-date'] = time.strftime(AZURE_TIME_FORMAT, time.gmtime())
+        headers['x-ms-version'] = API_VERSION
+
+        # Add the authorization header
+        headers['Authorization'] = self._get_azure_auth_signature(
+            method=self.method, headers=headers, params=params,
+            account=self.user_id, secret_key=self.key, path=self.action)
+
+        # Azure complains about this header in 'raw' connections
+        headers.pop('Host', None)
+
+        return params, headers
+
+    def _get_azure_auth_signature(self, method, headers, params,
+                                  account, secret_key, path='/'):
+        """
+        Signature = Base64( HMAC-SHA256( YourSecretAccessKeyID,
+                            UTF-8-Encoding-Of( StringToSign ) ) ) );
+
+        StringToSign = HTTP-VERB + "\n" +
+            Content-Encoding + "\n" +
+            Content-Language + "\n" +
+            Content-Length + "\n" +
+            Content-MD5 + "\n" +
+            Content-Type + "\n" +
+            Date + "\n" +
+            If-Modified-Since + "\n" +
+            If-Match + "\n" +
+            If-None-Match + "\n" +
+            If-Unmodified-Since + "\n" +
+            Range + "\n" +
+            CanonicalizedHeaders +
+            CanonicalizedResource;
+        """
+        special_header_values = []
+        xms_header_values = []
+        param_list = []
+        special_header_keys = ['content-encoding', 'content-language',
+                               'content-length', 'content-md5',
+                               'content-type', 'date', 'if-modified-since',
+                               'if-match', 'if-none-match',
+                               'if-unmodified-since', 'range']
+
+        # Split the x-ms headers and normal headers and make everything
+        # lower case
+        headers_copy = {}
+        for header, value in headers.items():
+            header = header.lower()
+            value = str(value).strip()
+            if header.startswith('x-ms-'):
+                xms_header_values.append((header, value))
+            else:
+                headers_copy[header] = value
+
+        # Get the values for the headers in the specific order
+        for header in special_header_keys:
+            header = header.lower()  # Just for safety
+            if header in headers_copy:
+                special_header_values.append(headers_copy[header])
+            else:
+                special_header_values.append('')
+
+        # Prepare the first section of the string to be signed
+        values_to_sign = [method] + special_header_values
+        # string_to_sign = '\n'.join([method] + special_header_values)
+
+        # The x-ms-* headers have to be in lower case and sorted
+        xms_header_values.sort()
+
+        for header, value in xms_header_values:
+            values_to_sign.append('%s:%s' % (header, value))
+
+        # Add the canonicalized path
+        values_to_sign.append('/%s%s' % (account, path))
+
+        # URL query parameters (sorted and lower case)
+        for key, value in params.items():
+            param_list.append((key.lower(), str(value).strip()))
+
+        param_list.sort()
+
+        for key, value in param_list:
+            values_to_sign.append('%s:%s' % (key, value))
+
+        string_to_sign = b('\n'.join(values_to_sign))
+        secret_key = b(secret_key)
+        b64_hmac = base64.b64encode(
+            hmac.new(secret_key, string_to_sign, digestmod=sha256).digest()
+        )
+
+        return 'SharedKey %s:%s' % (self.user_id, b64_hmac.decode('utf-8'))
diff --git libcloud/storage/base.py libcloud/storage/base.py
index b45d027..f0e4e3a 100644
--- libcloud/storage/base.py
+++ libcloud/storage/base.py
@@ -134,13 +134,14 @@ class Container(object):
         return self.driver.get_object(container_name=self.name,
                                       object_name=object_name)
 
-    def upload_object(self, file_path, object_name, extra=None):
+    def upload_object(self, file_path, object_name, extra=None, **kwargs):
         return self.driver.upload_object(
-            file_path, self, object_name, extra)
+            file_path, self, object_name, extra=extra, **kwargs)
 
-    def upload_object_via_stream(self, iterator, object_name, extra=None):
+    def upload_object_via_stream(self, iterator, object_name, extra=None,
+                                 **kwargs):
         return self.driver.upload_object_via_stream(
-            iterator, self, object_name, extra)
+            iterator, self, object_name, extra=extra, **kwargs)
 
     def download_object(self, obj, destination_path, overwrite_existing=False,
                         delete_on_failure=True):
@@ -615,7 +616,7 @@ class StorageDriver(BaseDriver):
             file_size = os.path.getsize(file_path)
             upload_func_kwargs['chunked'] = False
 
-        if file_size is not None:
+        if file_size is not None and 'Content-Length' not in headers:
             headers['Content-Length'] = file_size
 
         headers['Content-Type'] = content_type
@@ -676,7 +677,7 @@ class StorageDriver(BaseDriver):
         return True, data_hash, bytes_transferred
 
     def _stream_data(self, response, iterator, chunked=False,
-                     calculate_hash=True, chunk_size=None):
+                     calculate_hash=True, chunk_size=None, data=None):
         """
         Stream a data over an http connection.
 
diff --git libcloud/storage/drivers/azure_blobs.py libcloud/storage/drivers/azure_blobs.py
new file mode 100644
index 0000000..61360ae
--- /dev/null
+++ libcloud/storage/drivers/azure_blobs.py
@@ -0,0 +1,979 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+import base64
+import hmac
+import re
+import os
+import binascii
+
+from hashlib import sha256
+from xml.etree.ElementTree import Element, SubElement
+
+from libcloud.utils.py3 import PY3
+from libcloud.utils.py3 import httplib
+from libcloud.utils.py3 import urlquote
+from libcloud.utils.py3 import tostring
+from libcloud.utils.py3 import b
+
+from libcloud.utils.xml import fixxpath, findtext
+from libcloud.utils.files import read_in_chunks
+from libcloud.common.types import LibcloudError
+from libcloud.common.azure import AzureConnection
+
+from libcloud.storage.base import Object, Container, StorageDriver
+from libcloud.storage.types import ContainerIsNotEmptyError
+from libcloud.storage.types import ContainerAlreadyExistsError
+from libcloud.storage.types import InvalidContainerNameError
+from libcloud.storage.types import ContainerDoesNotExistError
+from libcloud.storage.types import ObjectDoesNotExistError
+from libcloud.storage.types import ObjectHashMismatchError
+
+if PY3:
+    from io import FileIO as file
+
+# Desired number of items in each response inside a paginated request
+RESPONSES_PER_REQUEST = 100
+
+# As per the Azure documentation, if the upload file size is less than
+# 64MB, we can upload it in a single request. However, in real life azure
+# servers seem to disconnect randomly after around 5 MB or 200s of upload.
+# So, it is better that for file sizes greater than 4MB, we upload it in
+# chunks.
+# Also, with large sizes, if we use a lease, the lease will timeout after
+# 60 seconds, but the upload might still be in progress. This can be
+# handled in code, but if we use chunked uploads, the lease renewal will
+# happen automatically.
+AZURE_BLOCK_MAX_SIZE = 4 * 1024 * 1024
+
+# Azure block blobs must be maximum 4MB
+# Azure page blobs must be aligned in 512 byte boundaries (4MB fits that)
+AZURE_CHUNK_SIZE = 4 * 1024 * 1024
+
+# Azure page blob must be aligned in 512 byte boundaries
+AZURE_PAGE_CHUNK_SIZE = 512
+
+# The time period (in seconds) for which a lease must be obtained.
+# If set as -1, we get an infinite lease, but that is a bad idea. If
+# after getting an infinite lease, there was an issue in releasing the
+# lease, the object will remain 'locked' forever, unless the lease is
+# released using the lease_id (which is not exposed to the user)
+AZURE_LEASE_PERIOD = 60
+
+
+class AzureBlobLease(object):
+    """
+    A class to help in leasing an azure blob and renewing the lease
+    """
+    def __init__(self, driver, object_path, use_lease):
+        """
+        @param driver: The Azure storage driver that is being used
+        @type driver: L{AzureStorageDriver}
+
+        @param object_path: The path of the object we need to lease
+        @type object_path: C{str}
+
+        @param use_lease: Indicates if we must take a lease or not
+        @type use_lease: C{bool}
+        """
+        self.object_path = object_path
+        self.driver = driver
+        self.use_lease = use_lease
+        self.lease_id = None
+        self.params = {'comp': 'lease'}
+
+    def renew(self):
+        """
+        Renew the lease if it is older than a predefined time period
+        """
+        if self.lease_id is None:
+            return
+
+        headers = {'x-ms-lease-action': 'renew',
+                   'x-ms-lease-id': self.lease_id,
+                   'x-ms-lease-duration': '60'}
+
+        response = self.driver.connection.request(self.object_path,
+                                                  headers=headers,
+                                                  params=self.params,
+                                                  method='PUT')
+
+        if response.status != httplib.OK:
+            raise LibcloudError('Unable to obtain lease', driver=self)
+
+    def update_headers(self, headers):
+        """
+        Update the lease id in the headers
+        """
+        if self.lease_id:
+            headers['x-ms-lease-id'] = self.lease_id
+
+    def __enter__(self):
+        if not self.use_lease:
+            return self
+
+        headers = {'x-ms-lease-action': 'acquire',
+                   'x-ms-lease-duration': '60'}
+
+        response = self.driver.connection.request(self.object_path,
+                                                  headers=headers,
+                                                  params=self.params,
+                                                  method='PUT')
+
+        if response.status == httplib.NOT_FOUND:
+            return self
+        elif response.status != httplib.CREATED:
+            raise LibcloudError('Unable to obtain lease', driver=self)
+
+        self.lease_id = response.headers['x-ms-lease-id']
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if self.lease_id is None:
+            return
+
+        headers = {'x-ms-lease-action': 'release',
+                   'x-ms-lease-id': self.lease_id}
+        response = self.driver.connection.request(self.object_path,
+                                                  headers=headers,
+                                                  params=self.params,
+                                                  method='PUT')
+
+        if response.status != httplib.OK:
+            raise LibcloudError('Unable to release lease', driver=self)
+
+
+class AzureBlobsConnection(AzureConnection):
+    """
+    Represents a single connection to Azure Blobs
+    """
+    host = 'blob.core.windows.net'
+
+
+class AzureBlobsStorageDriver(StorageDriver):
+    name = 'Microsoft Azure (blobs)'
+    website = 'http://windows.azure.com/'
+    connectionCls = AzureBlobsConnection
+    hash_type = 'md5'
+    supports_chunked_encoding = False
+    ex_blob_type = 'BlockBlob'
+
+    def __init__(self, key, secret=None, secure=True, host=None, port=None,
+                 **kwargs):
+
+        # The hostname must be 'account.blob.core.windows.net'
+        self.connectionCls.host = '%s.%s' % (key, self.connectionCls.host)
+
+        # B64decode() this key and keep it, so that we don't have to do
+        # so for every request. Minor performance improvement
+        secret = base64.b64decode(b(secret))
+
+        super(AzureBlobsStorageDriver, self).__init__(
+                                        key=key, secret=secret,
+                                        secure=secure, host=host,
+                                        port=port, **kwargs)
+
+    def _xml_to_container(self, node):
+        """
+        Converts a container XML node to a container instance
+
+        @param node: XML info of the container
+        @type node: L{xml.etree.ElementTree.Element}
+
+        @return: A container instance
+        @rtype: L{Container}
+        """
+
+        name = node.findtext(fixxpath(xpath='Name'))
+        props = node.find(fixxpath(xpath='Properties'))
+        metadata = node.find(fixxpath(xpath='Metadata'))
+
+        extra = {
+            'url': node.findtext(fixxpath(xpath='Url')),
+            'last_modified': node.findtext(fixxpath(xpath='Last-Modified')),
+            'etag': props.findtext(fixxpath(xpath='Etag')),
+            'lease': {
+                'status': props.findtext(fixxpath(xpath='LeaseStatus')),
+                'state': props.findtext(fixxpath(xpath='LeaseState')),
+                'duration': props.findtext(fixxpath(xpath='LeaseDuration')),
+            },
+            'meta_data': {}
+        }
+
+        for meta in metadata.getchildren():
+            extra['meta_data'][meta.tag] = meta.text
+
+        return Container(name=name, extra=extra, driver=self)
+
+    def _response_to_container(self, container_name, response):
+        """
+        Converts a HTTP response to a container instance
+
+        @param container_name: Name of the container
+        @type container_name: C{str}
+
+        @param response: HTTP Response
+        @type response: L{}
+
+        @return: A container instance
+        @rtype: L{Container}
+        """
+
+        headers = response.headers
+        extra = {
+            'url': 'http://%s%s' % (response.connection.host,
+                                    response.connection.action),
+            'etag': headers['etag'],
+            'last_modified': headers['last-modified'],
+            'lease': {
+                'status': headers.get('x-ms-lease-status', None),
+                'state': headers.get('x-ms-lease-state', None),
+                'duration': headers.get('x-ms-lease-duration', None),
+            },
+            'meta_data': {}
+        }
+
+        for key, value in response.headers.items():
+            if key.startswith('x-ms-meta-'):
+                key = key.split('x-ms-meta-')[1]
+                extra['meta_data'][key] = value
+
+        return Container(name=container_name, extra=extra, driver=self)
+
+    def _xml_to_object(self, container, blob):
+        """
+        Converts a BLOB XML node to an object instance
+
+        @param container: Instance of the container holding the blob
+        @type: L{Container}
+
+        @param blob: XML info of the blob
+        @type blob: L{}
+
+        @return: An object instance
+        @rtype: L{Object}
+        """
+
+        name = blob.findtext(fixxpath(xpath='Name'))
+        props = blob.find(fixxpath(xpath='Properties'))
+        metadata = blob.find(fixxpath(xpath='Metadata'))
+        etag = props.findtext(fixxpath(xpath='Etag'))
+        size = int(props.findtext(fixxpath(xpath='Content-Length')))
+
+        extra = {
+            'content_type': props.findtext(fixxpath(xpath='Content-Type')),
+            'etag': etag,
+            'md5_hash': props.findtext(fixxpath(xpath='Content-MD5')),
+            'last_modified': props.findtext(fixxpath(xpath='Last-Modified')),
+            'url': blob.findtext(fixxpath(xpath='Url')),
+            'hash': props.findtext(fixxpath(xpath='Etag')),
+            'lease': {
+                'status': props.findtext(fixxpath(xpath='LeaseStatus')),
+                'state': props.findtext(fixxpath(xpath='LeaseState')),
+                'duration': props.findtext(fixxpath(xpath='LeaseDuration')),
+            },
+            'content_encoding': props.findtext(fixxpath(
+                                             xpath='Content-Encoding')),
+            'content_language': props.findtext(fixxpath(
+                                             xpath='Content-Language')),
+            'blob_type': props.findtext(fixxpath(xpath='BlobType'))
+        }
+
+        if extra['md5_hash']:
+            extra['md5_hash'] = binascii.hexlify(
+                            base64.b64decode(b(extra['md5_hash'])))
+
+        meta_data = {}
+        for meta in metadata.getchildren():
+            meta_data[meta.tag] = meta.text
+
+        return Object(name=name, size=size, hash=etag, meta_data=meta_data,
+                      extra=extra, container=container, driver=self)
+
+    def _response_to_object(self, object_name, container, response):
+        """
+        Converts a HTTP response to an object (from headers)
+
+        @param object_name: Name of the object
+        @type object_name: C{str}
+
+        @param container: Instance of the container holding the blob
+        @type: L{Container}
+
+        @param response: HTTP Response
+        @type response: L{}
+
+        @return: An object instance
+        @rtype: L{Object}
+        """
+
+        headers = response.headers
+        size = int(headers['content-length'])
+        etag = headers['etag']
+
+        extra = {
+            'url': 'http://%s%s' % (response.connection.host,
+                                    response.connection.action),
+            'etag': etag,
+            'md5_hash': headers.get('content-md5', None),
+            'content_type': headers.get('content-type', None),
+            'content_language': headers.get('content-language', None),
+            'content_encoding': headers.get('content-encoding', None),
+            'last_modified': headers['last-modified'],
+            'lease': {
+                'status': headers.get('x-ms-lease-status', None),
+                'state': headers.get('x-ms-lease-state', None),
+                'duration': headers.get('x-ms-lease-duration', None),
+            },
+            'blob_type': headers['x-ms-blob-type']
+        }
+
+        if extra['md5_hash']:
+            extra['md5_hash'] = binascii.hexlify(
+                            base64.b64decode(b(extra['md5_hash'])))
+
+        meta_data = {}
+        for key, value in response.headers.items():
+            if key.startswith('x-ms-meta-'):
+                key = key.split('x-ms-meta-')[1]
+                meta_data[key] = value
+
+        return Object(name=object_name, size=size, hash=etag, extra=extra,
+                      meta_data=meta_data, container=container, driver=self)
+
+    def iterate_containers(self):
+        """
+        @inherits: L{StorageDriver.iterate_containers}
+        """
+        params = {'comp': 'list',
+                  'maxresults': RESPONSES_PER_REQUEST,
+                  'include': 'metadata'}
+
+        while True:
+            response = self.connection.request('/', params)
+            if response.status != httplib.OK:
+                raise LibcloudError('Unexpected status code: %s' %
+                                    (response.status), driver=self)
+
+            body = response.parse_body()
+            containers = body.find(fixxpath(xpath='Containers'))
+            containers = containers.findall(fixxpath(xpath='Container'))
+
+            for container in containers:
+                yield self._xml_to_container(container)
+
+            params['marker'] = body.findtext('NextMarker')
+            if not params['marker']:
+                break
+
+    def iterate_container_objects(self, container):
+        """
+        @inherits: L{StorageDriver.iterate_container_objects}
+        """
+        params = {'restype': 'container',
+                  'comp': 'list',
+                  'maxresults': RESPONSES_PER_REQUEST,
+                  'include': 'metadata'}
+
+        container_path = self._get_container_path(container)
+
+        while True:
+            response = self.connection.request(container_path,
+                                               params=params)
+
+            if response.status == httplib.NOT_FOUND:
+                raise ContainerDoesNotExistError(value=None,
+                                                 driver=self,
+                                                 container_name=container.name)
+
+            elif response.status != httplib.OK:
+                raise LibcloudError('Unexpected status code: %s' %
+                                    (response.status), driver=self)
+
+            body = response.parse_body()
+            blobs = body.find(fixxpath(xpath='Blobs'))
+            blobs = blobs.findall(fixxpath(xpath='Blob'))
+
+            for blob in blobs:
+                yield self._xml_to_object(container, blob)
+
+            params['marker'] = body.findtext('NextMarker')
+            if not params['marker']:
+                break
+
+    def get_container(self, container_name):
+        """
+        @inherits: L{StorageDriver.get_container}
+        """
+        params = {'restype': 'container'}
+
+        container_path = '/%s' % (container_name)
+
+        response = self.connection.request(container_path, params=params,
+                                           method='HEAD')
+
+        if response.status == httplib.NOT_FOUND:
+            raise ContainerDoesNotExistError('Container %s does not exist' %
+                                             (container_name), driver=self,
+                                             container_name=container_name)
+        elif response.status != httplib.OK:
+            raise LibcloudError('Unexpected status code: %s' %
+                                (response.status), driver=self)
+
+        return self._response_to_container(container_name, response)
+
+    def get_object(self, container_name, object_name):
+        """
+        @inherits: L{StorageDriver.get_object}
+        """
+
+        container = self.get_container(container_name=container_name)
+        object_path = self._get_object_path(container, object_name)
+
+        response = self.connection.request(object_path, method='HEAD')
+
+        if response.status == httplib.OK:
+            obj = self._response_to_object(object_name, container, response)
+            return obj
+
+        raise ObjectDoesNotExistError(value=None, driver=self,
+                                      object_name=object_name)
+
+    def _get_container_path(self, container):
+        """
+        Return a container path
+
+        @param container: Container instance
+        @type  container: L{Container}
+
+        @return: A path for this container.
+        @rtype: C{str}
+        """
+        return '/%s' % (container.name)
+
+    def _get_object_path(self, container, object_name):
+        """
+        Return an object's CDN path.
+
+        @param container: Container instance
+        @type  container: L{Container}
+
+        @param object_name: Object name
+        @type  object_name: L{str}
+
+        @return: A  path for this object.
+        @rtype: C{str}
+        """
+        container_url = self._get_container_path(container)
+        object_name_cleaned = urlquote(object_name)
+        object_path = '%s/%s' % (container_url, object_name_cleaned)
+        return object_path
+
+    def create_container(self, container_name):
+        """
+        @inherits: L{StorageDriver.create_container}
+        """
+        params = {'restype': 'container'}
+
+        container_path = '/%s' % (container_name)
+        response = self.connection.request(container_path, params=params,
+                                           method='PUT')
+
+        if response.status == httplib.CREATED:
+            return self._response_to_container(container_name, response)
+        elif response.status == httplib.CONFLICT:
+            raise ContainerAlreadyExistsError(
+                value='Container with this name already exists. The name must '
+                      'be unique among all the containers in the system',
+                container_name=container_name, driver=self)
+        elif response.status == httplib.BAD_REQUEST:
+            raise InvalidContainerNameError(value='Container name contains ' +
+                                            'invalid characters.',
+                                            container_name=container_name,
+                                            driver=self)
+
+        raise LibcloudError('Unexpected status code: %s' % (response.status),
+                            driver=self)
+
+    def delete_container(self, container):
+        """
+        @inherits: L{StorageDriver.delete_container}
+        """
+        # Azure does not check if the container is empty. So, we will do
+        # a check to ensure that the behaviour is similar to other drivers
+        for obj in container.iterate_objects():
+            raise ContainerIsNotEmptyError(
+                value='Container must be empty before it can be deleted.',
+                container_name=container.name, driver=self)
+
+        params = {'restype': 'container'}
+        container_path = self._get_container_path(container)
+
+        # Note: All the objects in the container must be deleted first
+        response = self.connection.request(container_path, params=params,
+                                           method='DELETE')
+
+        if response.status == httplib.ACCEPTED:
+            return True
+        elif response.status == httplib.NOT_FOUND:
+            raise ContainerDoesNotExistError(value=None,
+                                             driver=self,
+                                             container_name=container.name)
+
+        return False
+
+    def download_object(self, obj, destination_path, overwrite_existing=False,
+                        delete_on_failure=True):
+        """
+        @inherits: L{StorageDriver.download_object}
+        """
+        obj_path = self._get_object_path(obj.container, obj.name)
+        response = self.connection.request(obj_path, raw=True, data=None)
+
+        return self._get_object(obj=obj, callback=self._save_object,
+                                response=response,
+                                callback_kwargs={
+                                    'obj': obj,
+                                    'response': response.response,
+                                    'destination_path': destination_path,
+                                    'overwrite_existing': overwrite_existing,
+                                    'delete_on_failure': delete_on_failure},
+                                success_status_code=httplib.OK)
+
+    def download_object_as_stream(self, obj, chunk_size=None):
+        """
+        @inherits: L{StorageDriver.download_object_as_stream}
+        """
+        obj_path = self._get_object_path(obj.container, obj.name)
+        response = self.connection.request(obj_path, raw=True, data=None)
+
+        return self._get_object(obj=obj, callback=read_in_chunks,
+                                response=response,
+                                callback_kwargs={'iterator': response.response,
+                                                 'chunk_size': chunk_size},
+                                success_status_code=httplib.OK)
+
+    def _upload_in_chunks(self, response, data, iterator, object_path,
+                          blob_type, lease, calculate_hash=True):
+        """
+        Uploads data from an iterator in fixed sized chunks to Azure
+
+        @param response: Response object from the initial POST request
+        @type response: L{RawResponse}
+
+        @param data: Any data from the initial POST request
+        @type data: C{str}
+
+        @param iterator: The generator for fetching the upload data
+        @type iterator: C{generator}
+
+        @param object_path: The path of the object to which we are uploading
+        @type object_path: C{str}
+
+        @param blob_type: The blob type being uploaded
+        @type blob_type: C{str}
+
+        @param lease: The lease object to be used for renewal
+        @type lease: L{AzureBlobLease}
+
+        @keyword calculate_hash: Indicates if we must calculate the data hash
+        @type calculate_hash: C{bool}
+
+        @return: A tuple of (status, checksum, bytes transferred)
+        @rtype: C{tuple}
+        """
+
+        # Get the upload id from the response xml
+        if response.status != httplib.CREATED:
+            raise LibcloudError('Error initializing upload. Code: %d' %
+                                (response.status), driver=self)
+
+        data_hash = None
+        if calculate_hash:
+            data_hash = self._get_hash_function()
+
+        bytes_transferred = 0
+        count = 1
+        chunks = []
+        headers = {}
+
+        lease.update_headers(headers)
+
+        if blob_type == 'BlockBlob':
+            params = {'comp': 'block'}
+        else:
+            params = {'comp': 'page'}
+
+        # Read the input data in chunk sizes suitable for Azure
+        for data in read_in_chunks(iterator, AZURE_CHUNK_SIZE):
+            data = b(data)
+            content_length = len(data)
+            offset = bytes_transferred
+            bytes_transferred += content_length
+
+            if calculate_hash:
+                data_hash.update(data)
+
+            chunk_hash = self._get_hash_function()
+            chunk_hash.update(data)
+            chunk_hash = base64.b64encode(b(chunk_hash.digest()))
+
+            headers['Content-MD5'] = chunk_hash.decode('utf-8')
+            headers['Content-Length'] = content_length
+
+            if blob_type == 'BlockBlob':
+                # Block id can be any unique string that is base64 encoded
+                # A 10 digit number can hold the max value of 50000 blocks
+                # that are allowed for azure
+                block_id = base64.b64encode(b('%10d' % (count)))
+                block_id = block_id.decode('utf-8')
+                params['blockid'] = block_id
+
+                # Keep this data for a later commit
+                chunks.append(block_id)
+            else:
+                headers['x-ms-page-write'] = 'update'
+                headers['x-ms-range'] = 'bytes=%d-%d' % \
+                                            (offset, bytes_transferred-1)
+
+            # Renew lease before updating
+            lease.renew()
+
+            resp = self.connection.request(object_path, method='PUT',
+                                           data=data, headers=headers,
+                                           params=params)
+
+            if resp.status != httplib.CREATED:
+                resp.parse_error()
+                raise LibcloudError('Error uploading chunk %d. Code: %d' %
+                                    (count, resp.status), driver=self)
+
+            count += 1
+
+        if calculate_hash:
+            data_hash = data_hash.hexdigest()
+
+        if blob_type == 'BlockBlob':
+            self._commit_blocks(object_path, chunks, lease)
+
+        # The Azure service does not return a hash immediately for
+        # chunked uploads. It takes some time for the data to get synced
+        response.headers['content-md5'] = None
+
+        return (True, data_hash, bytes_transferred)
+
+    def _commit_blocks(self, object_path, chunks, lease):
+        """
+        Makes a final commit of the data.
+
+        @param object_path: Server side object path.
+        @type object_path: C{str}
+
+        @param chunks: A list of base64 encoded block ids to commit.
+        @type chunks: C{list}
+        """
+
+        root = Element('BlockList')
+
+        for block_id in chunks:
+            part = SubElement(root, 'Uncommitted')
+            part.text = str(block_id)
+
+        data = tostring(root)
+        params = {'comp': 'blocklist'}
+        headers = {}
+
+        lease.update_headers(headers)
+        lease.renew()
+
+        response = self.connection.request(object_path, data=data,
+                                           params=params, headers=headers,
+                                           method='PUT')
+
+        if response.status != httplib.CREATED:
+            raise LibcloudError('Error in blocklist commit', driver=self)
+
+    def _check_values(self, blob_type, object_size):
+        """
+        Checks if extension arguments are valid
+
+        @param blob_type: The blob type that is being uploaded
+        @type blob_type: C{str}
+
+        @param object_size: The (max) size of the object being uploaded
+        @type object_size: C{int}
+        """
+
+        if blob_type not in ['BlockBlob', 'PageBlob']:
+            raise LibcloudError('Invalid blob type', driver=self)
+
+        if blob_type == 'PageBlob':
+            if not object_size:
+                raise LibcloudError('Max blob size is mandatory for page blob',
+                                    driver=self)
+
+            if object_size % AZURE_PAGE_CHUNK_SIZE:
+                raise LibcloudError('Max blob size is not aligned to '
+                                    'page boundary', driver=self)
+
+    def upload_object(self, file_path, container, object_name, extra=None,
+                      verify_hash=True, ex_blob_type=None, ex_use_lease=False):
+        """
+        Upload an object currently located on a disk.
+
+        @inherits: L{StorageDriver.upload_object}
+
+        @param ex_blob_type: Azure blob type ('BlockBlob' or 'PageBlob')
+        @type ex_blob_type: C{str}
+
+        @param ex_use_lease: Indicates if we must take a lease before upload
+        @type ex_use_lease: C{bool}
+        """
+
+        if ex_blob_type is None:
+            ex_blob_type = self.ex_blob_type
+
+        # Get the size of the file
+        file_size = os.stat(file_path).st_size
+
+        # The presumed size of the object
+        object_size = file_size
+
+        self._check_values(ex_blob_type, file_size)
+
+        with open(file_path, 'rb') as file_handle:
+            iterator = iter(file_handle)
+
+            # If size is greater than 64MB or type is Page, upload in chunks
+            if ex_blob_type == 'PageBlob' or file_size > AZURE_BLOCK_MAX_SIZE:
+                # For chunked upload of block blobs, the initial size must
+                # be 0.
+                if ex_blob_type == 'BlockBlob':
+                    object_size = None
+
+                object_path = self._get_object_path(container, object_name)
+
+                upload_func = self._upload_in_chunks
+                upload_func_kwargs = {'iterator': iterator,
+                                      'object_path': object_path,
+                                      'blob_type': ex_blob_type,
+                                      'lease': None}
+            else:
+                upload_func = self._stream_data
+                upload_func_kwargs = {'iterator': iterator,
+                                      'chunked': False,
+                                      'calculate_hash': verify_hash}
+
+            return self._put_object(container=container,
+                                    object_name=object_name,
+                                    object_size=object_size,
+                                    upload_func=upload_func,
+                                    upload_func_kwargs=upload_func_kwargs,
+                                    file_path=file_path, extra=extra,
+                                    verify_hash=verify_hash,
+                                    blob_type=ex_blob_type,
+                                    use_lease=ex_use_lease)
+
+    def upload_object_via_stream(self, iterator, container, object_name,
+                                 verify_hash=False, extra=None,
+                                 ex_use_lease=False, ex_blob_type=None,
+                                 ex_page_blob_size=None):
+        """
+        @inherits: L{StorageDriver.upload_object_via_stream}
+
+        @param ex_blob_type: Azure blob type ('BlockBlob' or 'PageBlob')
+        @type ex_blob_type: C{str}
+
+        @param ex_page_blob_size: The maximum size to which the
+            page blob can grow
+        @type ex_page_blob_size: C{int}
+
+        @param ex_use_lease: Indicates if we must take a lease before upload
+        @type ex_use_lease: C{bool}
+        """
+
+        if ex_blob_type is None:
+            ex_blob_type = self.ex_blob_type
+
+        self._check_values(ex_blob_type, ex_page_blob_size)
+
+        object_path = self._get_object_path(container, object_name)
+
+        upload_func = self._upload_in_chunks
+        upload_func_kwargs = {'iterator': iterator,
+                              'object_path': object_path,
+                              'blob_type': ex_blob_type,
+                              'lease': None}
+
+        return self._put_object(container=container,
+                                object_name=object_name,
+                                object_size=ex_page_blob_size,
+                                upload_func=upload_func,
+                                upload_func_kwargs=upload_func_kwargs,
+                                extra=extra, verify_hash=verify_hash,
+                                blob_type=ex_blob_type,
+                                use_lease=ex_use_lease)
+
+    def delete_object(self, obj):
+        """
+        @inherits: L{StorageDriver.delete_object}
+        """
+        object_path = self._get_object_path(obj.container, obj.name)
+        response = self.connection.request(object_path, method='DELETE')
+
+        if response.status == httplib.ACCEPTED:
+            return True
+        elif response.status == httplib.NOT_FOUND:
+            raise ObjectDoesNotExistError(value=None, driver=self,
+                                          object_name=obj.name)
+
+        return False
+
+    def _update_metadata(self, headers, meta_data):
+        """
+        Update the given metadata in the headers
+
+        @param headers: The headers dictionary to be updated
+        @type headers: C{dict}
+
+        @param meta_data: Metadata key value pairs
+        @type meta_data: C{dict}
+        """
+        for key, value in list(meta_data.items()):
+            key = 'x-ms-meta-%s' % (key)
+            headers[key] = value
+
+    def _prepare_upload_headers(self, object_name, object_size,
+                                extra, meta_data, blob_type):
+        """
+        Prepare headers for uploading an object
+
+        @param object_name: The full name of the object being updated
+        @type object_name: C{str}
+
+        @param object_size: The size of the object. In case of PageBlobs,
+            this indicates the maximum size the blob can grow to
+        @type object_size: C{int}
+
+        @param extra: Extra control data for the upload
+        @type extra: C{dict}
+
+        @param meta_data: Metadata key value pairs
+        @type meta_data: C{dict}
+
+        @param blob_type: Page or Block blob type
+        @type blob_type: C{str}
+        """
+        headers = {}
+
+        if blob_type is None:
+            blob_type = self.ex_blob_type
+
+        headers['x-ms-blob-type'] = blob_type
+
+        self._update_metadata(headers, meta_data)
+
+        if object_size is not None:
+            headers['Content-Length'] = object_size
+
+        if blob_type == 'PageBlob':
+            headers['Content-Length'] = 0
+            headers['x-ms-blob-content-length'] = object_size
+
+        return headers
+
+    def _put_object(self, container, object_name, object_size, upload_func,
+                    upload_func_kwargs, file_path=None, extra=None,
+                    verify_hash=True, blob_type=None, use_lease=False):
+        """
+        Control function that does the real job of uploading data to a blob
+        """
+        extra = extra or {}
+        meta_data = extra.get('meta_data', {})
+        content_type = extra.get('content_type', None)
+
+        headers = self._prepare_upload_headers(object_name, object_size,
+                                               extra, meta_data, blob_type)
+
+        object_path = self._get_object_path(container, object_name)
+
+        # Get a lease if required and do the operations
+        with AzureBlobLease(self, object_path, use_lease) as lease:
+            if 'lease' in upload_func_kwargs:
+                upload_func_kwargs['lease'] = lease
+
+            lease.update_headers(headers)
+
+            iterator = iter('')
+            result_dict = self._upload_object(object_name, content_type,
+                                              upload_func, upload_func_kwargs,
+                                              object_path, headers=headers,
+                                              file_path=file_path,
+                                              iterator=iterator)
+
+            response = result_dict['response']
+            bytes_transferred = result_dict['bytes_transferred']
+            data_hash = result_dict['data_hash']
+            headers = response.headers
+            response = response.response
+
+        if response.status != httplib.CREATED:
+            raise LibcloudError(
+                'Unexpected status code, status_code=%s' % (response.status),
+                driver=self)
+
+        server_hash = headers['content-md5']
+
+        if server_hash:
+            server_hash = binascii.hexlify(base64.b64decode(b(server_hash)))
+            server_hash = server_hash.decode('utf-8')
+        else:
+            # TODO: HACK - We could poll the object for a while and get
+            # the hash
+            pass
+
+        if (verify_hash and server_hash and data_hash != server_hash):
+            raise ObjectHashMismatchError(
+                value='MD5 hash checksum does not match',
+                object_name=object_name, driver=self)
+
+        return Object(name=object_name, size=bytes_transferred,
+                      hash=headers['etag'], extra=None,
+                      meta_data=meta_data, container=container,
+                      driver=self)
+
+    def ex_set_object_metadata(self, obj, meta_data):
+        """
+        Set metadata for an object
+
+        @param obj: The blob object
+        @type obj: L{Object}
+
+        @param meta_data: Metadata key value pairs
+        @type meta_data: C{dict}
+        """
+        object_path = self._get_object_path(obj.container, obj.name)
+        params = {'comp': 'metadata'}
+        headers = {}
+
+        self._update_metadata(headers, meta_data)
+
+        response = self.connection.request(object_path, method='PUT',
+                                           params=params,
+                                           headers=headers)
+
+        if response.status != httplib.OK:
+            response.parse_error('Setting metadata')
diff --git libcloud/storage/providers.py libcloud/storage/providers.py
index 94ee89a..7c77eb0 100644
--- libcloud/storage/providers.py
+++ libcloud/storage/providers.py
@@ -45,6 +45,8 @@ DRIVERS = {
         ('libcloud.storage.drivers.nimbus', 'NimbusStorageDriver'),
     Provider.LOCAL:
         ('libcloud.storage.drivers.local', 'LocalStorageDriver'),
+    Provider.AZURE_BLOBS:
+        ('libcloud.storage.drivers.azure_blobs', 'AzureBlobsStorageDriver'),
 
     # Deprecated
     Provider.CLOUDFILES_US:
diff --git libcloud/storage/types.py libcloud/storage/types.py
index d3c3b55..8301b92 100644
--- libcloud/storage/types.py
+++ libcloud/storage/types.py
@@ -57,6 +57,7 @@ class Provider(object):
     NIMBUS = 'nimbus'
     LOCAL = 'local'
     CLOUDFILES = 'cloudfiles'
+    AZURE_BLOBS = 'azure_blobs'
 
     # Deperecated
     CLOUDFILES_US = 'cloudfiles_us'
diff --git libcloud/test/secrets.py-dist libcloud/test/secrets.py-dist
index 3ba7dc5..df22102 100644
--- libcloud/test/secrets.py-dist
+++ libcloud/test/secrets.py-dist
@@ -44,6 +44,9 @@ HOSTVIRTUAL_PARAMS = ('key',)
 STORAGE_S3_PARAMS = ('key', 'secret')
 STORAGE_GOOGLE_STORAGE_PARAMS = ('key', 'secret')
 
+# Azure key is b64 encoded and must be decoded before signing requests
+STORAGE_AZURE_BLOBS_PARAMS = ('account', 'cGFzc3dvcmQ=')
+
 # Loadbalancer
 LB_BRIGHTBOX_PARAMS = ('user', 'key')
 LB_ELB_PARAMS = ('access_id', 'secret', 'region')
diff --git libcloud/test/storage/fixtures/azure_blobs/list_containers_1.xml libcloud/test/storage/fixtures/azure_blobs/list_containers_1.xml
new file mode 100644
index 0000000..2d3678e
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_containers_1.xml
@@ -0,0 +1,29 @@
+﻿<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults AccountName="https://account.blob.core.windows.net/">
+    <MaxResults>2</MaxResults>
+    <Containers>
+        <Container>
+            <Name>container1</Name>
+            <Url>https://account.blob.core.windows.net/container1</Url>
+            <Properties>
+                <Last-Modified>Mon, 07 Jan 2013 06:31:06 GMT</Last-Modified>
+                <Etag>"0x8CFBAB7B4F23346"</Etag>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Container>
+        <Container>
+            <Name>container2</Name>
+            <Url>https://account.blob.core.windows.net/container2</Url>
+            <Properties>
+                <Last-Modified>Mon, 07 Jan 2013 06:31:07 GMT</Last-Modified>
+                <Etag>"0x8CFBAB7B5B82D8E"</Etag>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Container>
+    </Containers>
+    <NextMarker>/account/container3</NextMarker>
+</EnumerationResults>
diff --git libcloud/test/storage/fixtures/azure_blobs/list_containers_2.xml libcloud/test/storage/fixtures/azure_blobs/list_containers_2.xml
new file mode 100644
index 0000000..3c0e6c3
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_containers_2.xml
@@ -0,0 +1,30 @@
+﻿<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults AccountName="https://account.blob.core.windows.net/">
+    <Marker>/account/container3</Marker>
+    <MaxResults>2</MaxResults>
+    <Containers>
+        <Container>
+            <Name>container3</Name>
+            <Url>https://account.blob.core.windows.net/container3</Url>
+            <Properties>
+                <Last-Modified>Mon, 07 Jan 2013 06:31:08 GMT</Last-Modified>
+                <Etag>"0x8CFBAB7B6452A71"</Etag>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Container>
+        <Container>
+            <Name>container4</Name>
+            <Url>https://account.blob.core.windows.net/container4</Url>
+            <Properties>
+                <Last-Modified>Fri, 04 Jan 2013 08:32:41 GMT</Last-Modified>
+                <Etag>"0x8CFB86D32305484"</Etag>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Container>
+    </Containers>
+    <NextMarker />
+</EnumerationResults>
diff --git libcloud/test/storage/fixtures/azure_blobs/list_containers_empty.xml libcloud/test/storage/fixtures/azure_blobs/list_containers_empty.xml
new file mode 100644
index 0000000..d79dd65
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_containers_empty.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults AccountName="http://account.blob.core.windows.net">
+  <Prefix></Prefix>
+  <Marker></Marker>
+  <MaxResults>100</MaxResults>
+  <Containers />
+  <NextMarker />
+</EnumerationResults>
diff --git libcloud/test/storage/fixtures/azure_blobs/list_objects_1.xml libcloud/test/storage/fixtures/azure_blobs/list_objects_1.xml
new file mode 100644
index 0000000..713f8d6
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_objects_1.xml
@@ -0,0 +1,49 @@
+﻿<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults ContainerName="https://account.blob.core.windows.net/test_container">
+    <MaxResults>2</MaxResults>
+    <Blobs>
+        <Blob>
+            <Name>object1.txt</Name>
+            <Url>https://account.blob.core.windows.net/test_container/object1.txt</Url>
+            <Properties>
+                <Last-Modified>Fri, 04 Jan 2013 09:48:06 GMT</Last-Modified>
+                <Etag>0x8CFB877BB56A6FB</Etag>
+                <Content-Length>0</Content-Length>
+                <Content-Type>application/octet-stream</Content-Type>
+                <Content-Encoding />
+                <Content-Language />
+                <Content-MD5>1B2M2Y8AsgTpgAmY7PhCfg==</Content-MD5>
+                <Cache-Control />
+                <BlobType>BlockBlob</BlobType>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata>
+                <meta1>value1</meta1>
+                <meta2>value2</meta2>
+            </Metadata>
+        </Blob>
+        <Blob>
+            <Name>object2.txt</Name>
+            <Url>https://account.blob.core.windows.net/test_container/object2.txt</Url>
+            <Properties>
+                <Last-Modified>Sat, 05 Jan 2013 03:51:42 GMT</Last-Modified>
+                <Etag>0x8CFB90F1BA8CD8F</Etag>
+                <Content-Length>1048576</Content-Length>
+                <Content-Type>application/octet-stream</Content-Type>
+                <Content-Encoding />
+                <Content-Language />
+                <Content-MD5>ttgbNgpWctgMJ0MPORU+LA==</Content-MD5>
+                <Cache-Control />
+                <BlobType>BlockBlob</BlobType>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata>
+                <meta1>value1</meta1>
+                <meta2>value2</meta2>
+            </Metadata>
+        </Blob>
+    </Blobs>
+    <NextMarker>2!76!MDAwMDExIXNvbWUxMTcudHh0ITAwMDAyOCE5OTk5LTEyLTMxVDIzOjU5OjU5Ljk5OTk5OTlaIQ--</NextMarker>
+</EnumerationResults>
diff --git libcloud/test/storage/fixtures/azure_blobs/list_objects_2.xml libcloud/test/storage/fixtures/azure_blobs/list_objects_2.xml
new file mode 100644
index 0000000..973985b
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_objects_2.xml
@@ -0,0 +1,43 @@
+﻿<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults ContainerName="https://account.blob.core.windows.net/test_container">
+    <Marker>object3.txt</Marker>
+    <MaxResults>2</MaxResults>
+    <Blobs>
+        <Blob>
+            <Name>object3.txt</Name>
+            <Url>https://account.blob.core.windows.net/test_container/object3.txt</Url>
+            <Properties>
+                <Last-Modified>Sat, 05 Jan 2013 03:52:08 GMT</Last-Modified>
+                <Etag>0x8CFB90F2B6FC022</Etag>
+                <Content-Length>1048576</Content-Length>
+                <Content-Type>application/octet-stream</Content-Type>
+                <Content-Encoding />
+                <Content-Language />
+                <Content-MD5>ttgbNgpWctgMJ0MPORU+LA==</Content-MD5>
+                <Cache-Control />
+                <BlobType>BlockBlob</BlobType>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Blob>
+        <Blob>
+            <Name>object4.txt</Name>
+            <Url>https://account.blob.core.windows.net/test_container/object4.txt</Url>
+            <Properties>
+                <Last-Modified>Fri, 04 Jan 2013 10:20:14 GMT</Last-Modified>
+                <Etag>0x8CFB87C38717450</Etag>
+                <Content-Length>0</Content-Length>
+                <Content-Type>application/octet-stream</Content-Type>
+                <Content-Encoding /><Content-Language />
+                <Content-MD5>1B2M2Y8AsgTpgAmY7PhCfg==</Content-MD5>
+                <Cache-Control />
+                <BlobType>BlockBlob</BlobType>
+                <LeaseStatus>unlocked</LeaseStatus>
+                <LeaseState>available</LeaseState>
+            </Properties>
+            <Metadata />
+        </Blob>
+    </Blobs>
+    <NextMarker />
+</EnumerationResults>
diff --git libcloud/test/storage/fixtures/azure_blobs/list_objects_empty.xml libcloud/test/storage/fixtures/azure_blobs/list_objects_empty.xml
new file mode 100644
index 0000000..2ccbf4f
--- /dev/null
+++ libcloud/test/storage/fixtures/azure_blobs/list_objects_empty.xml
@@ -0,0 +1,6 @@
+﻿<?xml version="1.0" encoding="utf-8"?>
+<EnumerationResults ContainerName="https://account.blob.core.windows.net/test_container">
+    <MaxResults>2</MaxResults>
+    <Blobs />
+    <NextMarker />
+</EnumerationResults>
diff --git libcloud/test/storage/test_azure_blobs.py libcloud/test/storage/test_azure_blobs.py
new file mode 100644
index 0000000..2836909
--- /dev/null
+++ libcloud/test/storage/test_azure_blobs.py
@@ -0,0 +1,942 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import unittest
+import tempfile
+
+from xml.etree import ElementTree as ET
+from libcloud.utils.py3 import httplib
+from libcloud.utils.py3 import urlparse
+
+from libcloud.common.types import InvalidCredsError
+from libcloud.common.types import LibcloudError
+from libcloud.storage.base import Container, Object
+from libcloud.storage.types import ContainerDoesNotExistError
+from libcloud.storage.types import ContainerIsNotEmptyError
+from libcloud.storage.types import ContainerAlreadyExistsError
+from libcloud.storage.types import InvalidContainerNameError
+from libcloud.storage.types import ObjectDoesNotExistError
+from libcloud.storage.types import ObjectHashMismatchError
+from libcloud.storage.drivers.azure_blobs import AzureBlobsStorageDriver
+from libcloud.storage.drivers.azure_blobs import AZURE_BLOCK_MAX_SIZE
+from libcloud.storage.drivers.azure_blobs import AZURE_PAGE_CHUNK_SIZE
+from libcloud.storage.drivers.dummy import DummyIterator
+
+from libcloud.test import StorageMockHttp, MockRawResponse # pylint: disable-msg=E0611
+from libcloud.test import MockHttpTestCase # pylint: disable-msg=E0611
+from libcloud.test.file_fixtures import StorageFileFixtures # pylint: disable-msg=E0611
+from libcloud.test.secrets import STORAGE_AZURE_BLOBS_PARAMS
+
+
+try:
+    parse_qs = urlparse.parse_qs
+except AttributeError:
+    import cgi
+    parse_qs = cgi.parse_qs
+
+
+class AzureBlobsMockHttp(StorageMockHttp, MockHttpTestCase):
+
+    fixtures = StorageFileFixtures('azure_blobs')
+    base_headers = {}
+
+    def _UNAUTHORIZED(self, method, url, body, headers):
+        return (httplib.UNAUTHORIZED,
+                '',
+                self.base_headers,
+                httplib.responses[httplib.UNAUTHORIZED])
+
+    def _list_containers_EMPTY(self, method, url, body, headers):
+        body = self.fixtures.load('list_containers_empty.xml')
+        return (httplib.OK,
+                body,
+                self.base_headers,
+                httplib.responses[httplib.OK])
+
+    def _list_containers(self, method, url, body, headers):
+        query_string = urlparse.urlsplit(url).query
+        query = parse_qs(query_string)
+
+        if 'marker' not in query:
+            body = self.fixtures.load('list_containers_1.xml')
+        else:
+            body = self.fixtures.load('list_containers_2.xml')
+
+        return (httplib.OK,
+                body,
+                self.base_headers,
+                httplib.responses[httplib.OK])
+
+    def _test_container_EMPTY(self, method, url, body, headers):
+        if method == 'DELETE':
+            body = ''
+            return (httplib.ACCEPTED,
+                    body,
+                    self.base_headers,
+                    httplib.responses[httplib.ACCEPTED])
+
+        else:
+            body = self.fixtures.load('list_objects_empty.xml')
+            return (httplib.OK,
+                    body,
+                    self.base_headers,
+                    httplib.responses[httplib.OK])
+
+    def _new__container_INVALID_NAME(self, method, url, body, headers):
+        return (httplib.BAD_REQUEST,
+                body,
+                self.base_headers,
+                httplib.responses[httplib.BAD_REQUEST])
+
+    def _test_container(self, method, url, body, headers):
+        query_string = urlparse.urlsplit(url).query
+        query = parse_qs(query_string)
+
+        if 'marker' not in query:
+            body = self.fixtures.load('list_objects_1.xml')
+        else:
+            body = self.fixtures.load('list_objects_2.xml')
+
+        return (httplib.OK,
+                body,
+                self.base_headers,
+                httplib.responses[httplib.OK])
+
+    def _test_container100(self, method, url, body, headers):
+        body = ''
+
+        if method != 'HEAD':
+            return (httplib.BAD_REQUEST,
+                    body,
+                    self.base_headers,
+                    httplib.responses[httplib.BAD_REQUEST])
+
+        return (httplib.NOT_FOUND,
+                body,
+                self.base_headers,
+                httplib.responses[httplib.NOT_FOUND])
+
+    def _test_container200(self, method, url, body, headers):
+        body = ''
+
+        if method != 'HEAD':
+            return (httplib.BAD_REQUEST,
+                    body,
+                    self.base_headers,
+                    httplib.responses[httplib.BAD_REQUEST])
+
+        headers = {}
+
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        headers['last-modified'] = 'Fri, 04 Jan 2013 09:48:06 GMT'
+        headers['x-ms-lease-status'] = 'unlocked'
+        headers['x-ms-lease-state'] = 'available'
+        headers['x-ms-meta-meta1'] = 'value1'
+
+        return (httplib.OK,
+                body,
+                headers,
+                httplib.responses[httplib.OK])
+
+    def _test_container200_test(self, method, url, body, headers):
+        body = ''
+
+        if method != 'HEAD':
+            return (httplib.BAD_REQUEST,
+                    body,
+                    self.base_headers,
+                    httplib.responses[httplib.BAD_REQUEST])
+
+        headers = {}
+
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        headers['last-modified'] = 'Fri, 04 Jan 2013 09:48:06 GMT'
+        headers['content-length'] = 12345
+        headers['content-type'] = 'application/zip'
+        headers['x-ms-blob-type'] = 'Block'
+        headers['x-ms-lease-status'] = 'unlocked'
+        headers['x-ms-lease-state'] = 'available'
+        headers['x-ms-meta-rabbits'] = 'monkeys'
+
+        return (httplib.OK,
+                body,
+                headers,
+                httplib.responses[httplib.OK])
+
    def _test2_test_list_containers(self, method, url, body, headers):
        # test_get_object
        # Serves the list_containers.xml fixture together with object-style
        # response headers.
        # NOTE(review): the 'x-amz-meta-rabbits' header uses the Amazon S3
        # metadata prefix, not Azure's 'x-ms-meta-' — looks like an S3
        # copy-paste leftover; confirm against what the driver parses.
        body = self.fixtures.load('list_containers.xml')
        headers = {'content-type': 'application/zip',
                    'etag': '"e31208wqsdoj329jd"',
                    'x-amz-meta-rabbits': 'monkeys',
                    'content-length': 12345,
                    'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
                    }

        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])
+
+    def _new_container_ALREADY_EXISTS(self, method, url, body, headers):
+        # test_create_container
+        return (httplib.CONFLICT,
+                body,
+                headers,
+                httplib.responses[httplib.CONFLICT])
+
+    def _new_container(self, method, url, body, headers):
+        # test_create_container, test_delete_container
+
+        headers = {}
+
+        if method == 'PUT':
+            status = httplib.CREATED
+
+            headers['etag'] = '0x8CFB877BB56A6FB'
+            headers['last-modified'] = 'Fri, 04 Jan 2013 09:48:06 GMT'
+            headers['x-ms-lease-status'] = 'unlocked'
+            headers['x-ms-lease-state'] = 'available'
+            headers['x-ms-meta-meta1'] = 'value1'
+
+        elif method == 'DELETE':
+            status = httplib.NO_CONTENT
+
+        return (status,
+                body,
+                headers,
+                httplib.responses[status])
+
+    def _new_container_DOESNT_EXIST(self, method, url, body, headers):
+        # test_delete_container
+        return (httplib.NOT_FOUND,
+                body,
+                headers,
+                httplib.responses[httplib.NOT_FOUND])
+
+    def _foo_bar_container_NOT_FOUND(self, method, url, body, headers):
+        # test_delete_container_not_found
+        return (httplib.NOT_FOUND,
+                body,
+                headers,
+                httplib.responses[httplib.NOT_FOUND])
+
+    def _foo_bar_container_foo_bar_object_NOT_FOUND(self, method, url, body,
+                                                    headers):
+        # test_delete_object_not_found
+        return (httplib.NOT_FOUND,
+                body,
+                headers,
+                httplib.responses[httplib.NOT_FOUND])
+
+    def _foo_bar_container_foo_bar_object(self, method, url, body, headers):
+        # test_delete_object
+        return (httplib.ACCEPTED,
+                body,
+                headers,
+                httplib.responses[httplib.ACCEPTED])
+
+    def _foo_bar_container_foo_test_upload(self, method, url, body, headers):
+        # test_upload_object_success
+        body = ''
+        headers = {}
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        headers['content-md5'] = 'd4fe4c9829f7ca1cc89db7ad670d2bbd'
+        return (httplib.CREATED,
+                body,
+                headers,
+                httplib.responses[httplib.CREATED])
+
+    def _foo_bar_container_foo_test_upload_block(self, method, url,
+                                                 body, headers):
+        # test_upload_object_success
+        body = ''
+        headers = {}
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        return (httplib.CREATED,
+                body,
+                headers,
+                httplib.responses[httplib.CREATED])
+
+    def _foo_bar_container_foo_test_upload_page(self, method, url,
+                                                body, headers):
+        # test_upload_object_success
+        body = ''
+        headers = {}
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        return (httplib.CREATED,
+                body,
+                headers,
+                httplib.responses[httplib.CREATED])
+
+    def _foo_bar_container_foo_test_upload_blocklist(self, method, url,
+                                                     body, headers):
+        # test_upload_object_success
+        body = ''
+        headers = {}
+        headers['etag'] = '0x8CFB877BB56A6FB'
+        headers['content-md5'] = 'd4fe4c9829f7ca1cc89db7ad670d2bbd'
+
+        return (httplib.CREATED,
+                body,
+                headers,
+                httplib.responses[httplib.CREATED])
+
+    def _foo_bar_container_foo_test_upload_lease(self, method, url,
+                                                 body, headers):
+        # test_upload_object_success
+        action = headers['x-ms-lease-action']
+        rheaders = {'x-ms-lease-id': 'someleaseid'}
+        body = ''
+
+        if action == 'acquire':
+            return (httplib.CREATED,
+                    body,
+                    rheaders,
+                    httplib.responses[httplib.CREATED])
+
+        else:
+            if headers.get('x-ms-lease-id', None) != 'someleaseid':
+                return (httplib.BAD_REQUEST,
+                        body,
+                        rheaders,
+                        httplib.responses[httplib.BAD_REQUEST])
+
+            return (httplib.OK,
+                    body,
+                    headers,
+                    httplib.responses[httplib.CREATED])
+
+
class AzureBlobsMockRawResponse(MockRawResponse):
    """Raw (streaming) response mocks for the Azure Blobs driver tests."""

    fixtures = StorageFileFixtures('azure_blobs')

    def _foo_bar_container_foo_test_upload_INVALID_HASH(self, method, url,
                                                        body, headers):
        # test_upload_object_invalid_hash1: the mocked content-md5 is a
        # fixed value that will not match the uploaded file's real hash.
        rheaders = {
            'etag': '0x8CFB877BB56A6FB',
            'content-md5': 'd4fe4c9829f7ca1cc89db7ad670d2bbd',
        }
        return (httplib.CREATED, '', rheaders,
                httplib.responses[httplib.CREATED])

    def _foo_bar_container_foo_test_upload(self, method, url, body, headers):
        # test_upload_object_success: 201 with the blob etag and MD5.
        rheaders = {
            'etag': '0x8CFB877BB56A6FB',
            'content-md5': 'd4fe4c9829f7ca1cc89db7ad670d2bbd',
        }
        return (httplib.CREATED, '', rheaders,
                httplib.responses[httplib.CREATED])

    def _foo_bar_container_foo_bar_object(self, method, url, body, headers):
        # 1000 bytes of random payload, matching the Object size the
        # download tests declare.
        payload = self._generate_random_data(1000)
        return (httplib.OK, payload, headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_bar_object_INVALID_SIZE(self, method, url,
                                                       body, headers):
        # Empty body although the object claims a non-zero size, so the
        # size check on download fails.
        return (httplib.OK, '', headers,
                httplib.responses[httplib.OK])
+
+
class AzureBlobsTests(unittest.TestCase):
    """Tests for AzureBlobsStorageDriver against mocked HTTP responses.

    No network access is performed: the driver's connection classes are
    replaced with the mock classes defined above in setUp().
    """

    driver_type = AzureBlobsStorageDriver
    driver_args = STORAGE_AZURE_BLOBS_PARAMS
    mock_response_klass = AzureBlobsMockHttp
    mock_raw_response_klass = AzureBlobsMockRawResponse

    @classmethod
    def create_driver(cls):
        # Fixed: classmethod first argument renamed from "self" to the
        # conventional "cls" (callers are unaffected).
        return cls.driver_type(*cls.driver_args)

    def setUp(self):
        # Route every driver connection through the mock classes.
        self.driver_type.connectionCls.conn_classes = \
            (None, self.mock_response_klass)
        self.driver_type.connectionCls.rawResponseCls = \
            self.mock_raw_response_klass
        self.mock_response_klass.type = None
        self.mock_raw_response_klass.type = None
        self.driver = self.create_driver()

    def tearDown(self):
        self._remove_test_file()

    def _remove_test_file(self):
        # Best-effort cleanup of the temp file download tests create.
        file_path = os.path.abspath(__file__) + '.temp'

        try:
            os.unlink(file_path)
        except OSError:
            pass

    def test_invalid_credentials(self):
        self.mock_response_klass.type = 'UNAUTHORIZED'
        try:
            self.driver.list_containers()
        except InvalidCredsError:
            e = sys.exc_info()[1]
            self.assertEqual(True, isinstance(e, InvalidCredsError))
        else:
            self.fail('Exception was not thrown')

    def test_list_containers_empty(self):
        self.mock_response_klass.type = 'list_containers_EMPTY'
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 0)

    def test_list_containers_success(self):
        self.mock_response_klass.type = 'list_containers'
        AzureBlobsStorageDriver.RESPONSES_PER_REQUEST = 2
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 4)

        self.assertTrue('last_modified' in containers[1].extra)
        self.assertTrue('url' in containers[1].extra)
        self.assertTrue('etag' in containers[1].extra)
        self.assertTrue('lease' in containers[1].extra)
        self.assertTrue('meta_data' in containers[1].extra)

    def test_list_container_objects_empty(self):
        self.mock_response_klass.type = 'EMPTY'
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 0)

    def test_list_container_objects_success(self):
        self.mock_response_klass.type = None
        AzureBlobsStorageDriver.RESPONSES_PER_REQUEST = 2

        container = Container(name='test_container', extra={},
                              driver=self.driver)

        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 4)

        obj = objects[1]
        self.assertEqual(obj.name, 'object2.txt')
        self.assertEqual(obj.hash, '0x8CFB90F1BA8CD8F')
        self.assertEqual(obj.size, 1048576)
        self.assertEqual(obj.container.name, 'test_container')
        self.assertTrue('meta1' in obj.meta_data)
        self.assertTrue('meta2' in obj.meta_data)
        self.assertTrue('last_modified' in obj.extra)
        self.assertTrue('content_type' in obj.extra)
        self.assertTrue('content_encoding' in obj.extra)
        self.assertTrue('content_language' in obj.extra)

    def test_get_container_doesnt_exist(self):
        self.mock_response_klass.type = None
        try:
            self.driver.get_container(container_name='test_container100')
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_get_container_success(self):
        self.mock_response_klass.type = None
        container = self.driver.get_container(
            container_name='test_container200')

        # Fixed: these checks previously used assertTrue(a, b), which
        # treats the second argument as a failure *message* and never
        # compares the values.
        self.assertEqual(container.name, 'test_container200')
        self.assertEqual(container.extra['etag'], '0x8CFB877BB56A6FB')
        self.assertEqual(container.extra['last_modified'],
                         'Fri, 04 Jan 2013 09:48:06 GMT')
        self.assertEqual(container.extra['lease']['status'], 'unlocked')
        self.assertEqual(container.extra['lease']['state'], 'available')
        self.assertEqual(container.extra['meta_data']['meta1'], 'value1')

    def test_get_object_container_doesnt_exist(self):
        # This method makes two requests which makes mocking the response a bit
        # trickier
        self.mock_response_klass.type = None
        try:
            self.driver.get_object(container_name='test_container100',
                                   object_name='test')
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_get_object_success(self):
        # This method makes two requests which makes mocking the response a bit
        # trickier
        self.mock_response_klass.type = None
        obj = self.driver.get_object(container_name='test_container200',
                                     object_name='test')

        self.assertEqual(obj.name, 'test')
        self.assertEqual(obj.container.name, 'test_container200')
        self.assertEqual(obj.size, 12345)
        self.assertEqual(obj.hash, '0x8CFB877BB56A6FB')
        self.assertEqual(obj.extra['last_modified'],
                         'Fri, 04 Jan 2013 09:48:06 GMT')
        self.assertEqual(obj.extra['content_type'], 'application/zip')
        self.assertEqual(obj.meta_data['rabbits'], 'monkeys')

    def test_create_container_invalid_name(self):
        # invalid container name
        self.mock_response_klass.type = 'INVALID_NAME'
        try:
            self.driver.create_container(container_name='new--container')
        except InvalidContainerNameError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_create_container_already_exists(self):
        # container with this name already exists
        self.mock_response_klass.type = 'ALREADY_EXISTS'
        try:
            self.driver.create_container(container_name='new-container')
        except ContainerAlreadyExistsError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_create_container_success(self):
        # success
        self.mock_response_klass.type = None
        name = 'new-container'
        container = self.driver.create_container(container_name=name)
        self.assertEqual(container.name, name)

    def test_delete_container_doesnt_exist(self):
        container = Container(name='new_container', extra=None,
                              driver=self.driver)
        self.mock_response_klass.type = 'DOESNT_EXIST'
        try:
            self.driver.delete_container(container=container)
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_delete_container_not_empty(self):
        self.mock_response_klass.type = None
        AzureBlobsStorageDriver.RESPONSES_PER_REQUEST = 2

        container = Container(name='test_container', extra={},
                              driver=self.driver)

        try:
            self.driver.delete_container(container=container)
        except ContainerIsNotEmptyError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_delete_container_success(self):
        self.mock_response_klass.type = 'EMPTY'
        AzureBlobsStorageDriver.RESPONSES_PER_REQUEST = 2

        container = Container(name='test_container', extra={},
                              driver=self.driver)

        self.assertTrue(self.driver.delete_container(container=container))

    def test_delete_container_not_found(self):
        self.mock_response_klass.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        try:
            self.driver.delete_container(container=container)
        except ContainerDoesNotExistError:
            pass
        else:
            # Fixed: the concatenated message was missing a space
            # ('...was notthrown').
            self.fail('Container does not exist but an exception was not '
                      'thrown')

    def test_download_object_success(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertTrue(result)

    def test_download_object_invalid_file_size(self):
        self.mock_raw_response_klass.type = 'INVALID_SIZE'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertFalse(result)

    def test_download_object_invalid_file_already_exists(self):
        self.mock_raw_response_klass.type = 'INVALID_SIZE'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        # Destination already exists and overwrite_existing is False.
        destination_path = os.path.abspath(__file__)
        try:
            self.driver.download_object(obj=obj,
                                        destination_path=destination_path,
                                        overwrite_existing=False,
                                        delete_on_failure=True)
        except LibcloudError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_download_object_as_stream_success(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)

        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)

        stream = self.driver.download_object_as_stream(obj=obj,
                                                       chunk_size=None)
        self.assertTrue(hasattr(stream, '__iter__'))

    def test_upload_object_invalid_ex_blob_type(self):
        # Fixed comment: an unknown ex_blob_type value is rejected with a
        # LibcloudError (the old comment wrongly talked about an invalid
        # hash being detected "on the amazon side").
        file_path = os.path.abspath(__file__)
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        try:
            self.driver.upload_object(file_path=file_path, container=container,
                                      object_name=object_name,
                                      verify_hash=True,
                                      ex_blob_type='invalid-blob')
        except LibcloudError:
            e = sys.exc_info()[1]
            self.assertTrue(str(e).lower().find('invalid blob type') != -1)
        else:
            self.fail('Exception was not thrown')

    def test_upload_object_invalid_md5(self):
        # Invalid md5 is returned by azure
        self.mock_raw_response_klass.type = 'INVALID_HASH'

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        file_path = os.path.abspath(__file__)
        try:
            self.driver.upload_object(file_path=file_path, container=container,
                                      object_name=object_name,
                                      verify_hash=True)
        except ObjectHashMismatchError:
            pass
        else:
            self.fail(
                'Invalid hash was returned but an exception was not thrown')

    def test_upload_small_block_object_success(self):
        file_path = os.path.abspath(__file__)
        file_size = os.stat(file_path).st_size

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='BlockBlob')

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)

    def test_upload_big_block_object_success(self):
        file_path = tempfile.mktemp(suffix='.jpg')
        file_size = AZURE_BLOCK_MAX_SIZE + 1

        with open(file_path, 'w') as file_hdl:
            file_hdl.write('0' * file_size)

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='BlockBlob')

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)

        os.remove(file_path)

    def test_upload_page_object_success(self):
        self.mock_response_klass.use_param = None
        file_path = tempfile.mktemp(suffix='.jpg')
        file_size = AZURE_PAGE_CHUNK_SIZE * 4

        with open(file_path, 'w') as file_hdl:
            file_hdl.write('0' * file_size)

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='PageBlob')

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)

        os.remove(file_path)

    def test_upload_page_object_failure(self):
        # Page blobs must be page-aligned; an unaligned size must raise.
        file_path = tempfile.mktemp(suffix='.jpg')
        file_size = AZURE_PAGE_CHUNK_SIZE * 2 + 1

        with open(file_path, 'w') as file_hdl:
            file_hdl.write('0' * file_size)

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}

        try:
            self.driver.upload_object(file_path=file_path,
                                      container=container,
                                      object_name=object_name,
                                      extra=extra,
                                      verify_hash=False,
                                      ex_blob_type='PageBlob')
        except LibcloudError:
            e = sys.exc_info()[1]
            self.assertTrue(str(e).lower().find('not aligned') != -1)
        else:
            # Fixed: the test previously passed silently when no
            # exception was raised at all.
            self.fail('Exception was not thrown')
        finally:
            # Fixed: cleanup now happens even if an unexpected exception
            # escapes the upload call.
            os.remove(file_path)

    def test_upload_small_block_object_success_with_lease(self):
        self.mock_response_klass.use_param = 'comp'
        file_path = os.path.abspath(__file__)
        file_size = os.stat(file_path).st_size

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='BlockBlob',
                                        ex_use_lease=True)

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)
        self.mock_response_klass.use_param = None

    def test_upload_big_block_object_success_with_lease(self):
        self.mock_response_klass.use_param = 'comp'
        file_path = tempfile.mktemp(suffix='.jpg')
        file_size = AZURE_BLOCK_MAX_SIZE * 2

        with open(file_path, 'w') as file_hdl:
            file_hdl.write('0' * file_size)

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        # NOTE(review): ex_use_lease=False despite the test name —
        # preserved as-is; confirm whether True was intended.
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='BlockBlob',
                                        ex_use_lease=False)

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)

        os.remove(file_path)
        self.mock_response_klass.use_param = None

    def test_upload_page_object_success_with_lease(self):
        self.mock_response_klass.use_param = 'comp'
        file_path = tempfile.mktemp(suffix='.jpg')
        file_size = AZURE_PAGE_CHUNK_SIZE * 4

        with open(file_path, 'w') as file_hdl:
            file_hdl.write('0' * file_size)

        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name,
                                        extra=extra,
                                        verify_hash=False,
                                        ex_blob_type='PageBlob',
                                        ex_use_lease=True)

        self.assertEqual(obj.name, 'foo_test_upload')
        self.assertEqual(obj.size, file_size)
        self.assertTrue('some-value' in obj.meta_data)

        os.remove(file_path)
        self.mock_response_klass.use_param = None

    def test_upload_blob_object_via_stream(self):
        self.mock_response_klass.use_param = 'comp'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)

        object_name = 'foo_test_upload'
        iterator = DummyIterator(data=['2', '3', '5'])
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra,
                                                   ex_blob_type='BlockBlob')

        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, 3)
        self.mock_response_klass.use_param = None

    def test_upload_blob_object_via_stream_with_lease(self):
        self.mock_response_klass.use_param = 'comp'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)

        object_name = 'foo_test_upload'
        iterator = DummyIterator(data=['2', '3', '5'])
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra,
                                                   ex_blob_type='BlockBlob',
                                                   ex_use_lease=True)

        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, 3)
        self.mock_response_klass.use_param = None

    def test_upload_page_object_via_stream(self):
        self.mock_response_klass.use_param = 'comp'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)

        object_name = 'foo_test_upload'
        blob_size = AZURE_PAGE_CHUNK_SIZE
        iterator = DummyIterator(data=['1'] * blob_size)
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra,
                                                   ex_blob_type='PageBlob',
                                                   ex_page_blob_size=blob_size)

        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, blob_size)
        self.mock_response_klass.use_param = None

    def test_upload_page_object_via_stream_with_lease(self):
        self.mock_response_klass.use_param = 'comp'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)

        object_name = 'foo_test_upload'
        blob_size = AZURE_PAGE_CHUNK_SIZE
        iterator = DummyIterator(data=['1'] * blob_size)
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra,
                                                   ex_blob_type='PageBlob',
                                                   ex_page_blob_size=blob_size,
                                                   ex_use_lease=True)

        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, blob_size)

    def test_delete_object_not_found(self):
        self.mock_response_klass.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
                     meta_data=None, container=container, driver=self.driver)
        try:
            self.driver.delete_object(obj=obj)
        except ObjectDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')

    def test_delete_object_success(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
                     meta_data=None, container=container, driver=self.driver)

        result = self.driver.delete_object(obj=obj)
        self.assertTrue(result)
+
if __name__ == '__main__':
    # Propagate unittest's exit status so CI marks failures correctly.
    sys.exit(unittest.main())
