- Add more model tests
- Add noqa tags for ImportError except blocks
This commit is contained in:
Fred Park 2017-02-22 10:08:54 -08:00
Родитель 290a1ebdf9
Коммит e1d5f9f802
10 изменённых файлов: 487 добавлений и 58 удалений

Просмотреть файл

@ -36,7 +36,7 @@ import enum
import logging
try:
import pathlib2 as pathlib
except ImportError:
except ImportError: # noqa
import pathlib
import threading
# non-stdlib imports
@ -77,33 +77,33 @@ class Downloader(object):
self._creds = creds
self._spec = spec
def _check_download_conditions(self, lpath, rfile, spec):
# type: (Downloader, pathlib.Path, blobxfer.models.AzureStorageEntity,
# blobxfer.models.DownloadSpecification) -> DownloadAction
def _check_download_conditions(self, lpath, rfile):
# type: (Downloader, pathlib.Path,
# blobxfer.models.AzureStorageEntity) -> DownloadAction
"""Check for download conditions
:param Downloader self: this
:param pathlib.Path lpath: local path
:param blobxfer.models.AzureStorageEntity rfile: remote file
:param blobxfer.models.DownloadSpecification spec: download spec
:rtype: DownloadAction
:return: download action
"""
if not lpath.exists():
return DownloadAction.Download
if not spec.options.overwrite:
if not self._spec.options.overwrite:
logger.info(
'not overwriting local file: {} (remote: {}/{})'.format(
lpath, rfile.container, rfile.name))
return DownloadAction.Skip
# check skip on options, MD5 match takes priority
if spec.skip_on.md5_match:
if self._spec.skip_on.md5_match:
return DownloadAction.CheckMd5
# if neither of the remaining skip on actions are activated, download
if not spec.skip_on.filesize_match and not spec.skip_on.lmt_ge:
if (not self._spec.skip_on.filesize_match and
not self._spec.skip_on.lmt_ge):
return DownloadAction.Download
# check skip on file size match
dl_fs = None
if spec.skip_on.filesize_match:
if self._spec.skip_on.filesize_match:
lsize = lpath.stat().st_size
if rfile.mode == blobxfer.models.AzureStorageModes.Page:
lsize = blobxfer.util.page_align_content_length(lsize)
@ -113,7 +113,7 @@ class Downloader(object):
dl_fs = True
# check skip on lmt ge
dl_lmt = None
if spec.skip_on.lmt_ge:
if self._spec.skip_on.lmt_ge:
mtime = datetime.datetime.fromtimestamp(
lpath.stat().st_mtime, tz=dateutil.tz.tzlocal())
if mtime >= rfile.lmt:
@ -211,8 +211,7 @@ class Downloader(object):
# form local path for remote file
lpath = pathlib.Path(self._spec.destination.path, rfile.name)
# check on download conditions
action = self._check_download_conditions(
lpath, rfile, self._spec)
action = self._check_download_conditions(lpath, rfile)
if action == DownloadAction.Skip:
continue
elif action == DownloadAction.CheckMd5:

Просмотреть файл

@ -32,7 +32,7 @@ from builtins import ( # noqa
import logging
try:
import pathlib2 as pathlib
except ImportError:
except ImportError: # noqa
import pathlib
# non-stdlib imports
import azure.common

Просмотреть файл

@ -33,7 +33,7 @@ import logging
import multiprocessing
try:
import queue
except ImportError:
except ImportError: # noqa
import Queue as queue
# non-stdlib imports
# local imports
@ -75,7 +75,7 @@ class LocalFileMd5Offload(object):
:param LocalFileMd5Offload self: this
:param int num_workers: number of worker processes
"""
if num_workers is None or num_workers < 1:
if num_workers is None:
num_workers = multiprocessing.cpu_count() // 2
if num_workers < 1:
num_workers = 1

Просмотреть файл

@ -37,7 +37,7 @@ import logging
import os
try:
import pathlib2 as pathlib
except ImportError:
except ImportError: # noqa
import pathlib
# non-stdlib imports
# local imports

Просмотреть файл

@ -32,7 +32,7 @@ import json
import logging
try:
import pathlib2 as pathlib
except ImportError:
except ImportError: # noqa
import pathlib
# non-stdlib imports
import click

Просмотреть файл

@ -3,7 +3,7 @@ import os
import re
try:
from setuptools import setup
except ImportError:
except ImportError: # noqa
from distutils.core import setup
import sys

Просмотреть файл

@ -0,0 +1,253 @@
# coding=utf-8
"""Tests for download"""
# stdlib imports
import datetime
import dateutil.tz
import mock
import multiprocessing
try:
import pathlib2 as pathlib
except ImportError: # noqa
import pathlib
# non-stdlib imports
import azure.storage.blob
import pytest
# local imports
import blobxfer.models as models
import blobxfer.util as util
# module under test
import blobxfer.download as dl
def test_check_download_conditions(tmpdir):
    """Exercise Downloader._check_download_conditions across every
    skip-on/overwrite option combination.

    :param tmpdir: pytest tmpdir fixture
    """
    ap = tmpdir.join('a')
    ap.write('abc')
    ep = pathlib.Path(str(ap))
    nep = pathlib.Path(str(tmpdir.join('nep')))

    def make_spec(overwrite, filesize_match, lmt_ge, md5_match):
        # helper: build a DownloadSpecification varying only the options
        # that _check_download_conditions inspects; everything else fixed
        return models.DownloadSpecification(
            download_options=models.DownloadOptions(
                check_file_md5=True,
                delete_extraneous_destination=False,
                mode=models.AzureStorageModes.Auto,
                overwrite=overwrite,
                recursive=True,
                restore_file_attributes=False,
                rsa_private_key=None,
            ),
            skip_on_options=models.SkipOnOptions(
                filesize_match=filesize_match,
                lmt_ge=lmt_ge,
                md5_match=md5_match,
            ),
            local_destination_path=models.LocalDestinationPath('dest'),
        )

    # no overwrite: nonexistent local file downloads, existing file skips
    d = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(), make_spec(False, True, True, True))
    result = d._check_download_conditions(nep, mock.MagicMock())
    assert result == dl.DownloadAction.Download
    result = d._check_download_conditions(ep, mock.MagicMock())
    assert result == dl.DownloadAction.Skip
    # overwrite allowed with md5 skip-on: md5 check takes priority
    d = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(), make_spec(True, True, True, True))
    result = d._check_download_conditions(ep, mock.MagicMock())
    assert result == dl.DownloadAction.CheckMd5
    # no skip-on actions at all: always download
    d = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(),
        make_spec(True, False, False, False))
    result = d._check_download_conditions(ep, mock.MagicMock())
    assert result == dl.DownloadAction.Download
    # filesize skip-on only: matching page-aligned size skips,
    # mismatching size downloads
    d = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(),
        make_spec(True, True, False, False))
    rfile = models.AzureStorageEntity('cont')
    rfile._size = util.page_align_content_length(ep.stat().st_size)
    rfile._mode = models.AzureStorageModes.Page
    result = d._check_download_conditions(ep, rfile)
    assert result == dl.DownloadAction.Skip
    rfile._size = ep.stat().st_size
    rfile._mode = models.AzureStorageModes.Page
    result = d._check_download_conditions(ep, rfile)
    assert result == dl.DownloadAction.Download
    # lmt-ge skip-on only: local mtime >= remote lmt skips,
    # older local file downloads
    d = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(),
        make_spec(True, False, True, False))
    rfile = models.AzureStorageEntity('cont')
    rfile._lmt = datetime.datetime.now(dateutil.tz.tzutc()) + \
        datetime.timedelta(days=1)
    result = d._check_download_conditions(ep, rfile)
    assert result == dl.DownloadAction.Download
    rfile._lmt = datetime.datetime.now(dateutil.tz.tzutc()) - \
        datetime.timedelta(days=1)
    result = d._check_download_conditions(ep, rfile)
    assert result == dl.DownloadAction.Skip
def test_pre_md5_skip_on_check():
    """_pre_md5_skip_on_check records the local path in the md5 map for
    both encrypted and unencrypted remote entities.
    """
    downloader = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    downloader._md5_offload = mock.MagicMock()
    entity = models.AzureStorageEntity('cont')
    # encrypted entity: md5 is taken from the blobxfer extension metadata
    entity._encryption = mock.MagicMock()
    entity._encryption.blobxfer_extensions = mock.MagicMock()
    entity._encryption.blobxfer_extensions.pre_encrypted_content_md5 = \
        'abc'
    downloader._pre_md5_skip_on_check('lpath', entity)
    assert 'lpath' in downloader._md5_map
    # unencrypted entity: md5 is taken directly from the entity
    entity._encryption = None
    entity._md5 = 'abc'
    downloader._pre_md5_skip_on_check('lpath2', entity)
    assert 'lpath2' in downloader._md5_map
def test_post_md5_skip_on_check():
    """A matched md5 result removes the local path from the md5 map."""
    downloader = dl.Downloader(
        mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    downloader._md5_offload = mock.MagicMock()
    entity = models.AzureStorageEntity('cont')
    entity._md5 = 'abc'
    downloader._pre_md5_skip_on_check('lpath', entity)
    assert 'lpath' in downloader._md5_map
    downloader._post_md5_skip_on_check('lpath', True)
    assert 'lpath' not in downloader._md5_map
    # TODO test mismatch
def test_initialize_check_md5_downloads_thread():
    """The md5-check thread polls the offload object for completed results,
    dispatches them to _post_md5_skip_on_check, and terminates once all
    remote files are processed and the md5 map is empty.
    """
    lpath = 'lpath'
    d = dl.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    d._md5_map[lpath] = mock.MagicMock()
    d._md5_offload = mock.MagicMock()
    # real condition variable so the thread can actually wait and be woken
    d._md5_offload.done_cv = multiprocessing.Condition()
    d._md5_offload.get_localfile_md5_done = mock.MagicMock()
    # first poll returns nothing; second returns a completed (path, ok) result
    d._md5_offload.get_localfile_md5_done.side_effect = [None, (lpath, True)]
    d._post_md5_skip_on_check = mock.MagicMock()
    d._initialize_check_md5_downloads_thread()
    # set the thread's exit conditions, then notify it to re-check them
    d._all_remote_files_processed = True
    d._md5_map.clear()
    d._md5_offload.done_cv.acquire()
    d._md5_offload.done_cv.notify()
    d._md5_offload.done_cv.release()
    d._md5_check_thread.join()
    # exactly one completed result should have been dispatched
    assert d._post_md5_skip_on_check.call_count == 1
@mock.patch('blobxfer.md5.LocalFileMd5Offload')
@mock.patch('blobxfer.blob.operations.list_blobs')
@mock.patch('blobxfer.operations.ensure_local_destination', return_value=True)
def test_start(patched_eld, patched_lb, patched_lfmo, tmpdir):
    """Drive Downloader.start() through each DownloadAction path
    (Skip, CheckMd5, Download) with mocked remote listing and md5 offload.

    :param mock.MagicMock patched_eld: patched ensure_local_destination
    :param mock.MagicMock patched_lb: patched list_blobs
    :param mock.MagicMock patched_lfmo: patched LocalFileMd5Offload
    :param tmpdir: pytest tmpdir fixture
    """
    d = dl.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    # stub out the md5 checker thread so start() spawns no real worker
    d._initialize_check_md5_downloads_thread = mock.MagicMock()
    d._md5_check_thread = mock.MagicMock()
    # build a minimal spec: one source path, no skip-on conditions
    d._spec.sources = []
    d._spec.options = mock.MagicMock()
    d._spec.options.mode = models.AzureStorageModes.Auto
    d._spec.options.overwrite = True
    d._spec.skip_on = mock.MagicMock()
    d._spec.skip_on.md5_match = False
    d._spec.skip_on.lmt_ge = False
    d._spec.skip_on.filesize_match = False
    d._spec.destination = mock.MagicMock()
    d._spec.destination.path = pathlib.Path(str(tmpdir))
    p = '/cont/remote/path'
    asp = models.AzureSourcePath()
    asp.add_path_with_storage_account(p, 'sa')
    d._spec.sources.append(asp)
    # remote listing yields a single blob per start() invocation
    b = azure.storage.blob.models.Blob(name='name')
    patched_lb.side_effect = [[b]]
    # Skip path
    d._check_download_conditions = mock.MagicMock()
    d._check_download_conditions.return_value = dl.DownloadAction.Skip
    d.start()
    # TODO assert
    # CheckMd5 path: the blob must be submitted for md5 pre-check
    patched_lb.side_effect = [[b]]
    d._all_remote_files_processed = False
    d._check_download_conditions.return_value = dl.DownloadAction.CheckMd5
    d._pre_md5_skip_on_check = mock.MagicMock()
    d.start()
    assert d._pre_md5_skip_on_check.call_count == 1
    # Download path
    patched_lb.side_effect = [[b]]
    d._all_remote_files_processed = False
    d._check_download_conditions.return_value = dl.DownloadAction.Download
    d.start()
    # TODO assert

Просмотреть файл

@ -0,0 +1,99 @@
# coding=utf-8
"""Tests for md5"""
# stdlib imports
import time
# non-stdlib imports
# local imports
import blobxfer.models as models
import blobxfer.util as util
# module under test
import blobxfer.md5 as md5
def test_done_cv():
    """The done_cv property exposes the internal condition variable."""
    offload = None
    try:
        offload = md5.LocalFileMd5Offload()
        assert offload.done_cv == offload._done_cv
    finally:
        # always reap the worker processes, even on assertion failure
        if offload:
            offload.finalize_md5_processes()
def test_finalize_md5_processes():
    """finalize_md5_processes terminates every worker process."""
    offload = None
    try:
        # num_workers below one is clamped up to a single worker
        offload = md5.LocalFileMd5Offload(num_workers=0)
    finally:
        if offload:
            offload.finalize_md5_processes()
            for proc in offload._md5_procs:
                assert not proc.is_alive()
def test_from_add_to_done_non_pagealigned(tmpdir):
    """Round-trip a non page-aligned md5 check through the offload workers.

    :param tmpdir: pytest tmpdir fixture
    """
    lfile = tmpdir.join('a')
    lfile.write('abc')
    remote_md5 = util.compute_md5_for_file_asbase64(str(lfile))
    offload = None
    try:
        offload = md5.LocalFileMd5Offload(num_workers=1)
        # nothing queued yet
        assert offload.get_localfile_md5_done() is None
        offload.add_localfile_for_md5_check(
            str(lfile), remote_md5, models.AzureStorageModes.Block)
        # poll for the worker result, ~10 seconds max
        result = None
        for _ in range(33):
            result = offload.get_localfile_md5_done()
            if result is not None:
                break
            time.sleep(0.3)
        assert result is not None
        assert len(result) == 2
        assert result[0] == str(lfile)
        assert result[1]
    finally:
        if offload:
            offload.finalize_md5_processes()
def test_from_add_to_done_pagealigned(tmpdir):
    """Round-trip a page-aligned md5 check through the offload workers.

    :param tmpdir: pytest tmpdir fixture
    """
    lfile = tmpdir.join('a')
    lfile.write('abc')
    remote_md5 = util.compute_md5_for_file_asbase64(str(lfile), True)
    offload = None
    try:
        offload = md5.LocalFileMd5Offload(num_workers=1)
        # nothing queued yet
        assert offload.get_localfile_md5_done() is None
        offload.add_localfile_for_md5_check(
            str(lfile), remote_md5, models.AzureStorageModes.Page)
        # poll for the worker result, ~10 seconds max
        result = None
        for _ in range(33):
            result = offload.get_localfile_md5_done()
            if result is not None:
                break
            time.sleep(0.3)
        assert result is not None
        assert len(result) == 2
        assert result[0] == str(lfile)
        assert result[1]
    finally:
        if offload:
            offload.finalize_md5_processes()

Просмотреть файл

@ -6,18 +6,19 @@ import mock
import os
try:
import pathlib2 as pathlib
except ImportError:
except ImportError: # noqa
import pathlib
# non-stdlib imports
import azure.storage
import azure.storage.blob
import azure.storage.file
import pytest
# module under test
import blobxfer.models
import blobxfer.models as models
def test_storage_credentials():
creds = blobxfer.models.AzureStorageCredentials()
creds = models.AzureStorageCredentials()
creds.add_storage_account('sa1', 'somekey1', 'endpoint')
a = creds.get_storage_account('sa1')
@ -51,24 +52,24 @@ def test_storage_credentials():
def test_key_is_sas():
a = blobxfer.models.AzureStorageAccount('name', 'abcdef', 'endpoint')
a = models.AzureStorageAccount('name', 'abcdef', 'endpoint')
assert not a.is_sas
a = blobxfer.models.AzureStorageAccount('name', 'abcdef&blah', 'endpoint')
a = models.AzureStorageAccount('name', 'abcdef&blah', 'endpoint')
assert not a.is_sas
a = blobxfer.models.AzureStorageAccount('name', '?abcdef', 'endpoint')
a = models.AzureStorageAccount('name', '?abcdef', 'endpoint')
assert a.is_sas
a = blobxfer.models.AzureStorageAccount(
a = models.AzureStorageAccount(
'name', '?sv=0&sr=1&sig=2', 'endpoint')
assert a.is_sas
a = blobxfer.models.AzureStorageAccount(
a = models.AzureStorageAccount(
'name', 'sv=0&sr=1&sig=2', 'endpoint')
assert a.is_sas
a = blobxfer.models.AzureStorageAccount(
a = models.AzureStorageAccount(
'name', 'sig=0&sv=0&sr=1&se=2', 'endpoint')
assert a.is_sas
@ -86,7 +87,7 @@ def test_localsourcepaths_files(tmpdir):
defpath.join('world.txt').write('world')
defpath.join('moo.cow').write('y')
a = blobxfer.models.LocalSourcePaths()
a = models.LocalSourcePaths()
a.add_include('*.txt')
a.add_includes(['moo.cow', '*blah*'])
with pytest.raises(ValueError):
@ -106,7 +107,7 @@ def test_localsourcepaths_files(tmpdir):
assert str(defpath.join('world.txt')) in a_set
assert str(defpath.join('moo.cow')) not in a_set
b = blobxfer.models.LocalSourcePaths()
b = models.LocalSourcePaths()
b.add_includes(['moo.cow', '*blah*'])
b.add_include('*.txt')
b.add_excludes(['world.txt'])
@ -121,7 +122,7 @@ def test_localdestinationpath(tmpdir):
tmpdir.mkdir('1')
path = tmpdir.join('1')
a = blobxfer.models.LocalDestinationPath(str(path))
a = models.LocalDestinationPath(str(path))
a.is_dir = True
assert str(a.path) == str(path)
assert a.is_dir
@ -129,7 +130,7 @@ def test_localdestinationpath(tmpdir):
a.ensure_path_exists()
assert os.path.exists(str(a.path))
b = blobxfer.models.LocalDestinationPath()
b = models.LocalDestinationPath()
b.is_dir = False
b.path = str(path)
with pytest.raises(RuntimeError):
@ -138,7 +139,7 @@ def test_localdestinationpath(tmpdir):
path2 = tmpdir.join('2')
path3 = path2.join('3')
c = blobxfer.models.LocalDestinationPath(str(path3))
c = models.LocalDestinationPath(str(path3))
with pytest.raises(RuntimeError):
c.ensure_path_exists()
c.is_dir = False
@ -150,7 +151,7 @@ def test_localdestinationpath(tmpdir):
def test_azuresourcepath():
p = '/cont/remote/path'
asp = blobxfer.models.AzureSourcePath()
asp = models.AzureSourcePath()
asp.add_path_with_storage_account(p, 'sa')
with pytest.raises(RuntimeError):
@ -159,26 +160,106 @@ def test_azuresourcepath():
assert 'sa' == asp.lookup_storage_account(p)
@mock.patch('blobxfer.crypto.models.EncryptionMetadata')
@mock.patch('blobxfer.file.operations.list_files')
def test_azuresourcepath_files(patched_lf, patched_em):
    """AzureSourcePath.files yields one entity per remote file, attaching
    encryption metadata only when encryption metadata exists remotely.
    """
    asp = models.AzureSourcePath()
    asp.add_path_with_storage_account('/cont/remote/path', 'sa')
    options = mock.MagicMock()
    options.mode = models.AzureStorageModes.File
    creds = mock.MagicMock()
    creds.get_storage_account = mock.MagicMock()
    sa = mock.MagicMock()
    sa.file_client = mock.MagicMock()
    creds.get_storage_account.return_value = sa
    # unencrypted remote file
    f = azure.storage.file.models.File(name='name')
    patched_lf.side_effect = [[f]]
    patched_em.encryption_metadata_exists = mock.MagicMock()
    patched_em.encryption_metadata_exists.return_value = False
    entries = list(asp.files(creds, options, mock.MagicMock()))
    assert len(entries) == 1
    assert entries[0].name == 'name'
    assert entries[0].encryption_metadata is None
    # encrypted remote file
    fe = azure.storage.file.models.File(name='name')
    fe.metadata = {'encryptiondata': {'a': 'b'}}
    patched_lf.side_effect = [[fe]]
    patched_em.encryption_metadata_exists.return_value = True
    patched_em.convert_from_json = mock.MagicMock()
    entries = list(asp.files(creds, options, mock.MagicMock()))
    assert len(entries) == 1
    assert entries[0].name == 'name'
    assert entries[0].encryption_metadata is not None
@mock.patch('blobxfer.crypto.models.EncryptionMetadata')
@mock.patch('blobxfer.blob.operations.list_blobs')
def test_azuresourcepath_blobs(patched_lb, patched_em):
    """AzureSourcePath.files yields one entity per remote blob, attaching
    encryption metadata only when encryption metadata exists remotely.
    """
    asp = models.AzureSourcePath()
    asp.add_path_with_storage_account('/cont/remote/path', 'sa')
    options = mock.MagicMock()
    options.mode = models.AzureStorageModes.Auto
    creds = mock.MagicMock()
    creds.get_storage_account = mock.MagicMock()
    sa = mock.MagicMock()
    sa.block_blob_client = mock.MagicMock()
    creds.get_storage_account.return_value = sa
    # unencrypted remote blob
    b = azure.storage.blob.models.Blob(name='name')
    patched_lb.side_effect = [[b]]
    patched_em.encryption_metadata_exists = mock.MagicMock()
    patched_em.encryption_metadata_exists.return_value = False
    entries = list(asp.files(creds, options, mock.MagicMock()))
    assert len(entries) == 1
    assert entries[0].name == 'name'
    assert entries[0].encryption_metadata is None
    # encrypted remote blob
    be = azure.storage.blob.models.Blob(name='name')
    be.metadata = {'encryptiondata': {'a': 'b'}}
    patched_lb.side_effect = [[be]]
    patched_em.encryption_metadata_exists.return_value = True
    patched_em.convert_from_json = mock.MagicMock()
    entries = list(asp.files(creds, options, mock.MagicMock()))
    assert len(entries) == 1
    assert entries[0].name == 'name'
    assert entries[0].encryption_metadata is not None
def test_downloadspecification():
ds = blobxfer.models.DownloadSpecification(
download_options=blobxfer.models.DownloadOptions(
ds = models.DownloadSpecification(
download_options=models.DownloadOptions(
check_file_md5=True,
delete_extraneous_destination=False,
mode=blobxfer.models.AzureStorageModes.Auto,
mode=models.AzureStorageModes.Auto,
overwrite=True,
recursive=True,
restore_file_attributes=False,
rsa_private_key=None,
),
skip_on_options=blobxfer.models.SkipOnOptions(
skip_on_options=models.SkipOnOptions(
filesize_match=True,
lmt_ge=False,
md5_match=True,
),
local_destination_path=blobxfer.models.LocalDestinationPath('dest'),
local_destination_path=models.LocalDestinationPath('dest'),
)
asp = blobxfer.models.AzureSourcePath()
asp = models.AzureSourcePath()
p = 'some/remote/path'
asp.add_path_with_storage_account(p, 'sa')
@ -193,7 +274,7 @@ def test_downloadspecification():
def test_azurestorageentity():
ase = blobxfer.models.AzureStorageEntity('cont')
ase = models.AzureStorageEntity('cont')
assert ase.container == 'cont'
assert ase.encryption_metadata is None
@ -211,15 +292,15 @@ def test_azurestorageentity():
assert ase.lmt == 'lmt'
assert ase.size == 123
assert ase.md5 == 'abc'
assert ase.mode == blobxfer.models.AzureStorageModes.Block
assert ase.mode == models.AzureStorageModes.Block
blob.properties.blob_type = azure.storage.blob.models._BlobTypes.AppendBlob
ase.populate_from_blob(blob)
assert ase.mode == blobxfer.models.AzureStorageModes.Append
assert ase.mode == models.AzureStorageModes.Append
blob.properties.blob_type = azure.storage.blob.models._BlobTypes.PageBlob
ase.populate_from_blob(blob)
assert ase.mode == blobxfer.models.AzureStorageModes.Page
assert ase.mode == models.AzureStorageModes.Page
ase.populate_from_file(blob)
assert ase.mode == blobxfer.models.AzureStorageModes.File
assert ase.mode == models.AzureStorageModes.File

Просмотреть файл

@ -2,10 +2,7 @@
"""Tests for operations"""
# stdlib imports
from mock import (
MagicMock,
patch,
)
import mock
# non-stdlib imports
import pytest
# local imports
@ -14,8 +11,8 @@ import blobxfer.models
import blobxfer.operations as ops
@patch('blobxfer.file.operations.check_if_single_file')
@patch('blobxfer.blob.operations.check_if_single_blob')
@mock.patch('blobxfer.file.operations.check_if_single_file')
@mock.patch('blobxfer.blob.operations.check_if_single_blob')
def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
downdir = tmpdir.join('down')
@ -30,14 +27,14 @@ def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
restore_file_attributes=False,
rsa_private_key=None,
),
skip_on_options=MagicMock(),
skip_on_options=mock.MagicMock(),
local_destination_path=blobxfer.models.LocalDestinationPath(
str(downdir)
),
)
with pytest.raises(RuntimeError):
ops.ensure_local_destination(MagicMock(), ds)
ops.ensure_local_destination(mock.MagicMock(), ds)
asp = blobxfer.models.AzureSourcePath()
p = 'cont/remote/path'
@ -46,12 +43,12 @@ def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
ds.add_azure_source_path(asp)
patched_blob.return_value = False
ops.ensure_local_destination(MagicMock(), ds)
ops.ensure_local_destination(mock.MagicMock(), ds)
assert ds.destination.is_dir
patched_blob.return_value = True
with pytest.raises(RuntimeError):
ops.ensure_local_destination(MagicMock(), ds)
ops.ensure_local_destination(mock.MagicMock(), ds)
# file tests
ds = blobxfer.models.DownloadSpecification(
@ -64,7 +61,7 @@ def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
restore_file_attributes=False,
rsa_private_key=None,
),
skip_on_options=MagicMock(),
skip_on_options=mock.MagicMock(),
local_destination_path=blobxfer.models.LocalDestinationPath(
str(downdir)
),
@ -73,9 +70,9 @@ def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
ds.add_azure_source_path(asp)
patched_file.return_value = (False, None)
ops.ensure_local_destination(MagicMock(), ds)
ops.ensure_local_destination(mock.MagicMock(), ds)
assert ds.destination.is_dir
patched_file.return_value = (True, MagicMock())
patched_file.return_value = (True, mock.MagicMock())
with pytest.raises(RuntimeError):
ops.ensure_local_destination(MagicMock(), ds)
ops.ensure_local_destination(mock.MagicMock(), ds)