Duplicate volume checks between job and task

This commit is contained in:
Fred Park 2017-05-28 14:57:52 -07:00
Parent 893baef123
Commit ec986323fb
2 changed files: 8 additions and 0 deletions

View file

@@ -12,6 +12,8 @@
### Fixed
- Remote FS allocation issue with `vm_count` deprecation check
- Better handling of package index refresh errors
- `pool udi` over SSH and no registry logins (#92)
- Duplicate volume checks between job and task definitions
## [2.7.0rc1] - 2017-05-24
### Added

View file

@@ -2218,6 +2218,9 @@ def task_settings(cloud_pool, config, poolconf, jobspec, conf, missing_images):
tdv = conf['data_volumes']
if util.is_not_empty(tdv):
if util.is_not_empty(data_volumes):
# check for intersection
if len(set(data_volumes).intersection(set(tdv))) > 0:
raise ValueError('data volumes must be unique')
data_volumes.extend(tdv)
else:
data_volumes = tdv
@@ -2246,6 +2249,9 @@ def task_settings(cloud_pool, config, poolconf, jobspec, conf, missing_images):
tsdv = conf['shared_data_volumes']
if util.is_not_empty(tsdv):
if util.is_not_empty(shared_data_volumes):
# check for intersection
if len(set(shared_data_volumes).intersection(set(tsdv))) > 0:
raise ValueError('shared data volumes must be unique')
shared_data_volumes.extend(tsdv)
else:
shared_data_volumes = tsdv