Post-release fixes

- Fix merge task with autogen task id
- Recipe updates
- Other minor fixes
Parent: 71fc510a57
Commit: d14b8aa8b4

@@ -2,6 +2,9 @@
 ## [Unreleased]
 
+### Fixed
+
+- Fix merge task regression with enhanced autogenerated task id support
 
 ## [3.9.0] - 2019-11-15 (SC19 Edition)
 
 ### Added
 
 - Support for [Encrypted Singularity Containers](docs/50-batch-shipyard-encrypted-containers.md)

@@ -5630,7 +5630,7 @@ def add_jobs(
         merge_task_id = None
         if has_merge_task:
             ntasks += 1
-            _task = settings.job_merge_task(jobspec)
+            _task = settings.job_merge_task(config, jobspec)
             existing_tasklist, merge_task_id, lasttaskic, gpu, ib = \
                 _construct_task(
                     batch_client, blob_client, keyvault_client, config,

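The call-site change above threads the global `config` through to `settings.job_merge_task`, so the merge task can fall back to globally configured autogenerated task id settings when the job itself does not define any. A minimal sketch of that fallback, with hypothetical key names and defaults rather than the authoritative batch-shipyard configuration schema:

```python
# Minimal sketch of the job-level vs. global fallback the new signature
# enables. The key names and defaults below are assumptions for illustration,
# not the real batch-shipyard configuration schema.
def pick_autogen_task_id_settings(config, jobspec):
    # prefer autogenerated task id settings declared on the job itself
    if 'autogenerated_task_id' in jobspec:
        return jobspec['autogenerated_task_id']
    # otherwise fall back to the global configuration
    return config.get('autogenerated_task_id', {})


# example: the job does not override, so the global settings win
print(pick_autogen_task_id_settings(
    {'autogenerated_task_id': {'prefix': 'task-', 'zfill_width': 5}}, {}))
```
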
@@ -3545,14 +3545,23 @@ def job_has_merge_task(conf):
     return True
 
 
-def job_merge_task(conf):
+def job_merge_task(config, conf):
     # type: (dict) -> dict
     """Gets merge task
+    :param dict config: configuration object
     :param dict conf: job configuration object
     :rtype: dict
     :return: merge task
     """
-    return conf['merge_task']
+    if 'autogenerated_task_id' in conf:
+        prefix, padding = autogenerated_task_id_settings(conf, level='job')
+    else:
+        prefix, padding = autogenerated_task_id_settings(
+            config, level='global')
+    _task = conf['merge_task']
+    _task['##task_id_prefix'] = prefix
+    _task['##task_id_padding'] = padding
+    return _task
 
 
 def job_force_enable_task_dependencies(conf):

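The rewritten `job_merge_task` no longer returns the raw `merge_task` block; it annotates it with `##task_id_prefix` and `##task_id_padding` so the task constructor can build a zero-based, zero-padded id. A rough illustration of how such markers could yield an id like `merge-task-00000` (the helper below is hypothetical, not the actual `_construct_task` code):

```python
# Hypothetical consumer of the '##task_id_prefix'/'##task_id_padding' markers
# that job_merge_task() now attaches; not batch-shipyard's actual
# _construct_task implementation.
def format_task_id(task_conf, task_index):
    prefix = task_conf.get('##task_id_prefix', '')
    padding = task_conf.get('##task_id_padding', 0)
    # zero-based index, left-padded with zeros
    return '{}{}'.format(prefix, str(task_index).zfill(padding))


# the first (and only) merge task in the job gets index 0
print(format_task_id(
    {'##task_id_prefix': 'merge-task-', '##task_id_padding': 5}, 0))
# merge-task-00000
```

This matches the recipe documentation change later in this commit, where the merge task id referenced in the example commands moves from `merge-task-00001` to `merge-task-00000`.
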
@@ -1,7 +1,7 @@
 # Singularity for Azure/batch-shipyard (cli)
 
 Bootstrap: library
-From: alpine:3.10
+From: alpine:3.9
 
 %post
     apk update

@@ -61,13 +61,12 @@ shipyard jobs add --jobs jobs-split.yaml --tail stdout.txt
 shipyard jobs add --jobs jobs-blast.yaml
 
 # poll the merge task until it completes
-shipyard jobs tasks list --jobid blast --taskid merge-task-00001 --poll-until-tasks-complete
+shipyard jobs tasks list --jobid blast --taskid merge-task-00000 --poll-until-tasks-complete
 
 # optionally egress the results.txt file from the compute node to local machine
-shipyard data files task --filespec blast,merge-task-00001,wd/results.txt
+shipyard data files task --filespec blast,merge-task-00000,wd/results.txt
 
 # clean up
-shipyard jobs del -y --jobs jobs-split.yaml
 shipyard jobs del -y --jobs jobs-blast.yaml
 shipyard pool del -y
 ```

@@ -18,6 +18,6 @@ job_specifications:
       mpi:
         runtime: intelmpi-ofa
         processes_per_node: 1
-      pre_execution_command: source setup_hplinpack.sh -a $AVX -n 50000; source /opt/intel/compilers_and_libraries/linux/mpi/bin64/mpivars.sh
+      pre_execution_command: source setup_hplinpack.sh -a $AVX -n 50000; module load mpi/impi; source $MPI_BIN/mpivars.sh
       # need to source mpivars again as the container intel mpi/benchmark is not the exact version as on the host
       command: /bin/bash -c "source /opt/intel/compilers_and_libraries/linux/mpi/bin64/mpivars.sh; cd /opt/intel/mkl/benchmarks/mp_linpack && ./runme_intel64_prv -p $P -q $Q -b $B $PSIZE"