Fix: Custom scripts not read from cluster.yaml (#388)

Timothee Guerin 2018-02-09 09:56:53 -08:00 committed by GitHub
Parent e98de9f8ac
Commit d7d5faaf7a
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
7 changed files with 107 additions and 40 deletions

6
.style.yapf Normal file
View file

@@ -0,0 +1,6 @@
[style]
based_on_style = pep8
spaces_before_comment = 4
split_before_logical_operator = true
indent_width = 4
column_limit = 120
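
This style file is what the new "python.formatting.provider" setting below picks up, and yapf also reads it automatically when run from the repository root. A typical command-line invocation (the target directory here is illustrative):

    yapf --in-place --recursive cli/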

67
.vscode/launch.json Vendored Normal file
View file

@@ -0,0 +1,67 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "List clusters",
            "type": "python",
            "request": "launch",
            "stopOnEntry": false,
            "internalConsoleOptions": "openOnSessionStart",
            "pythonPath": "${config:python.pythonPath}",
            "program": "${workspaceFolder}/cli/entrypoint.py",
            "cwd": "${workspaceFolder}",
            "args": [
                "spark", "cluster", "list"
            ],
            "env": {},
            "envFile": "${workspaceFolder}/.env",
            "debugOptions": [
                "RedirectOutput"
            ]
        },
        {
            "name": "Create cluster",
            "type": "python",
            "request": "launch",
            "stopOnEntry": false,
            "internalConsoleOptions": "openOnSessionStart",
            "pythonPath": "${config:python.pythonPath}",
            "program": "${workspaceFolder}/cli/entrypoint.py",
            "cwd": "${workspaceFolder}",
            "args": [
                "spark", "cluster", "create", "--id", "spark-debug"
            ],
            "env": {},
            "envFile": "${workspaceFolder}/.env",
            "debugOptions": [
                "RedirectOutput"
            ]
        },
        {
            "name": "Python: Attach",
            "type": "python",
            "request": "attach",
            "localRoot": "${workspaceFolder}",
            "remoteRoot": "${workspaceFolder}",
            "port": 3000,
            "secret": "my_secret",
            "host": "localhost"
        },
        {
            "name": "Python: Terminal (integrated)",
            "type": "python",
            "request": "launch",
            "stopOnEntry": true,
            "pythonPath": "${config:python.pythonPath}",
            "program": "${file}",
            "cwd": "",
            "console": "integratedTerminal",
            "env": {},
            "envFile": "${workspaceFolder}/.env",
            "debugOptions": []
        }
    ]
}
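
The "Python: Attach" entry is a remote-attach configuration: it expects a debug server already listening on localhost:3000 with the matching secret. A minimal sketch of the script side, assuming the ptvsd 3.x package used by the VS Code Python extension of that era:

    import ptvsd

    # "secret" and "port" must match the values in launch.json above.
    ptvsd.enable_attach(secret='my_secret', address=('0.0.0.0', 3000))
    ptvsd.wait_for_attach()  # optionally block until VS Code attaches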

5
.vscode/settings.json Vendored
View file

@@ -8,6 +8,7 @@
         "**/__pycache__/**/*": true
     },
     "python.autoComplete.extraPaths": [
-        "${workspaceRoot}/node"
-    ]
+        "${workspaceRoot}/node_scripts"
+    ],
+    "python.formatting.provider": "yapf"
 }

View file

@@ -101,9 +101,9 @@ def _merge_secrets_dict(secrets: SecretsConfiguration, secrets_config):
     docker_config = secrets_config.get('docker')
     if docker_config:
         secrets.docker = DockerConfiguration(
-            endpoint = docker_config.get('endpoint'),
-            username = docker_config.get('username'),
-            password = docker_config.get('password'),
+            endpoint=docker_config.get('endpoint'),
+            username=docker_config.get('username'),
+            password=docker_config.get('password'),
         )

     default_config = secrets_config.get('default')
@@ -132,6 +132,7 @@ def read_cluster_config(path: str = aztk.utils.constants.DEFAULT_CLUSTER_CONFIG_
     return cluster_config_from_dict(config_dict)

+
 def cluster_config_from_dict(config: dict):
     output = ClusterConfiguration()
     wait = False
@@ -161,10 +162,26 @@ def cluster_config_from_dict(config: dict):
         output.user_configuration.password = config['password']

     if config.get('custom_scripts') not in [[None], None]:
-        output.custom_scripts = config['custom_scripts']
+        output.custom_scripts = []
+        for custom_script in config['custom_scripts']:
+            output.custom_scripts.append(
+                aztk.spark.models.CustomScript(
+                    script=custom_script['script'],
+                    run_on=custom_script['runOn']
+                )
+            )

     if config.get('azure_files') not in [[None], None]:
-        output.file_shares = config['azure_files']
+        output.file_shares = []
+        for file_share in config['azure_files']:
+            output.file_shares.append(
+                aztk.spark.models.FileShare(
+                    storage_account_name=file_share['storage_account_name'],
+                    storage_account_key=file_share['storage_account_key'],
+                    file_share_path=file_share['file_share_path'],
+                    mount_path=file_share['mount_path'],
+                )
+            )

     if config.get('docker_repo') is not None:
         output.docker_repo = config['docker_repo']
@@ -175,7 +192,6 @@ def cluster_config_from_dict(config: dict):
     return output, wait

-
 class SshConfig:
     def __init__(self):
@@ -271,13 +287,14 @@ class SshConfig:
             raise aztk.error.AztkError(
                 "Please supply a username either in the ssh.yaml configuration file or with a parameter (--username)")

+
 class JobConfig():
     def __init__(self):
         self.id = None
         self.applications = []
         self.custom_scripts = None
         self.spark_configuration = None
-        self.vm_size=None
+        self.vm_size = None
         self.docker_repo = None
         self.max_dedicated_nodes = None
         self.max_low_pri_nodes = None
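
For reference, the shape of the cluster.yaml entries this parser now converts into typed models. Only the keys are dictated by the code above; every value below is illustrative:

    custom_scripts:
      - script: ./custom-scripts/setup.sh
        runOn: all-nodes

    azure_files:
      - storage_account_name: mystorageaccount
        storage_account_key: <key>
        file_share_path: data
        mount_path: /mnt/data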

View file

@@ -54,32 +54,6 @@ def execute(args: typing.NamedTuple):
             docker_repo=args.docker_repo))

     wait = wait if args.wait is None else args.wait

-    if cluster_conf.custom_scripts:
-        custom_scripts = []
-        for custom_script in cluster_conf.custom_scripts:
-            custom_scripts.append(
-                aztk.spark.models.CustomScript(
-                    script=custom_script['script'],
-                    run_on=custom_script['runOn']
-                )
-            )
-    else:
-        custom_scripts = None
-
-    if cluster_conf.file_shares:
-        file_shares = []
-        for file_share in cluster_conf.file_shares:
-            file_shares.append(
-                aztk.spark.models.FileShare(
-                    storage_account_name=file_share['storage_account_name'],
-                    storage_account_key=file_share['storage_account_key'],
-                    file_share_path=file_share['file_share_path'],
-                    mount_path=file_share['mount_path']
-                )
-            )
-    else:
-        file_shares = None
-
     user_configuration = cluster_conf.user_configuration
     if user_configuration and user_configuration.username:
@@ -123,7 +97,7 @@ def print_cluster_conf(cluster_conf: ClusterConfiguration, wait: bool):
     log.info("> dedicated: %s", cluster_conf.vm_count)
     log.info("> low priority: %s", cluster_conf.vm_low_pri_count)
     log.info("spark cluster vm size: %s", cluster_conf.vm_size)
-    log.info("custom scripts: %s", cluster_conf.custom_scripts)
+    log.info("custom scripts: %s", len(cluster_conf.custom_scripts) if cluster_conf.custom_scripts else 0)
     log.info("subnet ID: %s", cluster_conf.subnet_id)
     log.info("file shares: %s", len(cluster_conf.file_shares) if cluster_conf.file_shares is not None else 0)
     log.info("docker repo name: %s", cluster_conf.docker_repo)

View file

@@ -1,9 +1,13 @@
+# Distribution
 azure-batch==3.0.0
 azure-mgmt-batch==5.0.0
 azure-mgmt-storage==1.5.0
 azure-storage==0.33.0
-pytest==3.1.3
-pylint==1.7.2
 pyyaml==3.12
 pycryptodome==3.4.7
 paramiko==2.4.0
+
+# Development
+yapf==0.20.1
+pylint==1.7.2
+pytest==3.1.3
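
The list is now grouped into runtime and development dependencies; a single install still pulls in both groups:

    pip install -r requirements.txt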

View file

@@ -1,2 +0,0 @@
-[pep8]
-max-line-length = 160