2017-07-19 20:48:23 +03:00
|
|
|
"""
|
2017-09-30 03:13:22 +03:00
|
|
|
AZTK module for the CLI entry point
|
2017-07-19 20:48:23 +03:00
|
|
|
|
|
|
|
Note: any changes to this file need have the package reinstalled
|
|
|
|
pip install -e .
|
|
|
|
"""
|
|
|
|
import argparse
|
2018-05-02 02:36:44 +03:00
|
|
|
import warnings
|
2017-08-02 18:43:26 +03:00
|
|
|
from typing import NamedTuple
|
2018-08-25 03:21:22 +03:00
|
|
|
|
Feature: Spark scheduling target (#661)
* initial
* update pipfile and pipfile.lock
* uncomment scheduling target, start ssh_submit impl
* get rid of debug code
* finish ssh_submit implementation
* serialize object instead of properties
* fix upload log bug, temp workaround for get logs
* remove unused function
* clean up node_scripts submit, remove debug code
* ensure warns on deprecated test
* remove commented timeout
* start scheduling_target for job_submission
* continue job scheduling target implementation
* update pipefile.lock
* update Pipfile deps, pin pynacl to fix build failure
* fix syntax
* fix pipfile with latest azure-nspkg
* update path for scheduling scripts
* update config.py import
* add nohup dependency
* use nohup and exit immediately
* remove bad dep
* remove nohup
* remove commented code
* add block to ssh, get retcode from node_exec
* fix typo
* fix some imports, add test stubs
* fixes
* start implementation of task table service
* add scheduling_target support for get_application_log
* todos
* remove useless statement
* move get_application_status to core, add scheduling_target support
* update deps in requirements.txt
* fix false positive pylint import error
* remove bad import
* bad local variable
* add batch task abstraction, add datetime field
* mediate table insertion with task abstraction
* fix issues with task abstraction usage
* fix pylint import error
* fix update task on run
* update job submission test
* make test package, update pylint
* update job submission with scheduling_target
* add job support for scheduling_target
* fix taskstate serialization to storage
* fix job submission job manager task, catch table storage errors
* fix import
* fix imports for batch sdk 5.0+
* fix test model module
* fix node election exception catch
* start fix job tests
* move get_task_status to base
* fix job tests
* fix get_application, add abstraction to batch task gets
* fix some bugs, remove some debug statements
* fix test
* use jobstate and application state
* add start_task retries
* make jobstate an enum
* fix import
* fixes
* fixes
* revert settings.json
* fixes for application state in cli
* conditionally create storage table
* remove commented code
* conditionally create storage table
* remove commented code
* fix test
* respond to comments
* fix debug statement, fix starttask issue
* remove debug test print
* formatting
* update doc string with correct return value
* revert settings.json
* more robust starget test, fix get_application for starget
* whitespace
2018-10-24 01:47:54 +03:00
|
|
|
from azure.batch.models import BatchErrorException
|
2018-08-25 03:21:22 +03:00
|
|
|
|
2017-12-02 00:42:55 +03:00
|
|
|
import aztk
|
2018-08-25 03:21:22 +03:00
|
|
|
from aztk_cli import constants, log, logger, utils
|
2018-03-06 04:18:47 +03:00
|
|
|
from aztk_cli.spark.endpoints import spark
|
2018-08-25 03:21:22 +03:00
|
|
|
|
2018-05-02 02:36:44 +03:00
|
|
|
from . import plugins, toolkit
|
|
|
|
|
|
|
|
|
|
|
|
# Makes sure the warnings are displayed nicely in the CLI without a stacktrace
def _show_warn(message, *_args):
    """Replacement for warnings.showwarning: log only the warning text.

    Extra positional arguments supplied by the warnings machinery
    (category, filename, lineno, ...) are accepted and ignored.
    """
    text = str(message)
    log.warning(text)
|
2018-05-02 02:36:44 +03:00
|
|
|
|
2017-07-19 20:48:23 +03:00
|
|
|
|
|
|
|
def main():
    """Entry point for the aztk CLI.

    Builds the top-level argument parser, registers every software's
    sub-commands, parses the command line and dispatches to the chosen
    software. Known Batch and AZTK errors are reported to the user
    without a stacktrace.
    """
    parser = argparse.ArgumentParser(prog=constants.CLI_EXE)
    setup_common_args(parser)

    subparsers = parser.add_subparsers(title="Available Softwares", dest="software", metavar="<software>")
    subparsers.required = True

    # Register each software's sub-parser and let its module populate it.
    registrations = (
        ("spark", "Commands to run spark jobs", spark.setup_parser),
        ("plugins", "Commands to list and view plugins", plugins.setup_parser),
        ("toolkit", "List current toolkit information and browse available ones", toolkit.setup_parser),
    )
    for name, help_text, setup in registrations:
        setup(subparsers.add_parser(name, help=help_text))

    args = parser.parse_args()
    parse_common_args(args)

    try:
        run_software(args)
    except BatchErrorException as e:
        # Azure Batch service errors get the dedicated pretty-printer.
        utils.print_batch_exception(e)
    except aztk.error.AztkError as e:
        log.error(str(e))
|
2017-08-02 18:43:26 +03:00
|
|
|
|
|
|
|
|
|
|
|
def setup_common_args(parser: argparse.ArgumentParser):
    """Attach the flags shared by every sub-command to *parser*.

    Adds ``--version`` (prints the aztk package version and exits) and
    ``--verbose`` (enables verbose logging).
    """
    version_string = aztk.version.__version__
    parser.add_argument("--version", action="version", version=version_string)
    parser.add_argument("--verbose", action="store_true", help="Enable verbose logging.")
|
2017-08-02 18:43:26 +03:00
|
|
|
|
|
|
|
|
|
|
|
def parse_common_args(args: NamedTuple):
    """Apply the common flags parsed from the command line.

    Configures logging verbosity and, in quiet mode, installs the
    stacktrace-free warning formatter.
    """
    verbose = bool(args.verbose)
    if not verbose:
        # Quiet mode: render warnings without a stacktrace.
        warnings.showwarning = _show_warn
    logger.setup_logging(verbose)
    if verbose:
        log.debug("Verbose logging enabled")
|
|
|
|
|
|
|
|
|
|
|
|
def run_software(args: NamedTuple):
    """Dispatch to the execute() handler registered for ``args.software``.

    The spark software is keyed by its Software enum member; plugins and
    toolkit are keyed by plain strings, matching the sub-parser names.
    """
    handlers = {
        aztk.models.Software.spark: spark.execute,
        "plugins": plugins.execute,
        "toolkit": toolkit.execute,
    }
    handlers[args.software](args)
|
|
|
|
|
|
|
|
|
2018-08-25 03:21:22 +03:00
|
|
|
if __name__ == "__main__":
|
2017-07-19 20:48:23 +03:00
|
|
|
main()
|