aztk/aztk_cli/entrypoint.py

"""
AZTK module for the CLI entry point

Note: any changes to this file require the package to be reinstalled:
    pip install -e .
"""
import argparse
import warnings
from typing import NamedTuple
from azure.batch.models import BatchErrorException
import aztk
from aztk_cli import constants, log, logger, utils
from aztk_cli.spark.endpoints import spark
from . import plugins, toolkit


# Makes sure the warnings are displayed nicely in the CLI without a stacktrace
def _show_warn(message, *_args):
    log.warning(str(message))


def main():
    parser = argparse.ArgumentParser(prog=constants.CLI_EXE)
    setup_common_args(parser)

    subparsers = parser.add_subparsers(title="Available Softwares", dest="software", metavar="<software>")
    subparsers.required = True

    spark_parser = subparsers.add_parser("spark", help="Commands to run spark jobs")
    plugins_parser = subparsers.add_parser("plugins", help="Commands to list and view plugins")
    toolkit_parser = subparsers.add_parser("toolkit", help="List current toolkit information and browse available ones")

    spark.setup_parser(spark_parser)
    plugins.setup_parser(plugins_parser)
    toolkit.setup_parser(toolkit_parser)
    args = parser.parse_args()

    parse_common_args(args)

    try:
        run_software(args)
    except BatchErrorException as e:
        utils.print_batch_exception(e)
    except aztk.error.AztkError as e:
        log.error(str(e))


def setup_common_args(parser: argparse.ArgumentParser):
    parser.add_argument("--version", action="version", version=aztk.version.__version__)
    parser.add_argument("--verbose", action="store_true", help="Enable verbose logging.")


def parse_common_args(args: NamedTuple):
    if args.verbose:
        logger.setup_logging(True)
        log.debug("Verbose logging enabled")
    else:
        warnings.showwarning = _show_warn
        logger.setup_logging(False)


def run_software(args: NamedTuple):
    softwares = {}
    softwares[aztk.models.Software.spark] = spark.execute
softwares["plugins"] = plugins.execute
softwares["toolkit"] = toolkit.execute
func = softwares[args.software]
func(args)
if __name__ == "__main__":
main()
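
# Example invocations once the package is installed, using only the flags and
# subparsers wired up above (spark, plugins, toolkit, --version, --verbose) and
# assuming constants.CLI_EXE resolves to the `aztk` executable name:
#
#     aztk --version
#     aztk --verbose spark --help
#     aztk plugins --help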