This commit is contained in:
Maxime Beauchemin 2015-09-19 07:53:45 -07:00
Родитель 4820509182
Коммит c80efa31c8
9 изменённых файлов: 34 добавлений и 24 удалений

Просмотреть файл

@@ -1,2 +0,0 @@
import airflow.contrib.hooks
import airflow.contrib.operators

Просмотреть файл

@@ -1,11 +1,10 @@
'''
Imports the hooks dynamically while keeping the package API clean,
abstracting the underlying modules
'''
# Imports the hooks dynamically while keeping the package API clean,
# abstracting the underlying modules
from airflow.utils import import_module_attrs as _import_module_attrs
_hooks = {
'ftp_hook': ['FTPHook'],
'vertica_hook': ['VerticaHook'],
}
_import_module_attrs(globals(), _hooks)

Просмотреть файл

Просмотреть файл

@@ -1,11 +1,10 @@
'''
Imports the operators dynamically while keeping the package API clean,
abstracting the underlying modules
'''
# Imports the operators dynamically while keeping the package API clean,
# abstracting the underlying modules
from airflow.utils import import_module_attrs as _import_module_attrs
_operators = {
# 'example': ['ExampleOperator'],
'vertica_operator': ['VerticaOperator'],
'vertica_to_hive': ['VerticaToHiveTransfer'],
}
_import_module_attrs(globals(), _operators)

Просмотреть файл

@@ -4,7 +4,8 @@ import unicodecsv as csv
import logging
from tempfile import NamedTemporaryFile
from airflow.hooks import HiveCliHook, VerticaHook
from airflow.hooks import HiveCliHook
from airflow.contrib.hooks import VerticaHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
@@ -22,6 +23,7 @@ class VerticaToHiveTransfer(BaseOperator):
queried considerably, you may want to use this operator only to
stage the data into a temporary table before loading it into its
final destination using a ``HiveOperator``.
:param sql: SQL query to execute against the Vertica database
:type sql: str
:param hive_table: target Hive table, use dot notation to target a

Просмотреть файл

@@ -1,7 +1,5 @@
'''
Imports the hooks dynamically while keeping the package API clean,
abstracting the underlying modules
'''
# Imports the hooks dynamically while keeping the package API clean,
# abstracting the underlying modules
from airflow.utils import import_module_attrs as _import_module_attrs
from airflow.hooks.base_hook import BaseHook as _BaseHook
@@ -24,7 +22,6 @@ _hooks = {
'dbapi_hook': ['DbApiHook'],
'mssql_hook': ['MsSqlHook'],
'oracle_hook': ['OracleHook'],
'vertica_hook': ['VerticaHook'],
}
_import_module_attrs(globals(), _hooks)

Просмотреть файл

@@ -1,7 +1,5 @@
'''
Imports operators dynamically while keeping the package API clean,
abstracting the underlying modules
'''
# Imports operators dynamically while keeping the package API clean,
# abstracting the underlying modules
from airflow.utils import import_module_attrs as _import_module_attrs
# These need to be integrated first as other operators depend on them
@@ -28,8 +26,6 @@ _operators = {
'mysql_operator': ['MySqlOperator'],
'sqlite_operator': ['SqliteOperator'],
'mysql_to_hive': ['MySqlToHiveTransfer'],
'vertica_operator': ['VerticaOperator'],
'vertica_to_hive': ['VerticaToHiveTransfer'],
'postgres_operator': ['PostgresOperator'],
'sensors': [
'SqlSensor',

Просмотреть файл

@@ -452,7 +452,8 @@ def import_module_attrs(parent_module_globals, module_attrs_dict):
imported_attrs = []
for mod, attrs in list(module_attrs_dict.items()):
try:
folder = os.path.dirname(parent_module_globals['__file__'])
path = os.path.realpath(parent_module_globals['__file__'])
folder = os.path.dirname(path)
f, filename, description = imp.find_module(mod, [folder])
module = imp.load_module(mod, f, filename, description)
for attr in attrs:

Просмотреть файл

@@ -68,6 +68,15 @@ Operator API
SubDagOperator,
TimeSensor
Community Contributed Operators
'''''''''''''''''''''''''''''''
.. automodule:: airflow.contrib.operators
:show-inheritance:
:members:
VerticaToHiveTransfer,
.. _macros:
Macros
@@ -153,6 +162,15 @@ Hooks
S3Hook,
SqliteHook
Community Contributed Hooks
'''''''''''''''''''''''''''
.. automodule:: airflow.contrib.hooks
:show-inheritance:
:members:
VerticaHook,
FTPHook,
Executors
---------
Executors are the mechanism by which task instances get run.