Maxime Beauchemin 2015-03-30 22:00:30 -07:00
Parent d3389f2295
Commit df698a427f
3 changed files with 94 additions and 86 deletions

View file: airflow/hooks/__init__.py

@@ -1,30 +1,33 @@
-import logging
-
+'''
+Imports the hooks dynamically while keeping the package API clean,
+abstracting the underlying modules
+'''
-try:
-    from airflow.hooks.mysql_hook import MySqlHook
-except:
-    logging.info("Couldn't load MySQLHook")
-    pass
-
+import imp as _imp
+import os as _os
+
+_hooks = {
+    'hive_hooks': [
+        'HiveCliHook',
+        'HiveMetastoreHook',
+        'HiveServer2Hook',
+    ],
+    'presto_hook': ['PrestoHook'],
+    'mysql_hook': ['MySqlHook'],
+    'postgres_hook': ['PostgresHook'],
+    'samba_hook': ['SambaHook'],
+    'S3_hook': ['S3Hook'],
+}
-try:
-    from airflow.hooks.postgres_hook import PostgresHook
-except:
-    logging.info("Couldn't import PostgreHook")
-    pass
-
-from airflow.hooks.hive_hooks import HiveCliHook
-from airflow.hooks.hive_hooks import HiveMetastoreHook
-from airflow.hooks.hive_hooks import HiveServer2Hook
-from airflow.hooks.presto_hook import PrestoHook
-
-try:
-    from airflow.hooks.samba_hook import SambaHook
-except:
-    logging.info("Couldn't import SambaHook")
-    pass
-
-try:
-    from airflow.hooks.S3_hook import S3Hook
-except:
-    logging.info("Couldn't import S3Hook")
-    pass
+
+def f():
+    __all__ = []
+    for mod, hks in _hooks.items():
+        #try:
+        f, filename, description = _imp.find_module(mod, [_os.path.dirname(__file__)])
+        module = _imp.load_module(mod, f, filename, description)
+        for hk in hks:
+            globals()[hk] = getattr(module, hk)
+            __all__ += [hk]
+        #except:
+        #    pass
+f()
+del f
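The new hooks __init__ walks the _hooks mapping, loads each submodule via the imp API, and copies the named classes into the package namespace, so `from airflow.hooks import MySqlHook` keeps working without a wall of try/except imports. Two caveats in the committed code: imp was deprecated in Python 3.4 and removed in 3.12, and `__all__ = []` inside f() only binds a function-local name, so the module-level __all__ is never actually populated. A minimal sketch of the same pattern with importlib, assuming an illustrative subset of the mapping, would be:

# Hedged sketch, not the committed code: same dynamic-import pattern via importlib.
import importlib

_hooks = {
    'mysql_hook': ['MySqlHook'],        # illustrative subset of the real mapping
    'postgres_hook': ['PostgresHook'],
}

__all__ = []  # bound at module scope, so wildcard imports actually see it
for _mod, _classes in _hooks.items():
    try:
        _module = importlib.import_module('.' + _mod, package=__name__)
    except ImportError:
        continue  # optional dependency missing; skip its hooks
    for _cls in _classes:
        globals()[_cls] = getattr(_module, _cls)
        __all__.append(_cls)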

View file: airflow/operators/__init__.py

@@ -1,44 +1,47 @@
-import logging
-
+'''
+Imports operators dynamically while keeping the package API clean,
+abstracting the underlying modules
+'''
-from bash_operator import BashOperator
-from python_operator import PythonOperator
-
+import imp as _imp
+import os as _os
+
-try:
-    from mysql_operator import MySqlOperator
-except:
-    logging.info("Couldn't import MySqlOperator")
-    pass
-
+_operators = {
+    'bash_operator': ['BashOperator'],
+    'python_operator': ['PythonOperator'],
+    'hive_operator': ['HiveOperator'],
+    'presto_check_operator': [
+        'PrestoCheckOperator',
+        'PrestoValueCheckOperator',
+        'PrestoIntervalCheckOperator',
+    ],
+    'dummy_operator': ['DummyOperator'],
+    'email_operator': ['EmailOperator'],
+    'hive2samba_operator': ['Hive2SambaOperator'],
+    'mysql_operator': ['MySqlOperator'],
+    'postgres_operator': ['PostgresOperator'],
+    'sensors': [
+        'SqlSensor',
+        'ExternalTaskSensor',
+        'HivePartitionSensor',
+        'S3KeySensor',
+        'S3PrefixSensor',
+        'HdfsSensor',
+        'TimeSensor',
+    ],
+    'subdag_operator': ['SubDagOperator'],
+}
-try:
-    from postgres_operator import PostgresOperator
-except:
-    logging.info("Couldn't import PostgresOperator")
-    pass
-
-from hive_operator import HiveOperator
-from presto_check_operator import PrestoCheckOperator
-from presto_check_operator import PrestoIntervalCheckOperator
-from presto_check_operator import PrestoValueCheckOperator
-
-from sensors import SqlSensor
-from sensors import ExternalTaskSensor
-from sensors import HivePartitionSensor
-from sensors import HdfsSensor
-try:
-    from sensors import S3KeySensor
-    from sensors import S3PrefixSensor
-except:
-    logging.info("Couldn't import S3KeySensor, S3PrefixSensor")
-    pass
-from sensors import TimeSensor
-
-from email_operator import EmailOperator
-from dummy_operator import DummyOperator
-
-try:
-    from hive2samba_operator import Hive2SambaOperator
-except:
-    logging.info("Couldn't import Hive2SambaOperator")
-    pass
-
-from subdag_operator import SubDagOperator
+
+
+def f():
+    __all__ = []
+    for mod, ops in _operators.items():
+        try:
+            f, filename, description = _imp.find_module(mod, [_os.path.dirname(__file__)])
+            module = _imp.load_module(mod, f, filename, description)
+            for op in ops:
+                globals()[op] = getattr(module, op)
+                __all__ += [op]
+        except:
+            pass
+f()
+del f
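The operators __init__ applies the same trick, but wraps each module load in a bare `try/except: pass`, so a genuinely broken operator module is silently skipped rather than reported. On modern Python (3.7+), a module-level __getattr__ (PEP 562) gives the same clean package API lazily and without swallowing errors. A hedged sketch with an illustrative mapping:

# Hedged sketch, not the committed code: lazy attribute access per PEP 562.
import importlib

# Maps public class name -> defining submodule (illustrative subset).
_operators = {
    'BashOperator': 'bash_operator',
    'MySqlOperator': 'mysql_operator',
}

def __getattr__(name):
    # Import the defining submodule only on first access, so a missing
    # optional dependency fails at use time instead of package-import time.
    if name in _operators:
        module = importlib.import_module('.' + _operators[name], package=__name__)
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")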

View file: setup.py

@@ -3,20 +3,25 @@ from setuptools import setup, find_packages
 # Kept manually in sync with airflow.__version__
 version = '0.4.5'
 
 doc = [
     'sphinx>=1.2.3',
     'sphinx-argparse>=0.1.13',
     'sphinx-rtd-theme>=0.1.6',
     'Sphinx-PyPI-upload>=0.2.1'
 ]
-postgres = ['psycopg2>=2.6']
+hive = [
+    'hive-thrift-py>=0.0.1',
+    'pyhive>=0.1.3',
+    'pyhs2>=0.6.0',
+]
 mysql = ['mysql-python>=1.2.5']
+postgres = ['psycopg2>=2.6']
+optional = ['librabbitmq>=1.6.1']
 samba = ['pysmbclient>=0.1.3']
 s3 = ['boto>=2.36.0']
-all_dbs = postgres + mysql
-devel = all_dbs + doc + samba + s3
+all_dbs = postgres + mysql + hive
+devel = all_dbs + doc + samba + s3
 
 setup(
     name='airflow',
@@ -36,14 +41,10 @@ setup(
         'flask-cache>=0.13.1',
         'flask-login>=0.2.11',
         'flower>=0.7.3',
-        'hive-thrift-py>=0.0.1',
         'jinja2>=2.7.3',
-        'librabbitmq>=1.6.1',
         'markdown>=2.5.2',
         'pandas>=0.15.2',
         'pygments>=2.0.1',
-        'pyhive>=0.1.3',
-        'pyhs2>=0.6.0',
         'python-dateutil>=2.3',
         'requests>=2.5.1',
         'setproctitle>=1.1.8',
@@ -54,15 +55,16 @@ setup(
         'tornado>=4.0.2',
     ],
     extras_require={
-        "postgres": postgres,
-        "mysql": mysql,
-        "all_dbs": all_dbs,
-        "samba": samba,
-        "s3": s3,
-        "doc": doc,
-        "devel": devel,
-        "all": devel
-    },
+        'all': devel + optional,
+        'all_dbs': all_dbs,
+        'doc': doc,
+        'devel': devel,
+        'hive': hive,
+        'mysql': mysql,
+        'postgres': postgres,
+        's3': s3,
+        'samba': samba,
+    },
     author='Maxime Beauchemin',
     author_email='maximebeauchemin@gmail.com',
     url='https://github.com/mistercrunch/Airflow'
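The setup.py side of the change moves the Hive stack (hive-thrift-py, pyhive, pyhs2) and librabbitmq out of install_requires and into the new 'hive' and 'optional' extras, so a bare install stays lean and the dynamic __init__ modules above simply skip whatever isn't present. Using the package name shown in this setup() call, installation would look roughly like:

pip install airflow            # core only; no Hive or RabbitMQ deps
pip install airflow[hive]      # opt into the new 'hive' extra
pip install airflow[all]       # devel + optional, per the new extras_require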