* added dependency on pytest-flask
* Updated logging method names
* cleaned up S3 configuration for all recommendation engines
  All S3 configuration data is now in taar.recommenders.s3config
* dropped pinned hashes for packages in requirements.txt
This commit is contained in:
Victor Ng 2018-11-27 12:36:09 -05:00
Родитель 7820aab240
Коммит bab992b682
16 изменённых файлов: 581 добавлений и 937 удалений

Просмотреть файл

@ -98,3 +98,43 @@ LocaleRecommender:
EnsembleRecommender: EnsembleRecommender:
* s3://telemetry-parquet/taar/ensemble/ensemble_weight.json * s3://telemetry-parquet/taar/ensemble/ensemble_weight.json
TAAR breaks out all S3 data load configuration into environment
variables. This ensures that running under test has no chance of
clobbering the production data in the event that a developer has AWS
configuration keys installed locally in `~/.aws/`
Production environment variables required for TAAR
Collaborative Recommender ::
TAAR_ITEM_MATRIX_BUCKET = "telemetry-public-analysis-2"
TAAR_ITEM_MATRIX_KEY = "telemetry-ml/addon_recommender/item_matrix.json"
TAAR_ADDON_MAPPING_BUCKET = "telemetry-public-analysis-2"
TAAR_ADDON_MAPPING_KEY = "telemetry-ml/addon_recommender/addon_mapping.json"
Ensemble Recommender ::
TAAR_ENSEMBLE_BUCKET = "telemetry-parquet"
TAAR_ENSEMBLE_KEY = "taar/ensemble/ensemble_weight.json"
Hybrid Recommender ::
TAAR_WHITELIST_BUCKET = "telemetry-parquet"
TAAR_WHITELIST_KEY = "telemetry-ml/addon_recommender/only_guids_top_200.json"
Locale Recommender ::
TAAR_LOCALE_BUCKET = "telemetry-parquet"
TAAR_LOCALE_KEY = "taar/locale/top10_dict.json"
Similarity Recommender ::
TAAR_SIMILARITY_BUCKET = "telemetry-parquet"
TAAR_SIMILARITY_DONOR_KEY = "taar/similarity/donors.json"
TAAR_SIMILARITY_LRCURVES_KEY = "taar/similarity/lr_curves.json"

Просмотреть файл

@ -1,3 +1 @@
certifi==2018.10.15 \ certifi==2018.10.15
--hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \
--hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a

Просмотреть файл

@ -1,603 +1,119 @@
appnope==0.1.0 \ appnope==0.1.0
--hash=sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0 \ arrow==0.12.1
--hash=sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71 asn1crypto==0.24.0
arrow==0.12.1 \ atomicwrites==1.1.5
--hash=sha256:a558d3b7b6ce7ffc74206a86c147052de23d3d4ef0e17c210dd478c53575c4cd attrs==18.1.0
asn1crypto==0.24.0 \ aws==0.2.5
--hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \ aws-xray-sdk==0.95
--hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 backcall==0.1.0
atomicwrites==1.1.5 \ bcrypt==3.1.4
--hash=sha256:240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585 \ binaryornot==0.4.4
--hash=sha256:a24da68318b08ac9c9c45029f4a10371ab5b20e4226738e150e6e7c571630ae6 boto==2.49.0
attrs==18.1.0 \ boto3==1.7.71
--hash=sha256:4b90b09eeeb9b88c35bc642cbac057e45a5fd85367b985bd2809c62b7b939265 \ botocore==1.10.71
--hash=sha256:e0d0eb91441a3b53dab4d9b743eafc1ac44476296a2053b6ca3af0b139faf87b cffi==1.11.5
aws==0.2.5 \ chardet==3.0.4
--hash=sha256:460cd737dee028bcebdb626f0c7acf87753f9e04e3317fda05929625419f2989 click==6.7
aws-xray-sdk==0.95 \ colander==1.4
--hash=sha256:72791618feb22eaff2e628462b0d58f398ce8c1bacfa989b7679817ab1fad60c \ colorama==0.3.9
--hash=sha256:9e7ba8dd08fd2939376c21423376206bff01d0deaea7d7721c6b35921fed1943 cookiecutter==1.6.0
backcall==0.1.0 \ cookies==2.2.1
--hash=sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4 \ coverage==4.5.1
--hash=sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2 coveralls==1.3.0
bcrypt==3.1.4 \ cryptography==2.3
--hash=sha256:01477981abf74e306e8ee31629a940a5e9138de000c6b0898f7f850461c4a0a5 \ decorator==4.3.0
--hash=sha256:054d6e0acaea429e6da3613fcd12d05ee29a531794d96f6ab959f29a39f33391 \ docker==3.4.1
--hash=sha256:0872eeecdf9a429c1420158500eedb323a132bc5bf3339475151c52414729e70 \ docker-pycreds==0.3.0
--hash=sha256:09a3b8c258b815eadb611bad04ca15ec77d86aa9ce56070e1af0d5932f17642a \ dockerflow==2018.4.0
--hash=sha256:0f317e4ffbdd15c3c0f8ab5fbd86aa9aabc7bea18b5cc5951b456fe39e9f738c \ docopt==0.6.2
--hash=sha256:2788c32673a2ad0062bea850ab73cffc0dba874db10d7a3682b6f2f280553f20 \ docutils==0.14
--hash=sha256:321d4d48be25b8d77594d8324c0585c80ae91ac214f62db9098734e5e7fb280f \ Fabric==2.1.3
--hash=sha256:346d6f84ff0b493dbc90c6b77136df83e81f903f0b95525ee80e5e6d5e4eef84 \ flake8==3.5.0
--hash=sha256:34dd60b90b0f6de94a89e71fcd19913a30e83091c8468d0923a93a0cccbfbbff \ Flask==1.0.2
--hash=sha256:3b4c23300c4eded8895442c003ae9b14328ae69309ac5867e7530de8bdd7875d \ Flask-API==1.0
--hash=sha256:43d1960e7db14042319c46925892d5fa99b08ff21d57482e6f5328a1aca03588 \ future==0.16.0
--hash=sha256:49e96267cd9be55a349fd74f9852eb9ae2c427cd7f6455d0f1765d7332292832 \ idna==2.7
--hash=sha256:63e06ffdaf4054a89757a3a1ab07f1b922daf911743114a54f7c561b9e1baa58 \ invoke==1.1.0
--hash=sha256:67ed1a374c9155ec0840214ce804616de49c3df9c5bc66740687c1c9b1cd9e8d \ iso8601==0.1.12
--hash=sha256:6b662a5669186439f4f583636c8d6ea77cf92f7cfe6aae8d22edf16c36840574 \ itsdangerous==0.24
--hash=sha256:6efd9ca20aefbaf2e7e6817a2c6ed4a50ff6900fafdea1bcb1d0e9471743b144 \ jedi==0.12.1
--hash=sha256:8569844a5d8e1fdde4d7712a05ab2e6061343ac34af6e7e3d7935b2bd1907bfd \ Jinja2==2.10
--hash=sha256:8629ea6a8a59f865add1d6a87464c3c676e60101b8d16ef404d0a031424a8491 \ jinja2-time==0.2.0
--hash=sha256:988cac675e25133d01a78f2286189c1f01974470817a33eaf4cfee573cfb72a5 \ jmespath==0.9.3
--hash=sha256:9a6fedda73aba1568962f7543a1f586051c54febbc74e87769bad6a4b8587c39 \ jsondiff==1.1.1
--hash=sha256:9eced8962ce3b7124fe20fd358cf8c7470706437fa064b9874f849ad4c5866fc \ jsonpickle==0.9.6
--hash=sha256:a005ed6163490988711ff732386b08effcbf8df62ae93dd1e5bda0714fad8afb \ MarkupSafe==1.0
--hash=sha256:ae35dbcb6b011af6c840893b32399252d81ff57d52c13e12422e16b5fea1d0fb \ mccabe==0.6.1
--hash=sha256:b1e8491c6740f21b37cca77bc64677696a3fb9f32360794d57fa8477b7329eda \ mock==2.0.0
--hash=sha256:c906bdb482162e9ef48eea9f8c0d967acceb5c84f2d25574c7d2a58d04861df1 \ more-itertools==4.2.0
--hash=sha256:cb18ffdc861dbb244f14be32c47ab69604d0aca415bee53485fcea4f8e93d5ef \ moto==1.3.3
--hash=sha256:cc2f24dc1c6c88c56248e93f28d439ee4018338567b0bbb490ea26a381a29b1e \ mozilla-srgutil==0.1.7
--hash=sha256:d860c7fff18d49e20339fc6dffc2d485635e36d4b2cccf58f45db815b64100b4 \ numpy==1.14.3
--hash=sha256:d86da365dda59010ba0d1ac45aa78390f56bf7f992e65f70b3b081d5e5257b09 \ packaging==17.1
--hash=sha256:e22f0997622e1ceec834fd25947dc2ee2962c2133ea693d61805bc867abaf7ea \ paramiko==2.4.2
--hash=sha256:f2fe545d27a619a552396533cddf70d83cecd880a611cdfdbb87ca6aec52f66b \ parso==0.3.1
--hash=sha256:f425e925485b3be48051f913dbe17e08e8c48588fdf44a26b8b14067041c0da6 \ pbr==4.1.0
--hash=sha256:f7fd3ed3745fe6e81e28dc3b3d76cce31525a91f32a387e1febd6b982caf8cdb \ pexpect==4.6.0
--hash=sha256:f9210820ee4818d84658ed7df16a7f30c9fba7d8b139959950acef91745cc0f7 pickleshare==0.7.4
binaryornot==0.4.4 \ pip-api==0.0.1
--hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ pkginfo==1.4.2
--hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 pluggy==0.6.0
boto==2.49.0 \ ply==3.11
--hash=sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8 \ poyo==0.4.1
--hash=sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a PrettyTable==0.7.2
boto3==1.7.71 \ prompt_toolkit==1.0.15
--hash=sha256:1fb25a1d8455b97276ef5f1e14255c04f59a985a14ddb69804ddf6c8a3449e08 \ ptyprocess==0.6.0
--hash=sha256:71ee5169b6957298fb178b294452592cd7c734e5c0d1a67487b56f993085f254 py==1.5.3
botocore==1.10.71 \ pyaml==17.12.1
--hash=sha256:9302ad235db66efa9d11c664b1cb0b259826d82a206446460ea05bcfcc431a4a \ pyasn1==0.4.3
--hash=sha256:ffa673c9a53f3ab4eba4ce8cf9d736177ca67509827e716cb5070f0b621fb0a7 pycodestyle==2.3.1
cffi==1.11.5 \ pycparser==2.18
--hash=sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743 \ pyflakes==1.6.0
--hash=sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef \ Pygments==2.2.0
--hash=sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50 \ PyNaCl==1.2.1
--hash=sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f \ pyparsing==2.2.0
--hash=sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30 \ pytest==3.6.0
--hash=sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93 \ pytest-cov==2.5.1
--hash=sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257 \ python-dateutil==2.6.1
--hash=sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b \ pytz==2018.5
--hash=sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3 \ PyYAML==3.13
--hash=sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e \ requests==2.20.1
--hash=sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc \ requests-toolbelt==0.8.0
--hash=sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04 \ responses==0.9.0
--hash=sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6 \ rsa==3.4.2
--hash=sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359 \ s3transfer==0.1.13
--hash=sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596 \ scipy==1.1.0
--hash=sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b \ setuptools_scm==2.1.0
--hash=sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd \ simplegeneric==0.8.1
--hash=sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95 \ six==1.11.0
--hash=sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5 \ spark==0.2.1
--hash=sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e \ spark_parser==1.8.7
--hash=sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6 \ thriftpy==0.3.9
--hash=sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca \ tox==3.0.0
--hash=sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31 \ tqdm==4.23.4
--hash=sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1 \ translationstring==1.3
--hash=sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2 \ twine==1.11.0
--hash=sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085 \ uncompyle2==2.0.0
--hash=sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801 \ uncompyle6==3.2.0
--hash=sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4 \ urllib3==1.23
--hash=sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184 \ virtualenv==16.0.0
--hash=sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917 \ wcwidth==0.1.7
--hash=sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f \ websocket-client==0.48.0
--hash=sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb Werkzeug==0.14.1
chardet==3.0.4 \ whichcraft==0.4.1
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ wrapt==1.10.11
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 xdis==3.8.2
click==6.7 \ xmltodict==0.11.0
--hash=sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d \ backports.functools_lru_cache==1.5
--hash=sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b backports.ssl_match_hostname==3.5.0.1
colander==1.4 \ backports.tempfile==1.0
--hash=sha256:3ed2941e006e88c7abe78ee0921f0b91801340acdcd46389380887027108e999 \ backports.weakref==1.0.post1
--hash=sha256:e20e9acf190e5711cf96aa65a5405dac04b6e841028fc361d953a9923dbc4e72 configparser==3.5.0
colorama==0.3.9 \ pathlib2==2.3.2
--hash=sha256:463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda \ funcsigs==1.0.2
--hash=sha256:48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1 scandir==1.8
cookiecutter==1.6.0 \ python-decouple==3.1
--hash=sha256:1316a52e1c1f08db0c9efbf7d876dbc01463a74b155a0d83e722be88beda9a3e \ enum34==1.1.6
--hash=sha256:ed8f54a8fc79b6864020d773ce11539b5f08e4617f353de1f22d23226f6a0d36 ipaddress==1.0.22
cookies==2.2.1 \ futures==3.2.0; python_version < '3.0'
--hash=sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3 \ pytest-flask==0.14.0
--hash=sha256:d6b698788cae4cfa4e62ef8643a9ca332b79bd96cb314294b864ae8d7eb3ee8e
coverage==4.5.1 \
--hash=sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba \
--hash=sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed \
--hash=sha256:104ab3934abaf5be871a583541e8829d6c19ce7bde2923b2751e0d3ca44db60a \
--hash=sha256:15b111b6a0f46ee1a485414a52a7ad1d703bdf984e9ed3c288a4414d3871dcbd \
--hash=sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640 \
--hash=sha256:1c383d2ef13ade2acc636556fd544dba6e14fa30755f26812f54300e401f98f2 \
--hash=sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162 \
--hash=sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508 \
--hash=sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249 \
--hash=sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694 \
--hash=sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a \
--hash=sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287 \
--hash=sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1 \
--hash=sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000 \
--hash=sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1 \
--hash=sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e \
--hash=sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5 \
--hash=sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062 \
--hash=sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba \
--hash=sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc \
--hash=sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc \
--hash=sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99 \
--hash=sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653 \
--hash=sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c \
--hash=sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558 \
--hash=sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f \
--hash=sha256:9e112fcbe0148a6fa4f0a02e8d58e94470fc6cb82a5481618fea901699bf34c4 \
--hash=sha256:ac4fef68da01116a5c117eba4dd46f2e06847a497de5ed1d64bb99a5fda1ef91 \
--hash=sha256:b8815995e050764c8610dbc82641807d196927c3dbed207f0a079833ffcf588d \
--hash=sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9 \
--hash=sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd \
--hash=sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d \
--hash=sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6 \
--hash=sha256:e4d96c07229f58cb686120f168276e434660e4358cc9cf3b0464210b04913e77 \
--hash=sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80 \
--hash=sha256:f8a923a85cb099422ad5a2e345fe877bbc89a8a8b23235824a93488150e45f6e
coveralls==1.3.0 \
--hash=sha256:32569a43c9dbc13fa8199247580a4ab182ef439f51f65bb7f8316d377a1340e8 \
--hash=sha256:664794748d2e5673e347ec476159a9d87f43e0d2d44950e98ed0e27b98da8346
cryptography==2.3 \
--hash=sha256:21af753934f2f6d1a10fe8f4c0a64315af209ef6adeaee63ca349797d747d687 \
--hash=sha256:27bb401a20a838d6d0ea380f08c6ead3ccd8c9d8a0232dc9adcc0e4994576a66 \
--hash=sha256:29720c4253263cff9aea64585adbbe85013ba647f6e98367efff9db2d7193ded \
--hash=sha256:2a35b7570d8f247889784010aac8b384fd2e4a47b33e15c4a60b45a7c1944120 \
--hash=sha256:42c531a6a354407f42ee07fda5c2c0dc822cf6d52744949c182f2b295fbd4183 \
--hash=sha256:5eb86f03f9c4f0ac2336ac5431271072ddf7ecc76b338e26366732cfac58aa19 \
--hash=sha256:67f7f57eae8dede577f3f7775957f5bec93edd6bdb6ce597bb5b28e1bdf3d4fb \
--hash=sha256:6ec84edcbc966ae460560a51a90046503ff0b5b66157a9efc61515c68059f6c8 \
--hash=sha256:7ba834564daef87557e7fcd35c3c3183a4147b0b3a57314e53317360b9b201b3 \
--hash=sha256:7d7f084cbe1fdb82be5a0545062b59b1ad3637bc5a48612ac2eb428ff31b31ea \
--hash=sha256:82409f5150e529d699e5c33fa8fd85e965104db03bc564f5f4b6a9199e591f7c \
--hash=sha256:87d092a7c2a44e5f7414ab02fb4145723ebba411425e1a99773531dd4c0e9b8d \
--hash=sha256:8c56ef989342e42b9fcaba7c74b446f0cc9bed546dd00034fa7ad66fc00307ef \
--hash=sha256:9449f5d4d7c516a6118fa9210c4a00f34384cb1d2028672100ee0c6cce49d7f6 \
--hash=sha256:bc2301170986ad82d9349a91eb8884e0e191209c45f5541b16aa7c0cfb135978 \
--hash=sha256:c132bab45d4bd0fff1d3fe294d92b0a6eb8404e93337b3127bdec9f21de117e6 \
--hash=sha256:c3d945b7b577f07a477700f618f46cbc287af3a9222cd73035c6ef527ef2c363 \
--hash=sha256:cee18beb4c807b5c0b178f4fa2fae03cef9d51821a358c6890f8b23465b7e5d2 \
--hash=sha256:d01dfc5c2b3495184f683574e03c70022674ca9a7be88589c5aba130d835ea90
decorator==4.3.0 \
--hash=sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82 \
--hash=sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c
docker==3.4.1 \
--hash=sha256:52cf5b1c3c394f9abf897638bfc3336d6b63a0f65969d0d4d2da6d3b1d8032b6 \
--hash=sha256:ad077b49660b711d20f50f344f70cfae014d635ef094bf21b0d7df5f0aeedf99
docker-pycreds==0.3.0 \
--hash=sha256:0a941b290764ea7286bd77f54c0ace43b86a8acd6eb9ead3de9840af52384079 \
--hash=sha256:8b0e956c8d206f832b06aa93a710ba2c3bcbacb5a314449c040b0b814355bbff
dockerflow==2018.4.0 \
--hash=sha256:2ea52a904abfda3430ff4f1effc164863b30d2b69f7ecbf92dd672860b0ec423 \
--hash=sha256:388d02c557968e6957140f7b82f669eac70adf5f570bc7705aa749d220a2e535
docopt==0.6.2 \
--hash=sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491
docutils==0.14 \
--hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
--hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
--hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
Fabric==2.1.3 \
--hash=sha256:1ee8d659507c21a191efca119ce25c0e18ee855eea4c9c1d46d41ec9765d42e6 \
--hash=sha256:4aeb5bcd9039a1e1225caed4b2ac296bbc347c869bdef7e3717c13ee49dba58a
flake8==3.5.0 \
--hash=sha256:7253265f7abd8b313e3892944044a365e3f4ac3fcdcfb4298f55ee9ddf188ba0 \
--hash=sha256:c7841163e2b576d435799169b78703ad6ac1bbb0f199994fc05f700b2a90ea37
Flask==1.0.2 \
--hash=sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48 \
--hash=sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05
Flask-API==1.0 \
--hash=sha256:6f9dc56d55fd82ffb1c5c9fd794cd6c50873ac10cf662e26817c179a655d1e22 \
--hash=sha256:fc10a80a13ea6fcf04acc2b1835aea05ec44aa6ae94f2ee85e52cd068567ce35
future==0.16.0 \
--hash=sha256:e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb
idna==2.7 \
--hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \
--hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16
invoke==1.1.0 \
--hash=sha256:1db6cf918e5df10efe4d61101b19763abe1510b6b2fe8c553daba25476de8044 \
--hash=sha256:265eead8c89805a2ac5083200842db6da7636ac63fb4fe0d1121b930770f3e2a \
--hash=sha256:3e8e2c2e69493227e210a1d19ccc7c44189240385dda4c9b8eb5d98fa0f68a3e
iso8601==0.1.12 \
--hash=sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3 \
--hash=sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82 \
--hash=sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd
itsdangerous==0.24 \
--hash=sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519
jedi==0.12.1 \
--hash=sha256:b409ed0f6913a701ed474a614a3bb46e6953639033e31f769ca7581da5bd1ec1 \
--hash=sha256:c254b135fb39ad76e78d4d8f92765ebc9bf92cbc76f49e97ade1d5f5121e1f6f
Jinja2==2.10 \
--hash=sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd \
--hash=sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4
jinja2-time==0.2.0 \
--hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \
--hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa
jmespath==0.9.3 \
--hash=sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64 \
--hash=sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63
jsondiff==1.1.1 \
--hash=sha256:2d0437782de9418efa34e694aa59f43d7adb1899bd9a793f063867ddba8f7893
jsonpickle==0.9.6 \
--hash=sha256:545b3bee0d65e1abb4baa1818edcc9ec239aa9f2ffbfde8084d71c056180054f
MarkupSafe==1.0 \
--hash=sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665
mccabe==0.6.1 \
--hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
--hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f
mock==2.0.0 \
--hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
--hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba
more-itertools==4.2.0 \
--hash=sha256:2b6b9893337bfd9166bee6a62c2b0c9fe7735dcf85948b387ec8cba30e85d8e8 \
--hash=sha256:6703844a52d3588f951883005efcf555e49566a48afd4db4e965d69b883980d3 \
--hash=sha256:a18d870ef2ffca2b8463c0070ad17b5978056f403fb64e3f15fe62a52db21cc0
moto==1.3.3 \
--hash=sha256:45d14aca2b06b0083d5e82cfd770ebca0ba77b5070aec6928670240939a78681 \
--hash=sha256:ee71b515ba34d64c5f625950fc995594040f793a4a106614ff108ae02c1a2896
mozilla-srgutil==0.1.7 \
--hash=sha256:b28a8a779500e7700d63eb1cdf4d1c5f83676209df6721103be682441f9ab51a
numpy==1.14.3 \
--hash=sha256:0074d42e2cc333800bd09996223d40ec52e3b1ec0a5cab05dacc09b662c4c1ae \
--hash=sha256:034717bfef517858abc79324820a702dc6cd063effb9baab86533e8a78670689 \
--hash=sha256:0db6301324d0568089663ef2701ad90ebac0e975742c97460e89366692bd0563 \
--hash=sha256:1864d005b2eb7598063e35c320787d87730d864f40d6410f768fe4ea20672016 \
--hash=sha256:46ce8323ca9384814c7645298b8b627b7d04ce97d6948ef02da357b2389d6972 \
--hash=sha256:510863d606c932b41d2209e4de6157ab3fdf52001d3e4ad351103176d33c4b8b \
--hash=sha256:560e23a12e7599be8e8b67621396c5bc687fd54b48b890adbc71bc5a67333f86 \
--hash=sha256:57dc6c22d59054542600fce6fae2d1189b9c50bafc1aab32e55f7efcc84a6c46 \
--hash=sha256:760550fdf9d8ec7da9c4402a4afe6e25c0f184ae132011676298a6b636660b45 \
--hash=sha256:8670067685051b49d1f2f66e396488064299fefca199c7c80b6ba0c639fedc98 \
--hash=sha256:9016692c7d390f9d378fc88b7a799dc9caa7eb938163dda5276d3f3d6f75debf \
--hash=sha256:98ff275f1b5907490d26b30b6ff111ecf2de0254f0ab08833d8fe61aa2068a00 \
--hash=sha256:9ccf4d5c9139b1e985db915039baa0610a7e4a45090580065f8d8cb801b7422f \
--hash=sha256:a8dbab311d4259de5eeaa5b4e83f5f8545e4808f9144e84c0f424a6ee55a7b98 \
--hash=sha256:aaef1bea636b6e552bbc5dae0ada87d4f6046359daaa97a05a013b0169620f27 \
--hash=sha256:b8987e30d9a0eb6635df9705a75cf8c4a2835590244baecf210163343bc65176 \
--hash=sha256:c3fe23df6fe0898e788581753da453f877350058c5982e85a8972feeecb15309 \
--hash=sha256:c5eb7254cfc4bd7a4330ad7e1f65b98343836865338c57b0e25c661e41d5cfd9 \
--hash=sha256:c80fcf9b38c7f4df666150069b04abbd2fe42ae640703a6e1f128cda83b552b7 \
--hash=sha256:e33baf50f2f6b7153ddb973601a11df852697fba4c08b34a5e0f39f66f8120e1 \
--hash=sha256:e8578a62a8eaf552b95d62f630bb5dd071243ba1302bbff3e55ac48588508736 \
--hash=sha256:f22b3206f1c561dd9110b93d144c6aaa4a9a354e3b07ad36030df3ea92c5bb5b \
--hash=sha256:f39afab5769b3aaa786634b94b4a23ef3c150bdda044e8a32a3fc16ddafe803b
packaging==17.1 \
--hash=sha256:e9215d2d2535d3ae866c3d6efc77d5b24a0192cce0ff20e42896cc0664f889c0 \
--hash=sha256:f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b
paramiko==2.4.2 \
--hash=sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b \
--hash=sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb
parso==0.3.1 \
--hash=sha256:35704a43a3c113cce4de228ddb39aab374b8004f4f2407d070b6a2ca784ce8a2 \
--hash=sha256:895c63e93b94ac1e1690f5fdd40b65f07c8171e3e53cbd7793b5b96c0e0a7f24
pbr==4.1.0 \
--hash=sha256:4f2b11d95917af76e936811be8361b2b19616e5ef3b55956a429ec7864378e0c \
--hash=sha256:e0f23b61ec42473723b2fec2f33fb12558ff221ee551962f01dd4de9053c2055
pexpect==4.6.0 \
--hash=sha256:2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba \
--hash=sha256:3fbd41d4caf27fa4a377bfd16fef87271099463e6fa73e92a52f92dfee5d425b
pickleshare==0.7.4 \
--hash=sha256:84a9257227dfdd6fe1b4be1319096c20eb85ff1e82c7932f36efccfe1b09737b \
--hash=sha256:c9a2541f25aeabc070f12f452e1f2a8eae2abd51e1cd19e8430402bdf4c1d8b5
pip-api==0.0.1 \
--hash=sha256:3cb7b51c541d4c13df43bf254aca371d9feb4669dc6c1cf3cecb9e9360eb3cb6
pkginfo==1.4.2 \
--hash=sha256:5878d542a4b3f237e359926384f1dde4e099c9f5525d236b1840cf704fa8d474 \
--hash=sha256:a39076cb3eb34c333a0dd390b568e9e1e881c7bf2cc0aee12120636816f55aee
pluggy==0.6.0 \
--hash=sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff \
--hash=sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c \
--hash=sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5
ply==3.11 \
--hash=sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3 \
--hash=sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce
poyo==0.4.1 \
--hash=sha256:103b4ee3e1c7765098fe1cabe43f828db2e2a6079646561a2117e1a809f352d6 \
--hash=sha256:230ec11c2f35a23410c1f0e474f09fa4e203686f40ab3adca7b039c845d8c325
PrettyTable==0.7.2 \
--hash=sha256:2d5460dc9db74a32bcc8f9f67de68b2c4f4d2f01fa3bd518764c69156d9cacd9 \
--hash=sha256:853c116513625c738dc3ce1aee148b5b5757a86727e67eff6502c7ca59d43c36 \
--hash=sha256:a53da3b43d7a5c229b5e3ca2892ef982c46b7923b51e98f0db49956531211c4f
prompt_toolkit==1.0.15 \
--hash=sha256:1df952620eccb399c53ebb359cc7d9a8d3a9538cb34c5a1344bdbeb29fbcc381 \
--hash=sha256:3f473ae040ddaa52b52f97f6b4a493cfa9f5920c255a12dc56a7d34397a398a4 \
--hash=sha256:858588f1983ca497f1cf4ffde01d978a3ea02b01c8a26a8bbc5cd2e66d816917
ptyprocess==0.6.0 \
--hash=sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0 \
--hash=sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f
py==1.5.3 \
--hash=sha256:29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881 \
--hash=sha256:983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a
pyaml==17.12.1 \
--hash=sha256:66623c52f34d83a2c0fc963e08e8b9d0c13d88404e3b43b1852ef71eda19afa3 \
--hash=sha256:f83fc302c52c6b83a15345792693ae0b5bc07ad19f59e318b7617d7123d62990
pyasn1==0.4.3 \
--hash=sha256:24f21b4fd2dc2b344dee2205fa3930464aa21292216d3d6e39007a2e059e21af \
--hash=sha256:2f57960dc7a2820ea5a1782b872d974b639aa3b448ac6628d1ecc5d0fe3986f2 \
--hash=sha256:3651774ca1c9726307560792877db747ba5e8a844ea1a41feb7670b319800ab3 \
--hash=sha256:602fda674355b4701acd7741b2be5ac188056594bf1eecf690816d944e52905e \
--hash=sha256:8fb265066eac1d3bb5015c6988981b009ccefd294008ff7973ed5f64335b0f2d \
--hash=sha256:9334cb427609d2b1e195bb1e251f99636f817d7e3e1dffa150cb3365188fb992 \
--hash=sha256:9a15cc13ff6bf5ed29ac936ca941400be050dff19630d6cd1df3fb978ef4c5ad \
--hash=sha256:a66dcda18dbf6e4663bde70eb30af3fc4fe1acb2d14c4867a861681887a5f9a2 \
--hash=sha256:ba77f1e8d7d58abc42bfeddd217b545fdab4c1eeb50fd37c2219810ad56303bf \
--hash=sha256:cdc8eb2eaafb56de66786afa6809cd9db2df1b3b595dcb25aa5b9dc61189d40a \
--hash=sha256:d01fbba900c80b42af5c3fe1a999acf61e27bf0e452e0f1ef4619065e57622da \
--hash=sha256:f281bf11fe204f05859225ec2e9da7a7c140b65deccd8a4eb0bc75d0bd6949e0 \
--hash=sha256:fb81622d8f3509f0026b0683fe90fea27be7284d3826a5f2edf97f69151ab0fc
pycodestyle==2.3.1 \
--hash=sha256:682256a5b318149ca0d2a9185d365d8864a768a28db66a84a2ea946bcc426766 \
--hash=sha256:6c4245ade1edfad79c3446fadfc96b0de2759662dc29d07d80a6f27ad1ca6ba9
pycparser==2.18 \
--hash=sha256:99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226
pyflakes==1.6.0 \
--hash=sha256:08bd6a50edf8cffa9fa09a463063c425ecaaf10d1eb0335a7e8b1401aef89e6f \
--hash=sha256:8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805
Pygments==2.2.0 \
--hash=sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d \
--hash=sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc
PyNaCl==1.2.1 \
--hash=sha256:04e30e5bdeeb2d5b34107f28cd2f5bbfdc6c616f3be88fc6f53582ff1669eeca \
--hash=sha256:0bfa0d94d2be6874e40f896e0a67e290749151e7de767c5aefbad1121cad7512 \
--hash=sha256:11aa4e141b2456ce5cecc19c130e970793fa3a2c2e6fbb8ad65b28f35aa9e6b6 \
--hash=sha256:13bdc1fe084ff9ac7653ae5a924cae03bf4bb07c6667c9eb5b6eb3c570220776 \
--hash=sha256:14339dc233e7a9dda80a3800e64e7ff89d0878ba23360eea24f1af1b13772cac \
--hash=sha256:1d33e775fab3f383167afb20b9927aaf4961b953d76eeb271a5703a6d756b65b \
--hash=sha256:2a42b2399d0428619e58dac7734838102d35f6dcdee149e0088823629bf99fbb \
--hash=sha256:2dce05ac8b3c37b9e2f65eab56c544885607394753e9613fd159d5e2045c2d98 \
--hash=sha256:63cfccdc6217edcaa48369191ae4dca0c390af3c74f23c619e954973035948cd \
--hash=sha256:6453b0dae593163ffc6db6f9c9c1597d35c650598e2c39c0590d1757207a1ac2 \
--hash=sha256:73a5a96fb5fbf2215beee2353a128d382dbca83f5341f0d3c750877a236569ef \
--hash=sha256:8abb4ef79161a5f58848b30ab6fb98d8c466da21fdd65558ce1d7afc02c70b5f \
--hash=sha256:8ac1167195b32a8755de06efd5b2d2fe76fc864517dab66aaf65662cc59e1988 \
--hash=sha256:8f505f42f659012794414fa57c498404e64db78f1d98dfd40e318c569f3c783b \
--hash=sha256:9c8a06556918ee8e3ab48c65574f318f5a0a4d31437fc135da7ee9d4f9080415 \
--hash=sha256:a1e25fc5650cf64f01c9e435033e53a4aca9de30eb9929d099f3bb078e18f8f2 \
--hash=sha256:be71cd5fce04061e1f3d39597f93619c80cdd3558a6c9ba99a546f144a8d8101 \
--hash=sha256:c5b1a7a680218dee9da0f1b5e24072c46b3c275d35712bc1d505b85bb03441c0 \
--hash=sha256:cb785db1a9468841a1265c9215c60fe5d7af2fb1b209e3316a152704607fc582 \
--hash=sha256:cf6877124ae6a0698404e169b3ba534542cfbc43f939d46b927d956daf0a373a \
--hash=sha256:d0eb5b2795b7ee2cbcfcadacbe95a13afbda048a262bd369da9904fecb568975 \
--hash=sha256:d3a934e2b9f20abac009d5b6951067cfb5486889cb913192b4d8288b216842f1 \
--hash=sha256:d795f506bcc9463efb5ebb0f65ed77921dcc9e0a50499dedd89f208445de9ecb \
--hash=sha256:d8aaf7e5d6b0e0ef7d6dbf7abeb75085713d0100b4eb1a4e4e857de76d77ac45 \
--hash=sha256:de2aaca8386cf4d70f1796352f2346f48ddb0bed61dc43a3ce773ba12e064031 \
--hash=sha256:e0d38fa0a75f65f556fb912f2c6790d1fa29b7dd27a1d9cc5591b281321eaaa9 \
--hash=sha256:eb2acabbd487a46b38540a819ef67e477a674481f84a82a7ba2234b9ba46f752 \
--hash=sha256:eeee629828d0eb4f6d98ac41e9a3a6461d114d1d0aa111a8931c049359298da0 \
--hash=sha256:f5836463a3c0cca300295b229b6c7003c415a9d11f8f9288ddbd728e2746524c \
--hash=sha256:f5ce9e26d25eb0b2d96f3ef0ad70e1d3ae89b5d60255c462252a3e456a48c053 \
--hash=sha256:fabf73d5d0286f9e078774f3435601d2735c94ce9e514ac4fb945701edead7e4
pyparsing==2.2.0 \
--hash=sha256:0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04 \
--hash=sha256:281683241b25fe9b80ec9d66017485f6deff1af5cde372469134b56ca8447a07 \
--hash=sha256:8f1e18d3fd36c6795bb7e02a39fd05c611ffc2596c1e0d995d34d67630426c18 \
--hash=sha256:9e8143a3e15c13713506886badd96ca4b579a87fbdf49e550dbfc057d6cb218e \
--hash=sha256:b8b3117ed9bdf45e14dcc89345ce638ec7e0e29b2b579fa1ecf32ce45ebac8a5 \
--hash=sha256:e4d45427c6e20a59bf4f88c639dcc03ce30d193112047f94012102f235853a58 \
--hash=sha256:fee43f17a9c4087e7ed1605bd6df994c6173c1e977d7ade7b651292fab2bd010
pytest==3.6.0 \
--hash=sha256:39555d023af3200d004d09e51b4dd9fdd828baa863cded3fd6ba2f29f757ae2d \
--hash=sha256:c76e93f3145a44812955e8d46cdd302d8a45fbfc7bf22be24fe231f9d8d8853a
pytest-cov==2.5.1 \
--hash=sha256:03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d \
--hash=sha256:890fe5565400902b0c78b5357004aab1c814115894f4f21370e2433256a3eeec
python-dateutil==2.6.1 \
--hash=sha256:891c38b2a02f5bb1be3e4793866c8df49c7d19baabf9c1bad62547e0b4866aca \
--hash=sha256:95511bae634d69bc7329ba55e646499a842bc4ec342ad54a8cdb65645a0aad3c
pytz==2018.5 \
--hash=sha256:a061aa0a9e06881eb8b3b2b43f05b9439d6583c206d0a6c340ff72a7b6669053 \
--hash=sha256:ffb9ef1de172603304d9d2819af6f5ece76f2e85ec10692a524dd876e72bf277
PyYAML==3.13 \
--hash=sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b \
--hash=sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf \
--hash=sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a \
--hash=sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3 \
--hash=sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1 \
--hash=sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1 \
--hash=sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613 \
--hash=sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04 \
--hash=sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f \
--hash=sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537 \
--hash=sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531
requests==2.20.1 \
--hash=sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54 \
--hash=sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263
requests-toolbelt==0.8.0 \
--hash=sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237 \
--hash=sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5
responses==0.9.0 \
--hash=sha256:c6082710f4abfb60793899ca5f21e7ceb25aabf321560cc0726f8b59006811c9 \
--hash=sha256:f23a29dca18b815d9d64a516b4a0abb1fbdccff6141d988ad8100facb81cf7b3
rsa==3.4.2 \
--hash=sha256:25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5 \
--hash=sha256:43f682fea81c452c98d09fc316aae12de6d30c4b5c84226642cf8f8fd1c93abd
s3transfer==0.1.13 \
--hash=sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1 \
--hash=sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f
scipy==1.1.0 \
--hash=sha256:0611ee97296265af4a21164a5323f8c1b4e8e15c582d3dfa7610825900136bb7 \
--hash=sha256:08237eda23fd8e4e54838258b124f1cd141379a5f281b0a234ca99b38918c07a \
--hash=sha256:0e645dbfc03f279e1946cf07c9c754c2a1859cb4a41c5f70b25f6b3a586b6dbd \
--hash=sha256:0e9bb7efe5f051ea7212555b290e784b82f21ffd0f655405ac4f87e288b730b3 \
--hash=sha256:108c16640849e5827e7d51023efb3bd79244098c3f21e4897a1007720cb7ce37 \
--hash=sha256:340ef70f5b0f4e2b4b43c8c8061165911bc6b2ad16f8de85d9774545e2c47463 \
--hash=sha256:3ad73dfc6f82e494195144bd3a129c7241e761179b7cb5c07b9a0ede99c686f3 \
--hash=sha256:3b243c77a822cd034dad53058d7c2abf80062aa6f4a32e9799c95d6391558631 \
--hash=sha256:404a00314e85eca9d46b80929571b938e97a143b4f2ddc2b2b3c91a4c4ead9c5 \
--hash=sha256:423b3ff76957d29d1cce1bc0d62ebaf9a3fdfaf62344e3fdec14619bb7b5ad3a \
--hash=sha256:42d9149a2fff7affdd352d157fa5717033767857c11bd55aa4a519a44343dfef \
--hash=sha256:625f25a6b7d795e8830cb70439453c9f163e6870e710ec99eba5722775b318f3 \
--hash=sha256:698c6409da58686f2df3d6f815491fd5b4c2de6817a45379517c92366eea208f \
--hash=sha256:729f8f8363d32cebcb946de278324ab43d28096f36593be6281ca1ee86ce6559 \
--hash=sha256:8190770146a4c8ed5d330d5b5ad1c76251c63349d25c96b3094875b930c44692 \
--hash=sha256:878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1 \
--hash=sha256:8b984f0821577d889f3c7ca8445564175fb4ac7c7f9659b7c60bef95b2b70e76 \
--hash=sha256:8f841bbc21d3dad2111a94c490fb0a591b8612ffea86b8e5571746ae76a3deac \
--hash=sha256:c22b27371b3866c92796e5d7907e914f0e58a36d3222c5d436ddd3f0e354227a \
--hash=sha256:d0cdd5658b49a722783b8b4f61a6f1f9c75042d0e29a30ccb6cacc9b25f6d9e2 \
--hash=sha256:d40dc7f494b06dcee0d303e51a00451b2da6119acbeaccf8369f2d29e28917ac \
--hash=sha256:d8491d4784aceb1f100ddb8e31239c54e4afab8d607928a9f7ef2469ec35ae01 \
--hash=sha256:dfc5080c38dde3f43d8fbb9c0539a7839683475226cf83e4b24363b227dfe552 \
--hash=sha256:e24e22c8d98d3c704bb3410bce9b69e122a8de487ad3dbfe9985d154e5c03a40 \
--hash=sha256:e7a01e53163818d56eabddcafdc2090e9daba178aad05516b20c6591c4811020 \
--hash=sha256:ee677635393414930541a096fc8e61634304bb0153e4e02b75685b11eba14cae \
--hash=sha256:f0521af1b722265d824d6ad055acfe9bd3341765735c44b5a4d0069e189a0f40 \
--hash=sha256:f25c281f12c0da726c6ed00535ca5d1622ec755c30a3f8eafef26cf43fede694
setuptools_scm==2.1.0 \
--hash=sha256:0f386524bb99d959e0d98381d7fe1f0a810e04eace5d2cc6297e701d64de9a7d \
--hash=sha256:1261fb48def5ac5e4d04cb6196886cb8c2de5dc066ed2bfee99d4bb21aecb781 \
--hash=sha256:95ff5ca2cb1e48a3b92080c90fac35ac015c3f1be185f401f0941b11279fdae8 \
--hash=sha256:a767141fecdab1c0b3c8e4c788ac912d7c94a0d6c452d40777ba84f918316379 \
--hash=sha256:e2ab256c944e66f063a020a56b4269010d772ce3af757cc703fe56e6fdc2dda1 \
--hash=sha256:fda84172bd4dca0b671c1569eef6d4458d7d006c66a5adb41aa7a88462bcb6c0
simplegeneric==0.8.1 \
--hash=sha256:dc972e06094b9af5b855b3df4a646395e43d1c9d0d39ed345b7393560d0b9173
six==1.11.0 \
--hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
--hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
spark==0.2.1 \
--hash=sha256:df499b57d30178c6d32dbda2af188b9833a261a70ccd262126ed7b415d9e36d1
spark_parser==1.8.7 \
--hash=sha256:03e115c9c3cf849658ce191b4ad8a512e88c6337141f44b3d87a51a6bf18413a \
--hash=sha256:11be196977ce42710d44dbb7adb78b42853875a1f686c64b88315e519a8b1b0d \
--hash=sha256:1f37849f320d8f8454456c0ca985deda1413974256945f398e30addaa905d198 \
--hash=sha256:4c5e6064afbb3c114749016d585b0e4f9222d4ffa97a1854c9ab70b25783ef48 \
--hash=sha256:5426f3ade7ccad74f12e0ffdf34e28a29b0295c245ee5f5de96a5f942025c53c \
--hash=sha256:6c1edc94291385d23b56a0eab348aceba72b6d5f363e52e773c7b2c88ec9f8ab \
--hash=sha256:76bb0acdfba2c7193e608e9cf995f41799b61b2e3b8e1f13320b7ee66aff7b65 \
--hash=sha256:e0769cba3fe78af969b178c571ba297aa8de202a8d48661abf0e46fc8b427e02 \
--hash=sha256:e8e456ffa6e83f963f4830884624830bbbea82c9ae6b3b1700f84566550e1ab0
thriftpy==0.3.9 \
--hash=sha256:309e57d97b5bfa01601393ad4f245451e989d6206a59279e56866b264a99796d
tox==3.0.0 \
--hash=sha256:96efa09710a3daeeb845561ebbe1497641d9cef2ee0aea30db6969058b2bda2f \
--hash=sha256:9ee7de958a43806402a38c0d2aa07fa8553f4d2c20a15b140e9f771c2afeade0
tqdm==4.23.4 \
--hash=sha256:224291ee0d8c52d91b037fd90806f48c79bcd9994d3b0abc9e44b946a908fccd \
--hash=sha256:77b8424d41b31e68f437c6dd9cd567aebc9a860507cb42fbd880a5f822d966fe
translationstring==1.3 \
--hash=sha256:4ee44cfa58c52ade8910ea0ebc3d2d84bdcad9fa0422405b1801ec9b9a65b72d \
--hash=sha256:e26c7bf383413234ed442e0980a2ebe192b95e3745288a8fd2805156d27515b4
twine==1.11.0 \
--hash=sha256:08eb132bbaec40c6d25b358f546ec1dc96ebd2638a86eea68769d9e67fe2b129 \
--hash=sha256:2fd9a4d9ff0bcacf41fdc40c8cb0cfaef1f1859457c9653fd1b92237cc4e9f25
uncompyle2==2.0.0 \
--hash=sha256:629571965db312d34a714d8e16f25b8d67f85b5bb78abbc950457b1b64b324b1 \
--hash=sha256:c1458bb0662c1f7f269c3aa0768ec0c2951789a1fed3e6b3fdfd79dba43eb68d
uncompyle6==3.2.0 \
--hash=sha256:12b27e7179516d77136629b094e1665fd5c6fc4583eea4a2b81cd98dfb0d046d \
--hash=sha256:182a922633e09a688240718c67991da210967f8946755af6e2b2ba6c666c2f1b \
--hash=sha256:339329faf77ec6e53b87e70353545c472f253b0f7772ec369ef705e763de9ce6 \
--hash=sha256:3985675039554fb0ce3c7cff823a747a93f96a29946c5f39ab3c0f72bad8a2a2 \
--hash=sha256:610335002a9c49c1e98a04f615d23602e4d081d3b2992a867a48e5162cb86c0a \
--hash=sha256:679c727e3c468a922ed8ee0094448db78448989362aa2ca23ba5d98b3db381f1 \
--hash=sha256:83fea296bb01a4e94bf1b0e70ab3eebeff8a1bb4029c0213bc89bdc2c55e7bb1 \
--hash=sha256:94a61a046e0754a76c64dc843a3e91d071dfe5dda6691a3b5c8539e4132b6ab1 \
--hash=sha256:9b91cecf8ad398cb13b804bbdbc34fb0570a3552777f873eb55c109c28221ded \
--hash=sha256:a7c3351e3b8357245faaa3934fb59623b5ecf75f629f6fb5a7d2d02f25ff8021 \
--hash=sha256:d4a96c43cec878e438a8abca515d9a05203038adf41c28e9423232dd61b9aeeb \
--hash=sha256:ebe473b2e5b64adbeb9a2dc3cb88d1f811ff84cb7289d4f5b942568b558c091f \
--hash=sha256:f6ed1d07ac5c7addc23ca6d435fc0c3c9d124e99cb143edffbcdee5c0a564c66
urllib3==1.23 \
--hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf \
--hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5
virtualenv==16.0.0 \
--hash=sha256:2ce32cd126117ce2c539f0134eb89de91a8413a29baac49cbab3eb50e2026669 \
--hash=sha256:ca07b4c0b54e14a91af9f34d0919790b016923d157afda5efdde55c96718f752
wcwidth==0.1.7 \
--hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \
--hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c
websocket-client==0.48.0 \
--hash=sha256:18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a \
--hash=sha256:db70953ae4a064698b27ae56dcad84d0ee68b7b43cb40940f537738f38f510c1
Werkzeug==0.14.1 \
--hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \
--hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b
whichcraft==0.4.1 \
--hash=sha256:9e0d51c9387cb7e9f28b7edb549e6a03da758f7784f991eb4397d7f7808c57fd \
--hash=sha256:cd0e10b58960ab877d9f273cd28788730936c3cdaceec2dafad97c7cf3067d46
wrapt==1.10.11 \
--hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6
xdis==3.8.2 \
--hash=sha256:1cc6a3be72c1707926cc46f0689d01b5b49cbc0a57080bc756b92159e3cca2d0 \
--hash=sha256:4aa484236296738981a4b48f8e2d33836e05bd4364d94c252f8f75f19b407800 \
--hash=sha256:4bcd0727c150824b4556c1f0714140ec8d7f7f4d3af5728b084fbcb89bc3a21b \
--hash=sha256:52645c3196d9c43d2ebdabff94b005b323795c2b94e418c33b0adcae249e4ae8 \
--hash=sha256:52a4d7e13bb76d7610bc3ea57d300fcc47adc09ab78cc1774829f100dc01ed02 \
--hash=sha256:6a468d3856632f99f94670abd22471fde325ae7dae92e556b6f670947b81e6a1 \
--hash=sha256:8d55ca893c6ed8e66ff9008178a417400034be0d6331c248c600298d0a4371c5 \
--hash=sha256:9182db8aa2f9663faeec3f978c096921b0b4690501f200f56f083b8083e6a2bf \
--hash=sha256:9ae0fe0bfe9eb046ed112f2a97ee5f7e942d02ff7d49d414ca200dbcb6ccacc9 \
--hash=sha256:a05231e0ace28c87aeddcb148cbdf392577c2194fc391850203d7b6a044569cb \
--hash=sha256:ceeb62f3c9e730891e3be5be0f5a27b5397abb8bc2bf6bf2a59e6b40017c5cfb \
--hash=sha256:d931f8260dd30e7c9053103e5dce8e77febf4497dbb7d1b5c16f0550f4290b5b \
--hash=sha256:dd032b73922f0021613c0cf5e8e6d85a592c56a7e68bb27b549d836f1ccc3da8 \
--hash=sha256:e8548be5a386f51a6a0295e033984869ed71191879adc3d5aed17cd4a030ad8f \
--hash=sha256:f186d5ad93378ec51d9e06dd63e382e7fd26158459a2d9db3de12abbcf4528c7
xmltodict==0.11.0 \
--hash=sha256:8f8d7d40aa28d83f4109a7e8aa86e67a4df202d9538be40c0cb1d70da527b0df \
--hash=sha256:add07d92089ff611badec526912747cf87afd4f9447af6661aca074eeaf32615
backports.functools_lru_cache==1.5 \
--hash=sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a \
--hash=sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd
backports.ssl_match_hostname==3.5.0.1 \
--hash=sha256:502ad98707319f4a51fa2ca1c677bd659008d27ded9f6380c79e8932e38dcdf2
backports.tempfile==1.0 \
--hash=sha256:05aa50940946f05759696156a8c39be118169a0e0f94a49d0bb106503891ff54 \
--hash=sha256:1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762
backports.weakref==1.0.post1 \
--hash=sha256:81bc9b51c0abc58edc76aefbbc68c62a787918ffe943a37947e162c3f8e19e82 \
--hash=sha256:bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2
configparser==3.5.0 \
--hash=sha256:5308b47021bc2340965c371f0f058cc6971a04502638d4244225c49d80db273a
pathlib2==2.3.2 \
--hash=sha256:8eb170f8d0d61825e09a95b38be068299ddeda82f35e96c3301a8a5e7604cb83 \
--hash=sha256:d1aa2a11ba7b8f7b21ab852b1fb5afb277e1bb99d5dfc663380b5015c0d80c5a
funcsigs==1.0.2 \
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
scandir==1.8 \
--hash=sha256:01babbf0fea42a135f6e24747cac63225399d97a67a9a7eedc1f0510b63122db \
--hash=sha256:0f0059d907817cd3c07f1b658611aabd1af0a4bdc4bb7b211dfd8962d5bd46ba \
--hash=sha256:11e3bd756b13db4baca364985575eeef4781ce35ce66e2324811091b39b97cdb \
--hash=sha256:49345923704d611458335872925802620fcf895e1c67074dd8ea715e579f2581 \
--hash=sha256:7f94d5967d61d1b5e415840b3a8995cb00a90893b9628451745e57a3749546d6 \
--hash=sha256:8231e327a3a1c090b4f09ba40cc0b75a85939812d0e8f4c83acd745df3ed6c23 \
--hash=sha256:8d5011d3a99042c4d90e8adda0052d4475aae3d57bb927012267a6c59186d870 \
--hash=sha256:9f703e6b8eb53211d39c0f10e5c02f86e9a989fd44913b5c992259312d9bd59d \
--hash=sha256:b009e15a3d73376a84f8d8fad9b5ab6d9f96cb7606bdb867a4c882f10508e57e \
--hash=sha256:b0e0b4e6de8f8aae41a9fb4834127ee125668c363a79c62eb9f9c77de58e7b71 \
--hash=sha256:f70d557a271ee9973087dc704daea205c95f021ee149f1605592bb0b1571ad78
python-decouple==3.1 \
--hash=sha256:1317df14b43efee4337a4aa02914bf004f010cd56d6c4bd894e6474ec8c4fe2d
enum34==1.1.6 \
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
ipaddress==1.0.22 \
--hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
--hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c
futures==3.2.0; python_version < '3.0' \
--hash=sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265 \
--hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1

Просмотреть файл

@ -9,11 +9,12 @@ import operator as op
from .base_recommender import AbstractRecommender from .base_recommender import AbstractRecommender
ITEM_MATRIX_CONFIG = ('telemetry-public-analysis-2', 'telemetry-ml/addon_recommender/item_matrix.json') from .s3config import TAAR_ITEM_MATRIX_BUCKET
ADDON_MAPPING_CONFIG = ('telemetry-public-analysis-2', 'telemetry-ml/addon_recommender/addon_mapping.json') from .s3config import TAAR_ITEM_MATRIX_KEY
from .s3config import TAAR_ADDON_MAPPING_BUCKET
from .s3config import TAAR_ADDON_MAPPING_KEY
# http://garage.pimentech.net/libcommonPython_src_python_libcommon_javastringhashcode/
def java_string_hashcode(s): def java_string_hashcode(s):
h = 0 h = 0
for c in s: for c in s:
@ -33,24 +34,19 @@ class CollaborativeRecommender(AbstractRecommender):
recommender = CollaborativeRecommender() recommender = CollaborativeRecommender()
dists = recommender.recommend(client_info) dists = recommender.recommend(client_info)
""" """
def __init__(self, ctx): def __init__(self, ctx):
self._ctx = ctx self._ctx = ctx
if 'collaborative_addon_mapping' in self._ctx: self._addon_mapping = LazyJSONLoader(
self._addon_mapping = self._ctx['collaborative_addon_mapping'] self._ctx, TAAR_ADDON_MAPPING_BUCKET, TAAR_ADDON_MAPPING_KEY
else: )
self._addon_mapping = LazyJSONLoader(self._ctx,
ADDON_MAPPING_CONFIG[0],
ADDON_MAPPING_CONFIG[1])
if 'collaborative_item_matrix' in self._ctx: self._raw_item_matrix = LazyJSONLoader(
self._raw_item_matrix = self._ctx['collaborative_item_matrix'] self._ctx, TAAR_ITEM_MATRIX_BUCKET, TAAR_ITEM_MATRIX_KEY
else: )
self._raw_item_matrix = LazyJSONLoader(self._ctx,
ITEM_MATRIX_CONFIG[0],
ITEM_MATRIX_CONFIG[1])
self.logger = self._ctx[IMozLogging].get_logger('taar') self.logger = self._ctx[IMozLogging].get_logger("taar")
self.model = None self.model = None
self._build_model() self._build_model()
@ -66,10 +62,18 @@ class CollaborativeRecommender(AbstractRecommender):
def _load_json_models(self): def _load_json_models(self):
# Download the addon mappings. # Download the addon mappings.
if self.addon_mapping is None: if self.addon_mapping is None:
self.logger.error("Cannot download the addon mapping file {} {}".format(*ADDON_MAPPING_CONFIG)) self.logger.error(
"Cannot download the addon mapping file {} {}".format(
TAAR_ADDON_MAPPING_BUCKET, TAAR_ADDON_MAPPING_KEY
)
)
if self.addon_mapping is None: if self.addon_mapping is None:
self.logger.error("Cannot download the model file {} {}".format(*ITEM_MATRIX_CONFIG)) self.logger.error(
"Cannot download the model file {} {}".format(
TAAR_ITEM_MATRIX_BUCKET, TAAR_ITEM_MATRIX_KEY
)
)
def _build_model(self): def _build_model(self):
if self.raw_item_matrix is None: if self.raw_item_matrix is None:
@ -77,34 +81,43 @@ class CollaborativeRecommender(AbstractRecommender):
# Build a dense numpy matrix out of it. # Build a dense numpy matrix out of it.
num_rows = len(self.raw_item_matrix) num_rows = len(self.raw_item_matrix)
num_cols = len(self.raw_item_matrix[0]['features']) num_cols = len(self.raw_item_matrix[0]["features"])
self.model = np.zeros(shape=(num_rows, num_cols)) self.model = np.zeros(shape=(num_rows, num_cols))
for index, row in enumerate(self.raw_item_matrix): for index, row in enumerate(self.raw_item_matrix):
self.model[index, :] = row['features'] self.model[index, :] = row["features"]
def can_recommend(self, client_data, extra_data={}): def can_recommend(self, client_data, extra_data={}):
# We can't recommend if we don't have our data files. # We can't recommend if we don't have our data files.
if self.raw_item_matrix is None or self.model is None or self.addon_mapping is None: if (
self.raw_item_matrix is None
or self.model is None
or self.addon_mapping is None
):
return False return False
# We only get meaningful recommendation if a client has at least an # We only get meaningful recommendation if a client has at least an
# addon installed. # addon installed.
if len(client_data.get('installed_addons', [])) > 0: if len(client_data.get("installed_addons", [])) > 0:
return True return True
return False return False
def recommend(self, client_data, limit, extra_data={}): def recommend(self, client_data, limit, extra_data={}):
# Addons identifiers are stored as positive hash values within the model. # Addons identifiers are stored as positive hash values within the model.
installed_addons_as_hashes =\ installed_addons_as_hashes = [
[positive_hash(addon_id) for addon_id in client_data.get('installed_addons', [])] positive_hash(addon_id)
for addon_id in client_data.get("installed_addons", [])
]
# Build the query vector by setting the position of the queried addons to 1.0 # Build the query vector by setting the position of the queried addons to 1.0
# and the other to 0.0. # and the other to 0.0.
query_vector = np.array([1.0 query_vector = np.array(
if (entry.get("id") in installed_addons_as_hashes) [
else 0.0 for entry in self.raw_item_matrix]) 1.0 if (entry.get("id") in installed_addons_as_hashes) else 0.0
for entry in self.raw_item_matrix
]
)
# Build the user factors matrix. # Build the user factors matrix.
user_factors = np.matmul(query_vector, self.model) user_factors = np.matmul(query_vector, self.model)
@ -119,12 +132,15 @@ class CollaborativeRecommender(AbstractRecommender):
# filter out legacy addons from the suggestions. # filter out legacy addons from the suggestions.
hashed_id = addon.get("id") hashed_id = addon.get("id")
str_hashed_id = str(hashed_id) str_hashed_id = str(hashed_id)
if (hashed_id in installed_addons_as_hashes or if (
str_hashed_id not in self.addon_mapping or hashed_id in installed_addons_as_hashes
self.addon_mapping[str_hashed_id].get("isWebextension", False) is False): or str_hashed_id not in self.addon_mapping
or self.addon_mapping[str_hashed_id].get("isWebextension", False)
is False
):
continue continue
dist = np.dot(user_factors_transposed, addon.get('features')) dist = np.dot(user_factors_transposed, addon.get("features"))
# Read the addon ids from the "addon_mapping" looking it # Read the addon ids from the "addon_mapping" looking it
# up by 'id' (which is an hashed value). # up by 'id' (which is an hashed value).
addon_id = self.addon_mapping[str_hashed_id].get("id") addon_id = self.addon_mapping[str_hashed_id].get("id")
@ -132,15 +148,14 @@ class CollaborativeRecommender(AbstractRecommender):
# Sort the suggested addons by their score and return the # Sort the suggested addons by their score and return the
# sorted list of addon ids. # sorted list of addon ids.
sorted_dists = sorted(distances.items(), sorted_dists = sorted(distances.items(), key=op.itemgetter(1), reverse=True)
key=op.itemgetter(1),
reverse=True)
recommendations = [(s[0], s[1]) for s in sorted_dists[:limit]] recommendations = [(s[0], s[1]) for s in sorted_dists[:limit]]
log_data = (client_data['client_id'], log_data = (client_data["client_id"], str([r[0] for r in recommendations]))
str([r[0] for r in recommendations])) self.logger.info(
self.logger.info("collaborative_recommender_triggered, " "collaborative_recommender_triggered, "
"client_id: [%s], " "client_id: [%s], "
"guids: [%s]" % log_data) "guids: [%s]" % log_data
)
return recommendations return recommendations

Просмотреть файл

@ -7,24 +7,20 @@ import itertools
from .base_recommender import AbstractRecommender from .base_recommender import AbstractRecommender
from .lazys3 import LazyJSONLoader from .lazys3 import LazyJSONLoader
from .s3config import TAAR_ENSEMBLE_BUCKET
S3_BUCKET = 'telemetry-parquet' from .s3config import TAAR_ENSEMBLE_KEY
ENSEMBLE_WEIGHTS = 'taar/ensemble/ensemble_weight.json'
class WeightCache: class WeightCache:
def __init__(self, ctx): def __init__(self, ctx):
self._ctx = ctx self._ctx = ctx
if 'ensemble_weights' in self._ctx: self._weights = LazyJSONLoader(
self._weights = self._ctx['ensemble_weights'] self._ctx, TAAR_ENSEMBLE_BUCKET, TAAR_ENSEMBLE_KEY
else: )
self._weights = LazyJSONLoader(self._ctx,
S3_BUCKET,
ENSEMBLE_WEIGHTS)
def getWeights(self): def getWeights(self):
return self._weights.get()[0]['ensemble_weights'] return self._weights.get()[0]["ensemble_weights"]
class EnsembleRecommender(AbstractRecommender): class EnsembleRecommender(AbstractRecommender):
@ -34,12 +30,13 @@ class EnsembleRecommender(AbstractRecommender):
factor. The aggregate results are combines and used to recommend factor. The aggregate results are combines and used to recommend
addons for users. addons for users.
""" """
def __init__(self, ctx):
self.RECOMMENDER_KEYS = ['collaborative', 'similarity', 'locale']
self._ctx = ctx
self.logger = self._ctx[IMozLogging].get_logger('taar.ensemble')
assert 'recommender_factory' in self._ctx def __init__(self, ctx):
self.RECOMMENDER_KEYS = ["collaborative", "similarity", "locale"]
self._ctx = ctx
self.logger = self._ctx[IMozLogging].get_logger("taar.ensemble")
assert "recommender_factory" in self._ctx
self._init_from_ctx() self._init_from_ctx()
@ -47,7 +44,7 @@ class EnsembleRecommender(AbstractRecommender):
# Copy the map of the recommenders # Copy the map of the recommenders
self._recommender_map = {} self._recommender_map = {}
recommender_factory = self._ctx['recommender_factory'] recommender_factory = self._ctx["recommender_factory"]
for rkey in self.RECOMMENDER_KEYS: for rkey in self.RECOMMENDER_KEYS:
self._recommender_map[rkey] = recommender_factory.create(rkey) self._recommender_map[rkey] = recommender_factory.create(rkey)
@ -56,8 +53,12 @@ class EnsembleRecommender(AbstractRecommender):
def can_recommend(self, client_data, extra_data={}): def can_recommend(self, client_data, extra_data={}):
"""The ensemble recommender is always going to be """The ensemble recommender is always going to be
available if at least one recommender is available""" available if at least one recommender is available"""
result = sum([self._recommender_map[rkey].can_recommend(client_data) result = sum(
for rkey in self.RECOMMENDER_KEYS]) [
self._recommender_map[rkey].can_recommend(client_data)
for rkey in self.RECOMMENDER_KEYS
]
)
self.logger.info("Ensemble can_recommend: {}".format(result)) self.logger.info("Ensemble can_recommend: {}".format(result))
return result return result
@ -76,7 +77,7 @@ class EnsembleRecommender(AbstractRecommender):
correct. correct.
""" """
self.logger.info("Ensemble recommend invoked") self.logger.info("Ensemble recommend invoked")
preinstalled_addon_ids = client_data.get('installed_addons', []) preinstalled_addon_ids = client_data.get("installed_addons", [])
# Compute an extended limit by adding the length of # Compute an extended limit by adding the length of
# the list of any preinstalled addons. # the list of any preinstalled addons.
@ -89,9 +90,9 @@ class EnsembleRecommender(AbstractRecommender):
recommender = self._recommender_map[rkey] recommender = self._recommender_map[rkey]
if recommender.can_recommend(client_data): if recommender.can_recommend(client_data):
raw_results = recommender.recommend(client_data, raw_results = recommender.recommend(
extended_limit, client_data, extended_limit, extra_data
extra_data) )
reweighted_results = [] reweighted_results = []
for guid, weight in raw_results: for guid, weight in raw_results:
item = (guid, weight * ensemble_weights[rkey]) item = (guid, weight * ensemble_weights[rkey])
@ -114,14 +115,20 @@ class EnsembleRecommender(AbstractRecommender):
# Sort in reverse order (greatest weight to least) # Sort in reverse order (greatest weight to least)
ensemble_suggestions.sort(key=lambda x: -x[1]) ensemble_suggestions.sort(key=lambda x: -x[1])
filtered_ensemble_suggestions = [(guid, weight) for (guid, weight) filtered_ensemble_suggestions = [
in ensemble_suggestions (guid, weight)
if guid not in preinstalled_addon_ids] for (guid, weight) in ensemble_suggestions
if guid not in preinstalled_addon_ids
]
results = filtered_ensemble_suggestions[:limit] results = filtered_ensemble_suggestions[:limit]
log_data = (client_data['client_id'], log_data = (
str(ensemble_weights), client_data["client_id"],
str([r[0] for r in results])) str(ensemble_weights),
self.logger.info("client_id: [%s], ensemble_weight: [%s], guids: [%s]" % log_data) str([r[0] for r in results]),
)
self.logger.info(
"client_id: [%s], ensemble_weight: [%s], guids: [%s]" % log_data
)
return results return results

Просмотреть файл

@ -5,13 +5,10 @@
from .base_recommender import AbstractRecommender from .base_recommender import AbstractRecommender
from .lazys3 import LazyJSONLoader from .lazys3 import LazyJSONLoader
from srgutil.interfaces import IMozLogging from srgutil.interfaces import IMozLogging
import random
import operator as op import operator as op
import random
S3_BUCKET = "telemetry-parquet" from .s3config import TAAR_WHITELIST_BUCKET
from .s3config import TAAR_WHITELIST_KEY
ENSEMBLE_WEIGHTS = "taar/ensemble/ensemble_weight.json"
CURATED_WHITELIST = "telemetry-ml/addon_recommender/only_guids_top_200.json"
class CuratedWhitelistCache: class CuratedWhitelistCache:
@ -21,10 +18,9 @@ class CuratedWhitelistCache:
def __init__(self, ctx): def __init__(self, ctx):
self._ctx = ctx self._ctx = ctx
if "curated_whitelist_data" in self._ctx: self._data = LazyJSONLoader(
self._data = self._ctx["curated_whitelist_data"] self._ctx, TAAR_WHITELIST_BUCKET, TAAR_WHITELIST_KEY
else: )
self._data = LazyJSONLoader(self._ctx, S3_BUCKET, CURATED_WHITELIST)
def get_whitelist(self): def get_whitelist(self):
return self._data.get()[0] return self._data.get()[0]

Просмотреть файл

@ -6,9 +6,8 @@ from srgutil.interfaces import IMozLogging
from .base_recommender import AbstractRecommender from .base_recommender import AbstractRecommender
from .lazys3 import LazyJSONLoader from .lazys3 import LazyJSONLoader
from .s3config import TAAR_LOCALE_BUCKET
ADDON_LIST_BUCKET = 'telemetry-parquet' from .s3config import TAAR_LOCALE_KEY
ADDON_LIST_KEY = 'taar/locale/top10_dict.json'
class LocaleRecommender(AbstractRecommender): class LocaleRecommender(AbstractRecommender):
@ -24,12 +23,9 @@ class LocaleRecommender(AbstractRecommender):
def __init__(self, ctx): def __init__(self, ctx):
self._ctx = ctx self._ctx = ctx
if 'locale_mock_data' in self._ctx: self._top_addons_per_locale = LazyJSONLoader(self._ctx,
self._top_addons_per_locale = self._ctx['locale_mock_data'] TAAR_LOCALE_BUCKET,
else: TAAR_LOCALE_KEY)
self._top_addons_per_locale = LazyJSONLoader(self._ctx,
ADDON_LIST_BUCKET,
ADDON_LIST_KEY)
self._init_from_ctx() self._init_from_ctx()
self.logger = self._ctx[IMozLogging].get_logger('taar') self.logger = self._ctx[IMozLogging].get_logger('taar')
@ -40,7 +36,7 @@ class LocaleRecommender(AbstractRecommender):
def _init_from_ctx(self): def _init_from_ctx(self):
if self.top_addons_per_locale is None: if self.top_addons_per_locale is None:
self.logger.error("Cannot download the top per locale file {}".format(ADDON_LIST_KEY)) self.logger.error("Cannot download the top per locale file {}".format(TAAR_LOCALE_KEY))
def can_recommend(self, client_data, extra_data={}): def can_recommend(self, client_data, extra_data={}):
# We can't recommend if we don't have our data files. # We can't recommend if we don't have our data files.

Просмотреть файл

@ -21,21 +21,29 @@ from taar.context import default_context
from .lazys3 import LazyJSONLoader from .lazys3 import LazyJSONLoader
import random import random
from .s3config import TAAR_WHITELIST_BUCKET
from .s3config import TAAR_WHITELIST_KEY
# We need to build a default logger for the schema validation as there # We need to build a default logger for the schema validation as there
# is no class to bind to yet. # is no class to bind to yet.
ctx = default_context() ctx = default_context()
schema_logger = ctx[IMozLogging].get_logger('taar.schema_validate') schema_logger = ctx[IMozLogging].get_logger("taar.schema_validate")
TEST_CLIENT_IDS = ['00000000-0000-0000-0000-000000000000', TEST_CLIENT_IDS = [
'11111111-1111-1111-1111-111111111111', "00000000-0000-0000-0000-000000000000",
'22222222-2222-2222-2222-222222222222', "11111111-1111-1111-1111-111111111111",
'33333333-3333-3333-3333-333333333333'] "22222222-2222-2222-2222-222222222222",
"33333333-3333-3333-3333-333333333333",
]
EMPTY_TEST_CLIENT_IDS = ['00000000-aaaa-0000-0000-000000000000', EMPTY_TEST_CLIENT_IDS = [
'11111111-aaaa-1111-1111-111111111111', "00000000-aaaa-0000-0000-000000000000",
'22222222-aaaa-2222-2222-222222222222', "11111111-aaaa-1111-1111-111111111111",
'33333333-aaaa-3333-3333-333333333333'] "22222222-aaaa-2222-2222-222222222222",
"33333333-aaaa-3333-3333-333333333333",
]
# TODO: rework this function as it seems to add a lot of overhead # TODO: rework this function as it seems to add a lot of overhead
@ -45,6 +53,7 @@ def schema_validate(colandar_schema): # noqa: C901
Compute the function signature and apply a schema validator on the Compute the function signature and apply a schema validator on the
function. function.
""" """
def real_decorator(func): def real_decorator(func):
func_sig = inspect_sig(func) func_sig = inspect_sig(func)
@ -52,7 +61,7 @@ def schema_validate(colandar_schema): # noqa: C901
json_arg_names = [] json_arg_names = []
for key in func_sig.parameters.keys(): for key in func_sig.parameters.keys():
json_arg_names.append(key) json_arg_names.append(key)
if key == 'self': if key == "self":
continue continue
default_val = func_sig.parameters[key].default default_val = func_sig.parameters[key].default
@ -63,7 +72,7 @@ def schema_validate(colandar_schema): # noqa: C901
def wrapper(*w_args, **w_kwargs): def wrapper(*w_args, **w_kwargs):
if json_arg_names[0] == 'self': if json_arg_names[0] == "self":
# first arg is 'self', so this is a method. # first arg is 'self', so this is a method.
# Strip out self when doing argument validation # Strip out self when doing argument validation
for i, argval in enumerate(w_args[1:]): for i, argval in enumerate(w_args[1:]):
@ -83,16 +92,20 @@ def schema_validate(colandar_schema): # noqa: C901
try: try:
schema.deserialize(json_args) schema.deserialize(json_args)
except colander.Invalid as e: except colander.Invalid as e:
msg = "Defaulting to empty results. Error deserializing input arguments: " + str(e.asdict().values()) msg = (
"Defaulting to empty results. Error deserializing input arguments: "
+ str(e.asdict().values())
)
# This logger can't use the context logger as the code # This logger can't use the context logger as the code
# is running in a method decorator # is running in a method decorator
schema_logger.warn(msg) schema_logger.warning(msg)
# Invalid data means TAAR safely returns an empty list # Invalid data means TAAR safely returns an empty list
return [] return []
return func(*w_args, **w_kwargs) return func(*w_args, **w_kwargs)
return wrapper return wrapper
return real_decorator return real_decorator
@ -104,10 +117,11 @@ class RecommenderFactory:
the RecommendationManager and eases the implementation of test the RecommendationManager and eases the implementation of test
harnesses. harnesses.
""" """
def __init__(self, ctx): def __init__(self, ctx):
self._ctx = ctx self._ctx = ctx
# This map is set in the default context # This map is set in the default context
self._recommender_factory_map = self._ctx['recommender_factory_map'] self._recommender_factory_map = self._ctx["recommender_factory_map"]
def get_names(self): def get_names(self):
return self._recommender_factory_map.keys() return self._recommender_factory_map.keys()
@ -124,24 +138,25 @@ class RecommendationManager:
"""Initialize the user profile fetcher and the recommenders. """Initialize the user profile fetcher and the recommenders.
""" """
self._ctx = ctx self._ctx = ctx
self.logger = self._ctx[IMozLogging].get_logger('taar') self.logger = self._ctx[IMozLogging].get_logger("taar")
assert 'profile_fetcher' in self._ctx assert "profile_fetcher" in self._ctx
self.profile_fetcher = ctx['profile_fetcher'] self.profile_fetcher = ctx["profile_fetcher"]
self._recommender_map = {} self._recommender_map = {}
self.logger.info("Initializing recommenders") self.logger.info("Initializing recommenders")
self._recommender_map[INTERVENTION_A] = EnsembleRecommender(self._ctx.child()) self._recommender_map[INTERVENTION_A] = EnsembleRecommender(self._ctx.child())
hybrid_ctx = self._ctx.child() hybrid_ctx = self._ctx.child()
hybrid_ctx['ensemble_recommender'] = self._recommender_map[INTERVENTION_A] hybrid_ctx["ensemble_recommender"] = self._recommender_map[INTERVENTION_A]
self._recommender_map[INTERVENTION_B] = HybridRecommender(hybrid_ctx) self._recommender_map[INTERVENTION_B] = HybridRecommender(hybrid_ctx)
# The whitelist data is only used for test client IDs # The whitelist data is only used for test client IDs
WHITELIST_S3_BUCKET = 'telemetry-parquet'
WHITELIST_S3_KEY = 'telemetry-ml/addon_recommender/only_guids_top_200.json' self._whitelist_data = LazyJSONLoader(
self._whitelist_data = LazyJSONLoader(self._ctx, WHITELIST_S3_BUCKET, WHITELIST_S3_KEY) self._ctx, TAAR_WHITELIST_BUCKET, TAAR_WHITELIST_KEY
)
@schema_validate(RecommendationManagerQuerySchema) @schema_validate(RecommendationManagerQuerySchema)
def recommend(self, client_id, limit, extra_data={}): def recommend(self, client_id, limit, extra_data={}):
@ -156,11 +171,11 @@ class RecommendationManager:
""" """
# Select recommendation output based on extra_data['branch'] # Select recommendation output based on extra_data['branch']
branch_selector = extra_data.get('branch', INTERVENTION_CONTROL) branch_selector = extra_data.get("branch", INTERVENTION_CONTROL)
method_selector = branch_selector.replace('-', '_') method_selector = branch_selector.replace("-", "_")
method_name = 'recommend_{}'.format(method_selector) method_name = "recommend_{}".format(method_selector)
self.logger.info("Dispatching to method [{}]".format(method_name)) self.logger.info("Dispatching to method [{}]".format(method_name))
branch_method = getattr(self, 'recommend_%s' % method_selector) branch_method = getattr(self, "recommend_%s" % method_selector)
if client_id in TEST_CLIENT_IDS: if client_id in TEST_CLIENT_IDS:
data = self._whitelist_data.get()[0] data = self._whitelist_data.get()[0]
@ -175,7 +190,9 @@ class RecommendationManager:
client_info = self.profile_fetcher.get(client_id) client_info = self.profile_fetcher.get(client_id)
if client_info is None: if client_info is None:
self.logger.warn("Defaulting to empty results. No client info fetched from dynamo.") self.logger.warning(
"Defaulting to empty results. No client info fetched from dynamo."
)
return [] return []
return branch_method(client_info, client_id, limit, extra_data) return branch_method(client_info, client_id, limit, extra_data)

Просмотреть файл

@ -0,0 +1,28 @@
"""Centralized S3 configuration for all TAAR recommendation engines.

Every bucket/key used to load recommender model data is resolved from an
environment variable (via python-decouple) so that test runs can never
clobber production data, even when AWS credentials exist in ``~/.aws/``.
Each constant falls back to a ``test_*`` placeholder when the variable is
unset, which is what the test suite relies on.
"""
from decouple import config

# Ensemble recommender: linear-combination weights for sub-recommenders.
TAAR_ENSEMBLE_BUCKET = config("TAAR_ENSEMBLE_BUCKET", default="test_ensemble_bucket")
TAAR_ENSEMBLE_KEY = config("TAAR_ENSEMBLE_KEY", default="test_ensemble_key")

# Hybrid/curated recommender: whitelist of top GUIDs (also used for the
# fixed test client IDs in RecommendationManager).
TAAR_WHITELIST_BUCKET = config("TAAR_WHITELIST_BUCKET", default="test_whitelist_bucket")
TAAR_WHITELIST_KEY = config("TAAR_WHITELIST_KEY", default="test_whitelist_key")

# Collaborative recommender: item matrix and addon-id mapping.
TAAR_ITEM_MATRIX_BUCKET = config(
    "TAAR_ITEM_MATRIX_BUCKET", default="test_matrix_bucket"
)
TAAR_ITEM_MATRIX_KEY = config("TAAR_ITEM_MATRIX_KEY", default="test_matrix_key")
TAAR_ADDON_MAPPING_BUCKET = config(
    "TAAR_ADDON_MAPPING_BUCKET", default="test_mapping_bucket"
)
TAAR_ADDON_MAPPING_KEY = config("TAAR_ADDON_MAPPING_KEY", default="test_mapping_key")

# Locale recommender: per-locale top-addon dictionary.
TAAR_LOCALE_BUCKET = config("TAAR_LOCALE_BUCKET", default="test_locale_bucket")
TAAR_LOCALE_KEY = config("TAAR_LOCALE_KEY", default="test_locale_key")

# Similarity recommender: donor pool and likelihood-ratio curves.
# BUGFIX: the lookup key must be the environment-variable NAME
# ("TAAR_SIMILARITY_BUCKET"), not the production bucket value
# ("telemetry-parquet"); the old call ignored the env var entirely.
TAAR_SIMILARITY_BUCKET = config(
    "TAAR_SIMILARITY_BUCKET", default="test_similarity_bucket"
)
TAAR_SIMILARITY_DONOR_KEY = config(
    "TAAR_SIMILARITY_DONOR_KEY", default="test_similarity_donor_key"
)
TAAR_SIMILARITY_LRCURVES_KEY = config(
    "TAAR_SIMILARITY_LRCURVES_KEY", default="test_similarity_lrcurves_key"
)

Просмотреть файл

@ -9,6 +9,11 @@ from srgutil.interfaces import IMozLogging
import numpy as np import numpy as np
from .lazys3 import LazyJSONLoader from .lazys3 import LazyJSONLoader
from .s3config import TAAR_SIMILARITY_BUCKET
from .s3config import TAAR_SIMILARITY_DONOR_KEY
from .s3config import TAAR_SIMILARITY_LRCURVES_KEY
FLOOR_DISTANCE_ADJUSTMENT = 0.001 FLOOR_DISTANCE_ADJUSTMENT = 0.001
CATEGORICAL_FEATURES = ["geo_city", "locale", "os"] CATEGORICAL_FEATURES = ["geo_city", "locale", "os"]
@ -20,11 +25,6 @@ CONTINUOUS_FEATURES = [
"unique_tlds", "unique_tlds",
] ]
S3_BUCKET = "telemetry-parquet"
DONOR_LIST_KEY = "taar/similarity/donors.json"
LR_CURVES_SIMILARITY_TO_PROBABILITY = "taar/similarity/lr_curves.json"
class SimilarityRecommender(AbstractRecommender): class SimilarityRecommender(AbstractRecommender):
""" A recommender class that returns top N addons based on the """ A recommender class that returns top N addons based on the
@ -50,13 +50,15 @@ class SimilarityRecommender(AbstractRecommender):
if "similarity_donors_pool" in self._ctx: if "similarity_donors_pool" in self._ctx:
self._donors_pool = self._ctx["similarity_donors_pool"] self._donors_pool = self._ctx["similarity_donors_pool"]
else: else:
self._donors_pool = LazyJSONLoader(self._ctx, S3_BUCKET, DONOR_LIST_KEY) self._donors_pool = LazyJSONLoader(
self._ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY
)
if "similarity_lr_curves" in self._ctx: if "similarity_lr_curves" in self._ctx:
self._lr_curves = self._ctx["similarity_lr_curves"] self._lr_curves = self._ctx["similarity_lr_curves"]
else: else:
self._lr_curves = LazyJSONLoader( self._lr_curves = LazyJSONLoader(
self._ctx, S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY self._ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY
) )
self.logger = self._ctx[IMozLogging].get_logger("taar") self.logger = self._ctx[IMozLogging].get_logger("taar")
@ -75,15 +77,13 @@ class SimilarityRecommender(AbstractRecommender):
# Download the addon donors list. # Download the addon donors list.
if self.donors_pool is None: if self.donors_pool is None:
self.logger.error( self.logger.error(
"Cannot download the donor list: {}".format(DONOR_LIST_KEY) "Cannot download the donor list: {}".format(TAAR_SIMILARITY_DONOR_KEY)
) )
# Download the probability mapping curves from similarity to likelihood of being a good donor. # Download the probability mapping curves from similarity to likelihood of being a good donor.
if self.lr_curves is None: if self.lr_curves is None:
self.logger.error( self.logger.error(
"Cannot download the lr curves: {}".format( "Cannot download the lr curves: {}".format(TAAR_SIMILARITY_LRCURVES_KEY)
LR_CURVES_SIMILARITY_TO_PROBABILITY
)
) )
self.build_features_caches() self.build_features_caches()

Просмотреть файл

@ -10,11 +10,15 @@ import numpy
from moto import mock_s3 from moto import mock_s3
import boto3 import boto3
from taar.recommenders.collaborative_recommender import ITEM_MATRIX_CONFIG from taar.recommenders.collaborative_recommender import (
from taar.recommenders.collaborative_recommender import ADDON_MAPPING_CONFIG TAAR_ITEM_MATRIX_BUCKET,
TAAR_ITEM_MATRIX_KEY,
TAAR_ADDON_MAPPING_BUCKET,
TAAR_ADDON_MAPPING_KEY,
)
from taar.recommenders.collaborative_recommender import CollaborativeRecommender from taar.recommenders.collaborative_recommender import CollaborativeRecommender
from taar.recommenders.collaborative_recommender import positive_hash from taar.recommenders.collaborative_recommender import positive_hash
from taar.recommenders.lazys3 import LazyJSONLoader
import json import json
@ -31,21 +35,15 @@ def install_none_mock_data(ctx):
Overload the 'real' addon model and mapping URLs responses so that Overload the 'real' addon model and mapping URLs responses so that
we always get 404 errors. we always get 404 errors.
""" """
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=ITEM_MATRIX_CONFIG[0]) conn.create_bucket(Bucket=TAAR_ITEM_MATRIX_BUCKET)
conn.Object(ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1]).put(Body="") conn.Object(TAAR_ITEM_MATRIX_BUCKET, TAAR_ITEM_MATRIX_KEY).put(Body="")
ctx['collaborative_item_matrix'] = LazyJSONLoader(ctx,
ITEM_MATRIX_CONFIG[0],
ITEM_MATRIX_CONFIG[1])
# Don't reuse connections with moto. badness happens # Don't reuse connections with moto. badness happens
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=ADDON_MAPPING_CONFIG[0]) conn.create_bucket(Bucket=TAAR_ADDON_MAPPING_BUCKET)
conn.Object(ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1]).put(Body="") conn.Object(TAAR_ADDON_MAPPING_BUCKET, TAAR_ADDON_MAPPING_KEY).put(Body="")
ctx['collaborative_addon_mapping'] = LazyJSONLoader(ctx,
ADDON_MAPPING_CONFIG[0],
ADDON_MAPPING_CONFIG[1])
return ctx return ctx
@ -55,37 +53,37 @@ def install_mock_data(ctx):
we always the fixture data at the top of this test module. we always the fixture data at the top of this test module.
""" """
addon_space = [{"id": "addon1.id", "name": "addon1.name", "isWebextension": True}, addon_space = [
{"id": "addon2.id", "name": "addon2.name", "isWebextension": True}, {"id": "addon1.id", "name": "addon1.name", "isWebextension": True},
{"id": "addon3.id", "name": "addon3.name", "isWebextension": True}, {"id": "addon2.id", "name": "addon2.name", "isWebextension": True},
{"id": "addon4.id", "name": "addon4.name", "isWebextension": True}, {"id": "addon3.id", "name": "addon3.name", "isWebextension": True},
{"id": "addon5.id", "name": "addon5.name", "isWebextension": True}] {"id": "addon4.id", "name": "addon4.name", "isWebextension": True},
{"id": "addon5.id", "name": "addon5.name", "isWebextension": True},
]
fake_addon_matrix = [] fake_addon_matrix = []
for i, addon in enumerate(addon_space): for i, addon in enumerate(addon_space):
row = {"id": positive_hash(addon['id']), "features": [0, 0.2, 0.0, 0.1, 0.15]} row = {"id": positive_hash(addon["id"]), "features": [0, 0.2, 0.0, 0.1, 0.15]}
row['features'][i] = 1.0 row["features"][i] = 1.0
fake_addon_matrix.append(row) fake_addon_matrix.append(row)
fake_mapping = {} fake_mapping = {}
for addon in addon_space: for addon in addon_space:
java_hash = positive_hash(addon['id']) java_hash = positive_hash(addon["id"])
fake_mapping[str(java_hash)] = addon fake_mapping[str(java_hash)] = addon
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=ITEM_MATRIX_CONFIG[0]) conn.create_bucket(Bucket=TAAR_ITEM_MATRIX_BUCKET)
conn.Object(ITEM_MATRIX_CONFIG[0], ITEM_MATRIX_CONFIG[1]).put(Body=json.dumps(fake_addon_matrix)) conn.Object(TAAR_ITEM_MATRIX_BUCKET, TAAR_ITEM_MATRIX_KEY).put(
Body=json.dumps(fake_addon_matrix)
)
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=ADDON_MAPPING_CONFIG[0]) conn.create_bucket(Bucket=TAAR_ADDON_MAPPING_BUCKET)
conn.Object(ADDON_MAPPING_CONFIG[0], ADDON_MAPPING_CONFIG[1]).put(Body=json.dumps(fake_mapping)) conn.Object(TAAR_ADDON_MAPPING_BUCKET, TAAR_ADDON_MAPPING_KEY).put(
Body=json.dumps(fake_mapping)
)
ctx['collaborative_addon_mapping'] = LazyJSONLoader(ctx,
ADDON_MAPPING_CONFIG[0],
ADDON_MAPPING_CONFIG[1])
ctx['collaborative_item_matrix'] = LazyJSONLoader(ctx,
ITEM_MATRIX_CONFIG[0],
ITEM_MATRIX_CONFIG[1])
return ctx return ctx
@ -106,8 +104,9 @@ def test_can_recommend(test_ctx):
# For some reason, moto doesn't like to play nice with this call # For some reason, moto doesn't like to play nice with this call
# Check that we can recommend if we the user has at least an addon. # Check that we can recommend if we the user has at least an addon.
assert r.can_recommend({"installed_addons": ["uBlock0@raymondhill.net"], assert r.can_recommend(
"client_id": "test-client"}) {"installed_addons": ["uBlock0@raymondhill.net"], "client_id": "test-client"}
)
@mock_s3 @mock_s3
@ -141,8 +140,10 @@ def test_best_recommendation(test_ctx):
r = CollaborativeRecommender(ctx) r = CollaborativeRecommender(ctx)
# An non-empty set of addons should give a list of recommendations # An non-empty set of addons should give a list of recommendations
fixture_client_data = {"installed_addons": ["addon4.id"], fixture_client_data = {
"client_id": "test_client"} "installed_addons": ["addon4.id"],
"client_id": "test_client",
}
assert r.can_recommend(fixture_client_data) assert r.can_recommend(fixture_client_data)
recommendations = r.recommend(fixture_client_data, 1) recommendations = r.recommend(fixture_client_data, 1)
@ -154,9 +155,9 @@ def test_best_recommendation(test_ctx):
result = recommendations[0] result = recommendations[0]
assert type(result) is tuple assert type(result) is tuple
assert len(result) == 2 assert len(result) == 2
assert result[0] == 'addon2.id' assert result[0] == "addon2.id"
assert type(result[1]) is numpy.float64 assert type(result[1]) is numpy.float64
assert numpy.isclose(result[1], numpy.float64('0.3225')) assert numpy.isclose(result[1], numpy.float64("0.3225"))
@mock_s3 @mock_s3
@ -168,8 +169,10 @@ def test_recommendation_weights(test_ctx):
r = CollaborativeRecommender(ctx) r = CollaborativeRecommender(ctx)
# An non-empty set of addons should give a list of recommendations # An non-empty set of addons should give a list of recommendations
fixture_client_data = {"installed_addons": ["addon4.id"], fixture_client_data = {
"client_id": "test_client"} "installed_addons": ["addon4.id"],
"client_id": "test_client",
}
assert r.can_recommend(fixture_client_data) assert r.can_recommend(fixture_client_data)
recommendations = r.recommend(fixture_client_data, 2) recommendations = r.recommend(fixture_client_data, 2)
assert isinstance(recommendations, list) assert isinstance(recommendations, list)
@ -180,15 +183,15 @@ def test_recommendation_weights(test_ctx):
result = recommendations[0] result = recommendations[0]
assert type(result) is tuple assert type(result) is tuple
assert len(result) == 2 assert len(result) == 2
assert result[0] == 'addon2.id' assert result[0] == "addon2.id"
assert type(result[1]) is numpy.float64 assert type(result[1]) is numpy.float64
assert numpy.isclose(result[1], numpy.float64('0.3225')) assert numpy.isclose(result[1], numpy.float64("0.3225"))
# Verify that addon2 - the most heavy weighted addon was # Verify that addon2 - the most heavy weighted addon was
# recommended # recommended
result = recommendations[1] result = recommendations[1]
assert type(result) is tuple assert type(result) is tuple
assert len(result) == 2 assert len(result) == 2
assert result[0] == 'addon5.id' assert result[0] == "addon5.id"
assert type(result[1]) is numpy.float64 assert type(result[1]) is numpy.float64
assert numpy.isclose(result[1], numpy.float64('0.29')) assert numpy.isclose(result[1], numpy.float64("0.29"))

Просмотреть файл

@ -3,30 +3,24 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/. # file, You can obtain one at http://mozilla.org/MPL/2.0/.
from taar.recommenders.ensemble_recommender import WeightCache, EnsembleRecommender from taar.recommenders.ensemble_recommender import WeightCache, EnsembleRecommender
from taar.recommenders.s3config import (
TAAR_ENSEMBLE_BUCKET,
TAAR_ENSEMBLE_KEY,
)
from moto import mock_s3 from moto import mock_s3
import boto3 import boto3
import json import json
from taar.recommenders.lazys3 import LazyJSONLoader
from .mocks import MockRecommenderFactory from .mocks import MockRecommenderFactory
EXPECTED = {'collaborative': 1000, EXPECTED = {"collaborative": 1000, "similarity": 100, "locale": 10}
'similarity': 100,
'locale': 10}
def install_mock_ensemble_data(ctx): def install_mock_ensemble_data(ctx):
DATA = {'ensemble_weights': EXPECTED} DATA = {"ensemble_weights": EXPECTED}
S3_BUCKET = 'telemetry-parquet' conn = boto3.resource("s3", region_name="us-west-2")
ENSEMBLE_WEIGHTS = 'taar/ensemble/ensemble_weight.json' conn.create_bucket(Bucket=TAAR_ENSEMBLE_BUCKET)
conn.Object(TAAR_ENSEMBLE_BUCKET, TAAR_ENSEMBLE_KEY).put(Body=json.dumps(DATA))
conn = boto3.resource('s3', region_name='us-west-2')
conn.create_bucket(Bucket=S3_BUCKET)
conn.Object(S3_BUCKET, ENSEMBLE_WEIGHTS).put(Body=json.dumps(DATA))
ctx['ensemble_weights'] = LazyJSONLoader(ctx,
S3_BUCKET,
ENSEMBLE_WEIGHTS)
return ctx return ctx
@ -43,20 +37,24 @@ def test_weight_cache(test_ctx):
def test_recommendations(test_ctx): def test_recommendations(test_ctx):
ctx = install_mock_ensemble_data(test_ctx) ctx = install_mock_ensemble_data(test_ctx)
EXPECTED_RESULTS = [('ghi', 3430.0), EXPECTED_RESULTS = [
('def', 3320.0), ("ghi", 3430.0),
('ijk', 3200.0), ("def", 3320.0),
('hij', 3100.0), ("ijk", 3200.0),
('lmn', 420.0)] ("hij", 3100.0),
("lmn", 420.0),
]
factory = MockRecommenderFactory() factory = MockRecommenderFactory()
ctx['recommender_factory'] = factory ctx["recommender_factory"] = factory
ctx['recommender_map'] = {'collaborative': factory.create('collaborative'), ctx["recommender_map"] = {
'similarity': factory.create('similarity'), "collaborative": factory.create("collaborative"),
'locale': factory.create('locale')} "similarity": factory.create("similarity"),
"locale": factory.create("locale"),
}
r = EnsembleRecommender(ctx.child()) r = EnsembleRecommender(ctx.child())
client = {'client_id': '12345'} # Anything will work here client = {"client_id": "12345"} # Anything will work here
recommendation_list = r.recommend(client, 5) recommendation_list = r.recommend(client, 5)
assert isinstance(recommendation_list, list) assert isinstance(recommendation_list, list)
@ -67,25 +65,28 @@ def test_recommendations(test_ctx):
def test_preinstalled_guids(test_ctx): def test_preinstalled_guids(test_ctx):
ctx = install_mock_ensemble_data(test_ctx) ctx = install_mock_ensemble_data(test_ctx)
EXPECTED_RESULTS = [('ghi', 3430.0), EXPECTED_RESULTS = [
('ijk', 3200.0), ("ghi", 3430.0),
('lmn', 420.0), ("ijk", 3200.0),
('klm', 409.99999999999994), ("lmn", 420.0),
('abc', 23.0)] ("klm", 409.99999999999994),
("abc", 23.0),
]
factory = MockRecommenderFactory() factory = MockRecommenderFactory()
ctx['recommender_factory'] = factory ctx["recommender_factory"] = factory
ctx['recommender_map'] = {'collaborative': factory.create('collaborative'), ctx["recommender_map"] = {
'similarity': factory.create('similarity'), "collaborative": factory.create("collaborative"),
'locale': factory.create('locale')} "similarity": factory.create("similarity"),
"locale": factory.create("locale"),
}
r = EnsembleRecommender(ctx.child()) r = EnsembleRecommender(ctx.child())
# 'hij' should be excluded from the suggestions list # 'hij' should be excluded from the suggestions list
# The other two addon GUIDs 'def' and 'jkl' will never be # The other two addon GUIDs 'def' and 'jkl' will never be
# recommended anyway and should have no impact on results # recommended anyway and should have no impact on results
client = {'client_id': '12345', client = {"client_id": "12345", "installed_addons": ["def", "hij", "jkl"]}
'installed_addons': ['def', 'hij', 'jkl']}
recommendation_list = r.recommend(client, 5) recommendation_list = r.recommend(client, 5)
print(recommendation_list) print(recommendation_list)

Просмотреть файл

@ -10,11 +10,9 @@ from taar.recommenders.hybrid_recommender import CuratedRecommender
from taar.recommenders.hybrid_recommender import HybridRecommender from taar.recommenders.hybrid_recommender import HybridRecommender
from taar.recommenders.ensemble_recommender import EnsembleRecommender from taar.recommenders.ensemble_recommender import EnsembleRecommender
from taar.recommenders.hybrid_recommender import S3_BUCKET from taar.recommenders.s3config import TAAR_WHITELIST_BUCKET, TAAR_WHITELIST_KEY
from taar.recommenders.hybrid_recommender import CURATED_WHITELIST
# from taar.recommenders.hybrid_recommender import ENSEMBLE_WEIGHTS # from taar.recommenders.hybrid_recommender import ENSEMBLE_WEIGHTS
from taar.recommenders.lazys3 import LazyJSONLoader
from .test_ensemblerecommender import install_mock_ensemble_data from .test_ensemblerecommender import install_mock_ensemble_data
from .mocks import MockRecommenderFactory from .mocks import MockRecommenderFactory
@ -27,9 +25,8 @@ def install_no_curated_data(ctx):
ctx = ctx.child() ctx = ctx.child()
conn = boto3.resource("s3", region_name="us-west-2") conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=S3_BUCKET) conn.create_bucket(Bucket=TAAR_WHITELIST_BUCKET)
conn.Object(S3_BUCKET, CURATED_WHITELIST).put(Body="") conn.Object(TAAR_WHITELIST_BUCKET, TAAR_WHITELIST_KEY).put(Body="")
ctx["curated_whitelist_data"] = LazyJSONLoader(ctx, S3_BUCKET, CURATED_WHITELIST)
return ctx return ctx
@ -42,9 +39,10 @@ def install_mock_curated_data(ctx):
ctx = ctx.child() ctx = ctx.child()
conn = boto3.resource("s3", region_name="us-west-2") conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=S3_BUCKET) conn.create_bucket(Bucket=TAAR_WHITELIST_BUCKET)
conn.Object(S3_BUCKET, CURATED_WHITELIST).put(Body=json.dumps(mock_data)) conn.Object(TAAR_WHITELIST_BUCKET, TAAR_WHITELIST_KEY).put(
ctx["curated_whitelist_data"] = LazyJSONLoader(ctx, S3_BUCKET, CURATED_WHITELIST) Body=json.dumps(mock_data)
)
return ctx return ctx

Просмотреть файл

@ -9,30 +9,28 @@ import json
from taar.recommenders import LocaleRecommender from taar.recommenders import LocaleRecommender
from taar.recommenders.lazys3 import LazyJSONLoader from taar.recommenders.s3config import TAAR_LOCALE_KEY, TAAR_LOCALE_BUCKET
from taar.recommenders.locale_recommender import ADDON_LIST_BUCKET, ADDON_LIST_KEY
FAKE_LOCALE_DATA = { FAKE_LOCALE_DATA = {
"te-ST": [ "te-ST": [
"{1e6b8bce-7dc8-481c-9f19-123e41332b72}", "some-other@nice-addon.com", "{1e6b8bce-7dc8-481c-9f19-123e41332b72}",
"{66d1eed2-a390-47cd-8215-016e9fa9cc55}", "{5f1594c3-0d4c-49dd-9182-4fbbb25131a7}" "some-other@nice-addon.com",
"{66d1eed2-a390-47cd-8215-016e9fa9cc55}",
"{5f1594c3-0d4c-49dd-9182-4fbbb25131a7}",
], ],
"en": [ "en": ["some-uuid@test-addon.com", "other-addon@some-id.it"],
"some-uuid@test-addon.com", "other-addon@some-id.it"
]
} }
def install_mock_data(ctx): def install_mock_data(ctx):
ctx = ctx.child() ctx = ctx.child()
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=ADDON_LIST_BUCKET) conn.create_bucket(Bucket=TAAR_LOCALE_BUCKET)
conn.Object(ADDON_LIST_BUCKET, ADDON_LIST_KEY).put(Body=json.dumps(FAKE_LOCALE_DATA)) conn.Object(TAAR_LOCALE_BUCKET, TAAR_LOCALE_KEY).put(
ctx['locale_mock_data'] = LazyJSONLoader(ctx, Body=json.dumps(FAKE_LOCALE_DATA)
ADDON_LIST_BUCKET, )
ADDON_LIST_KEY)
return ctx return ctx

Просмотреть файл

@ -8,11 +8,17 @@ from moto import mock_s3
from taar.recommenders import RecommendationManager from taar.recommenders import RecommendationManager
from taar.recommenders.recommendation_manager import TEST_CLIENT_IDS from taar.recommenders.recommendation_manager import TEST_CLIENT_IDS
from taar.recommenders.recommendation_manager import EMPTY_TEST_CLIENT_IDS from taar.recommenders.recommendation_manager import EMPTY_TEST_CLIENT_IDS
from taar.recommenders.lazys3 import LazyJSONLoader
from taar.schema import INTERVENTION_A from taar.schema import INTERVENTION_A
from taar.schema import INTERVENTION_B from taar.schema import INTERVENTION_B
from taar.schema import INTERVENTION_CONTROL from taar.schema import INTERVENTION_CONTROL
from taar.recommenders.base_recommender import AbstractRecommender from taar.recommenders.base_recommender import AbstractRecommender
from taar.recommenders.ensemble_recommender import (
TAAR_ENSEMBLE_BUCKET,
TAAR_ENSEMBLE_KEY,
)
from .mocks import MockRecommenderFactory from .mocks import MockRecommenderFactory
from .test_hybrid_recommender import install_mock_curated_data from .test_hybrid_recommender import install_mock_curated_data
@ -20,6 +26,7 @@ from .test_hybrid_recommender import install_mock_curated_data
class StubRecommender(AbstractRecommender): class StubRecommender(AbstractRecommender):
""" A shared, stub recommender that can be used for testing. """ A shared, stub recommender that can be used for testing.
""" """
def __init__(self, can_recommend, stub_recommendations): def __init__(self, can_recommend, stub_recommendations):
self._can_recommend = can_recommend self._can_recommend = can_recommend
self._recommendations = stub_recommendations self._recommendations = stub_recommendations
@ -36,25 +43,18 @@ def install_mocks(ctx):
class MockProfileFetcher: class MockProfileFetcher:
def get(self, client_id): def get(self, client_id):
return {'client_id': client_id} return {"client_id": client_id}
ctx['profile_fetcher'] = MockProfileFetcher() ctx["profile_fetcher"] = MockProfileFetcher()
ctx['recommender_factory'] = MockRecommenderFactory() ctx["recommender_factory"] = MockRecommenderFactory()
DATA = {'ensemble_weights': {'collaborative': 1000, DATA = {
'similarity': 100, "ensemble_weights": {"collaborative": 1000, "similarity": 100, "locale": 10}
'locale': 10}} }
S3_BUCKET = 'telemetry-parquet' conn = boto3.resource("s3", region_name="us-west-2")
ENSEMBLE_WEIGHTS = 'taar/ensemble/ensemble_weight.json' conn.create_bucket(Bucket=TAAR_ENSEMBLE_BUCKET)
conn.Object(TAAR_ENSEMBLE_BUCKET, TAAR_ENSEMBLE_KEY).put(Body=json.dumps(DATA))
conn = boto3.resource('s3', region_name='us-west-2')
conn.create_bucket(Bucket=S3_BUCKET)
conn.Object(S3_BUCKET, ENSEMBLE_WEIGHTS).put(Body=json.dumps(DATA))
ctx['ensemble_weights'] = LazyJSONLoader(ctx,
S3_BUCKET,
ENSEMBLE_WEIGHTS)
return ctx return ctx
@ -70,21 +70,23 @@ def test_none_profile_returns_empty_list(test_ctx):
def test_intervention_a(test_ctx): def test_intervention_a(test_ctx):
ctx = install_mocks(test_ctx) ctx = install_mocks(test_ctx)
EXPECTED_RESULTS = [('ghi', 3430.0), EXPECTED_RESULTS = [
('def', 3320.0), ("ghi", 3430.0),
('ijk', 3200.0), ("def", 3320.0),
('hij', 3100.0), ("ijk", 3200.0),
('lmn', 420.0), ("hij", 3100.0),
('klm', 409.99999999999994), ("lmn", 420.0),
('jkl', 400.0), ("klm", 409.99999999999994),
('abc', 23.0), ("jkl", 400.0),
('fgh', 22.0), ("abc", 23.0),
('efg', 21.0)] ("fgh", 22.0),
("efg", 21.0),
]
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend('some_ignored_id', recommendation_list = manager.recommend(
10, "some_ignored_id", 10, extra_data={"branch": INTERVENTION_A}
extra_data={'branch': INTERVENTION_A}) )
assert isinstance(recommendation_list, list) assert isinstance(recommendation_list, list)
assert recommendation_list == EXPECTED_RESULTS assert recommendation_list == EXPECTED_RESULTS
@ -100,9 +102,9 @@ def test_intervention_b(test_ctx):
ctx = install_mock_curated_data(ctx) ctx = install_mock_curated_data(ctx)
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend('some_ignored_id', recommendation_list = manager.recommend(
4, "some_ignored_id", 4, extra_data={"branch": INTERVENTION_B}
extra_data={'branch': INTERVENTION_B}) )
assert isinstance(recommendation_list, list) assert isinstance(recommendation_list, list)
assert len(recommendation_list) == 4 assert len(recommendation_list) == 4
@ -114,9 +116,9 @@ def test_intervention_control(test_ctx):
ctx = install_mock_curated_data(ctx) ctx = install_mock_curated_data(ctx)
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend('some_ignored_id', recommendation_list = manager.recommend(
10, "some_ignored_id", 10, extra_data={"branch": INTERVENTION_CONTROL}
extra_data={'branch': INTERVENTION_CONTROL}) )
assert len(recommendation_list) == 0 assert len(recommendation_list) == 0
@ -127,15 +129,15 @@ def test_fixed_client_id_valid(test_ctx):
ctx = install_mock_curated_data(ctx) ctx = install_mock_curated_data(ctx)
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend(TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, TEST_CLIENT_IDS[0], 10, extra_data={"branch": INTERVENTION_A}
extra_data={'branch': INTERVENTION_A}) )
assert len(recommendation_list) == 10 assert len(recommendation_list) == 10
recommendation_list = manager.recommend(TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, TEST_CLIENT_IDS[0], 10, extra_data={"branch": INTERVENTION_B}
extra_data={'branch': INTERVENTION_B}) )
assert len(recommendation_list) == 10 assert len(recommendation_list) == 10
@ -148,15 +150,15 @@ def test_intervention_names(test_ctx):
ctx = install_mock_curated_data(ctx) ctx = install_mock_curated_data(ctx)
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend(TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, TEST_CLIENT_IDS[0], 10, extra_data={"branch": "intervention-a"}
extra_data={'branch': 'intervention-a'}) )
assert len(recommendation_list) == 10 assert len(recommendation_list) == 10
recommendation_list = manager.recommend(TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, TEST_CLIENT_IDS[0], 10, extra_data={"branch": "intervention-b"}
extra_data={'branch': 'intervention-b'}) )
assert len(recommendation_list) == 10 assert len(recommendation_list) == 10
@ -167,14 +169,14 @@ def test_fixed_client_id_empty_list(test_ctx):
ctx = install_mock_curated_data(ctx) ctx = install_mock_curated_data(ctx)
manager = RecommendationManager(ctx.child()) manager = RecommendationManager(ctx.child())
recommendation_list = manager.recommend(EMPTY_TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, EMPTY_TEST_CLIENT_IDS[0], 10, extra_data={"branch": INTERVENTION_A}
extra_data={'branch': INTERVENTION_A}) )
assert len(recommendation_list) == 0 assert len(recommendation_list) == 0
recommendation_list = manager.recommend(EMPTY_TEST_CLIENT_IDS[0], recommendation_list = manager.recommend(
10, EMPTY_TEST_CLIENT_IDS[0], 10, extra_data={"branch": INTERVENTION_B}
extra_data={'branch': INTERVENTION_B}) )
assert len(recommendation_list) == 0 assert len(recommendation_list) == 0

Просмотреть файл

@ -12,14 +12,21 @@ from taar.recommenders.lazys3 import LazyJSONLoader
import boto3 import boto3
from moto import mock_s3 from moto import mock_s3
from taar.recommenders.similarity_recommender import S3_BUCKET from taar.recommenders.similarity_recommender import (
from taar.recommenders.similarity_recommender import \ CATEGORICAL_FEATURES,
CATEGORICAL_FEATURES, CONTINUOUS_FEATURES, DONOR_LIST_KEY, LR_CURVES_SIMILARITY_TO_PROBABILITY, \ CONTINUOUS_FEATURES,
SimilarityRecommender SimilarityRecommender,
)
from .similarity_data import CONTINUOUS_FEATURE_FIXTURE_DATA from .similarity_data import CONTINUOUS_FEATURE_FIXTURE_DATA
from .similarity_data import CATEGORICAL_FEATURE_FIXTURE_DATA from .similarity_data import CATEGORICAL_FEATURE_FIXTURE_DATA
from taar.recommenders.s3config import (
TAAR_SIMILARITY_BUCKET,
TAAR_SIMILARITY_DONOR_KEY,
TAAR_SIMILARITY_LRCURVES_KEY,
)
def generate_fake_lr_curves(num_elements, ceiling=10.0): def generate_fake_lr_curves(num_elements, ceiling=10.0):
""" """
@ -53,46 +60,53 @@ def generate_a_fake_taar_client():
"bookmark_count": 7, "bookmark_count": 7,
"tab_open_count": 4, "tab_open_count": 4,
"total_uri": 222, "total_uri": 222,
"unique_tlds": 21 "unique_tlds": 21,
} }
def install_no_data(ctx): def install_no_data(ctx):
ctx = ctx.child() ctx = ctx.child()
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=S3_BUCKET) conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
conn.Object(S3_BUCKET, DONOR_LIST_KEY).put(Body="") conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(Body="")
conn.Object(S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY).put(Body="") conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY).put(Body="")
ctx['similarity_donors_pool'] = LazyJSONLoader(ctx, ctx["similarity_donors_pool"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY
DONOR_LIST_KEY) )
ctx['similarity_lr_curves'] = LazyJSONLoader(ctx, ctx["similarity_lr_curves"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY
LR_CURVES_SIMILARITY_TO_PROBABILITY) )
return ctx return ctx
def install_categorical_data(ctx): def install_categorical_data(ctx):
ctx = ctx.child() ctx = ctx.child()
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=S3_BUCKET) try:
conn.Object(S3_BUCKET, DONOR_LIST_KEY).put(Body=json.dumps(CATEGORICAL_FEATURE_FIXTURE_DATA)) conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
except Exception:
pass
conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(
Body=json.dumps(CATEGORICAL_FEATURE_FIXTURE_DATA)
)
conn.Object(S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY).put(Body=json.dumps(generate_fake_lr_curves(1000))) conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY).put(
Body=json.dumps(generate_fake_lr_curves(1000))
)
ctx['similarity_donors_pool'] = LazyJSONLoader(ctx, ctx["similarity_donors_pool"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY
DONOR_LIST_KEY) )
ctx['similarity_lr_curves'] = LazyJSONLoader(ctx, ctx["similarity_lr_curves"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY
LR_CURVES_SIMILARITY_TO_PROBABILITY) )
return ctx return ctx
@ -102,20 +116,23 @@ def install_continuous_data(ctx):
cts_data = json.dumps(CONTINUOUS_FEATURE_FIXTURE_DATA) cts_data = json.dumps(CONTINUOUS_FEATURE_FIXTURE_DATA)
lrs_data = json.dumps(generate_fake_lr_curves(1000)) lrs_data = json.dumps(generate_fake_lr_curves(1000))
conn = boto3.resource('s3', region_name='us-west-2') conn = boto3.resource("s3", region_name="us-west-2")
conn.create_bucket(Bucket=S3_BUCKET) try:
conn.Object(S3_BUCKET, DONOR_LIST_KEY).put(Body=cts_data) conn.create_bucket(Bucket=TAAR_SIMILARITY_BUCKET)
except Exception:
pass
conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY).put(Body=cts_data)
conn.Object(S3_BUCKET, LR_CURVES_SIMILARITY_TO_PROBABILITY).put(Body=lrs_data) conn.Object(TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY).put(Body=lrs_data)
ctx['similarity_donors_pool'] = LazyJSONLoader(ctx, ctx["similarity_donors_pool"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_DONOR_KEY
DONOR_LIST_KEY) )
ctx['similarity_lr_curves'] = LazyJSONLoader(ctx, ctx["similarity_lr_curves"] = LazyJSONLoader(
S3_BUCKET, ctx, TAAR_SIMILARITY_BUCKET, TAAR_SIMILARITY_LRCURVES_KEY
LR_CURVES_SIMILARITY_TO_PROBABILITY) )
return ctx return ctx
@ -209,7 +226,7 @@ def test_compute_clients_dist(test_ctx):
"bookmark_count": 1, "bookmark_count": 1,
"tab_open_count": 1, "tab_open_count": 1,
"total_uri": 1, "total_uri": 1,
"unique_tlds": 1 "unique_tlds": 1,
}, },
{ {
"client_id": "test-client-003", "client_id": "test-client-003",
@ -221,7 +238,7 @@ def test_compute_clients_dist(test_ctx):
"bookmark_count": 10, "bookmark_count": 10,
"tab_open_count": 1, "tab_open_count": 1,
"total_uri": 1, "total_uri": 1,
"unique_tlds": 1 "unique_tlds": 1,
}, },
{ {
"client_id": "test-client-004", "client_id": "test-client-004",
@ -233,8 +250,8 @@ def test_compute_clients_dist(test_ctx):
"bookmark_count": 10, "bookmark_count": 10,
"tab_open_count": 10, "tab_open_count": 10,
"total_uri": 100, "total_uri": 100,
"unique_tlds": 10 "unique_tlds": 10,
} },
] ]
per_client_test = [] per_client_test = []
@ -260,27 +277,39 @@ def test_distance_functions(test_ctx):
assert len(recs) > 0 assert len(recs) > 0
# Make it a generally poor match for the donors. # Make it a generally poor match for the donors.
test_client.update({'total_uri': 10, 'bookmark_count': 2, 'subsession_length': 10}) test_client.update({"total_uri": 10, "bookmark_count": 2, "subsession_length": 10})
all_client_values_zero = test_client all_client_values_zero = test_client
# Make all categorical variables non-matching with any donor. # Make all categorical variables non-matching with any donor.
all_client_values_zero.update({key: 'zero' for key in test_client.keys() if key in CATEGORICAL_FEATURES}) all_client_values_zero.update(
{key: "zero" for key in test_client.keys() if key in CATEGORICAL_FEATURES}
)
recs = r.recommend(all_client_values_zero, 10) recs = r.recommend(all_client_values_zero, 10)
assert len(recs) == 0 assert len(recs) == 0
# Make all continuous variables equal to zero. # Make all continuous variables equal to zero.
all_client_values_zero.update({key: 0 for key in test_client.keys() if key in CONTINUOUS_FEATURES}) all_client_values_zero.update(
{key: 0 for key in test_client.keys() if key in CONTINUOUS_FEATURES}
)
recs = r.recommend(all_client_values_zero, 10) recs = r.recommend(all_client_values_zero, 10)
assert len(recs) == 0 assert len(recs) == 0
# Make all categorical variables non-matching with any donor. # Make all categorical variables non-matching with any donor.
all_client_values_high = test_client all_client_values_high = test_client
all_client_values_high.update({key: 'one billion' for key in test_client.keys() if key in CATEGORICAL_FEATURES}) all_client_values_high.update(
{
key: "one billion"
for key in test_client.keys()
if key in CATEGORICAL_FEATURES
}
)
recs = r.recommend(all_client_values_high, 10) recs = r.recommend(all_client_values_high, 10)
assert len(recs) == 0 assert len(recs) == 0
# Make all continuous variables equal to a very high numerical value. # Make all continuous variables equal to a very high numerical value.
all_client_values_high.update({key: 1e60 for key in test_client.keys() if key in CONTINUOUS_FEATURES}) all_client_values_high.update(
{key: 1e60 for key in test_client.keys() if key in CONTINUOUS_FEATURES}
)
recs = r.recommend(all_client_values_high, 10) recs = r.recommend(all_client_values_high, 10)
assert len(recs) == 0 assert len(recs) == 0
@ -300,7 +329,7 @@ def test_weights_continuous(test_ctx):
# In the ensemble method recommendations should be a sorted list of tuples # In the ensemble method recommendations should be a sorted list of tuples
# containing [(guid, weight), (guid, weight)... (guid, weight)]. # containing [(guid, weight), (guid, weight)... (guid, weight)].
recommendation_list = r.recommend(generate_a_fake_taar_client(), 2) recommendation_list = r.recommend(generate_a_fake_taar_client(), 2)
with open('/tmp/similarity_recommender.json', 'w') as fout: with open("/tmp/similarity_recommender.json", "w") as fout:
fout.write(json.dumps(recommendation_list)) fout.write(json.dumps(recommendation_list))
# Make sure the structure of the recommendations is correct and # Make sure the structure of the recommendations is correct and
@ -326,14 +355,14 @@ def test_weights_continuous(test_ctx):
@mock_s3 @mock_s3
def test_weights_categorical(test_ctx): def test_weights_categorical(test_ctx):
''' """
This should get : This should get :
["{test-guid-1}", "{test-guid-2}", "{test-guid-3}", "{test-guid-4}"], ["{test-guid-1}", "{test-guid-2}", "{test-guid-3}", "{test-guid-4}"],
["{test-guid-9}", "{test-guid-10}", "{test-guid-11}", "{test-guid-12}"] ["{test-guid-9}", "{test-guid-10}", "{test-guid-11}", "{test-guid-12}"]
from the first two entries in the sample data where the geo_city from the first two entries in the sample data where the geo_city
data data
''' """
# Create a new instance of a SimilarityRecommender. # Create a new instance of a SimilarityRecommender.
cat_ctx = install_categorical_data(test_ctx) cat_ctx = install_categorical_data(test_ctx)
cts_ctx = install_continuous_data(test_ctx) cts_ctx = install_continuous_data(test_ctx)