"""OpenWPM demo crawl.

Visits each site in ``sites`` with ``NUM_BROWSERS`` instrumented
browser(s), recording HTTP traffic, cookie changes, navigations,
JS Web API calls, and WebRequest callstacks, plus a screenshot per
site. Output (SQLite data + logs) lands in ``crawl_data/``.
"""
from automation import CommandSequence, TaskManager

# The list of sites that we wish to crawl
NUM_BROWSERS = 1
sites = [
    'https://mattressfirm.com',
    'https://fingerprintjs.com/demo',
]

# Loads the default manager params
# and NUM_BROWSERS copies of the default browser params
manager_params, browser_params = TaskManager.load_default_params(NUM_BROWSERS)

# Update browser configuration (use this for per-browser settings)
for i in range(NUM_BROWSERS):
    # Record HTTP Requests and Responses
    browser_params[i]['http_instrument'] = True
    # Record cookie changes
    browser_params[i]['cookie_instrument'] = True
    # Record Navigations
    browser_params[i]['navigation_instrument'] = True
    # Record JS Web API calls
    browser_params[i]['js_instrument'] = True
    # Record the callstack of all WebRequests made
    browser_params[i]['callstack_instrument'] = True

# Run browser 0 under a virtual display (Xvfb) rather than a visible window
browser_params[0]['display_mode'] = 'xvfb'

# Update TaskManager configuration (use this for crawl-wide settings)
manager_params['data_directory'] = 'crawl_data'
manager_params['log_directory'] = 'crawl_data'

# Instantiates the measurement platform
# Commands time out by default after 60 seconds
manager = TaskManager.TaskManager(manager_params, browser_params)

# Visits the sites
for site in sites:
    # Parallelize sites over all number of browsers set above.
    # NOTE: `val=site` binds the current site at lambda-definition time,
    # avoiding the late-binding closure pitfall.
    command_sequence = CommandSequence.CommandSequence(
        site, reset=True,
        callback=lambda success, val=site:
            print("CommandSequence {} done".format(val)))

    # Start by visiting the page
    command_sequence.get(sleep=10, timeout=60)

    # Screenshot
    command_sequence.save_screenshot()

    # Dispatch the command sequence to an available browser
    # (with NUM_BROWSERS = 1, sites are visited sequentially)
    manager.execute_command_sequence(command_sequence)

# Shuts down the browsers and waits for the data to finish logging
manager.close()