Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1597336 - add a fxa+sync scenario r=lina,markh
This patch adds a scenario that ends up syncing the generated profile into FxSync. Differential Revision: https://phabricator.services.mozilla.com/D53789 --HG-- extra : moz-landing-system : lando
This commit is contained in:
Parent
a464412e8e
Commit
dc72ba77ca
1
.flake8
1
.flake8
|
@ -33,6 +33,7 @@ exclude =
|
|||
testing/condprofile/condprof/runner.py,
|
||||
testing/condprofile/condprof/scenarii/heavy.py,
|
||||
testing/condprofile/condprof/scenarii/settled.py,
|
||||
testing/condprofile/condprof/scenarii/synced.py
|
||||
testing/condprofile/condprof/helpers.py,
|
||||
testing/jsshell/benchmark.py,
|
||||
testing/marionette/mach_commands.py,
|
||||
|
|
|
@ -49,6 +49,10 @@ var gExceptionPaths = [
|
|||
// Exclude all search-extensions because they aren't referenced by filename
|
||||
"resource://search-extensions/",
|
||||
|
||||
// Exclude all services-automation because they are used through webdriver
|
||||
"resource://gre/modules/services-automation/",
|
||||
"resource://services-automation/ServicesAutomation.jsm",
|
||||
|
||||
// Bug 1550165 - Exclude localized App/Play store badges. These badges
|
||||
// are displayed in a promo area on the first load of about:logins.
|
||||
"chrome://browser/content/aboutlogins/third-party/app-store/",
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
"env": {
|
||||
"browser": true
|
||||
},
|
||||
"rules": {
|
||||
"no-unused-vars": ["error", {"args": "none", "varsIgnorePattern": "^EXPORTED_SYMBOLS|^Sync"}],
|
||||
}
|
||||
};
|
|
@ -0,0 +1,2 @@
|
|||
# Register resource aliases
|
||||
resource services-automation resource://gre/modules/services-automation/
|
|
@ -0,0 +1,426 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
"use strict";
|
||||
|
||||
/*
|
||||
* This module is used in automation to connect the browser to
|
||||
* a specific FxA account and trigger FX Sync.
|
||||
*
|
||||
* To use it, you can call this sequence:
|
||||
*
|
||||
* initConfig("https://accounts.stage.mozaws.net");
|
||||
* await Authentication.signIn(username, password);
|
||||
* await Sync.triggerSync();
|
||||
* await Authentication.signOut();
|
||||
*
|
||||
*
|
||||
* Where username is your FxA e-mail. it will connect your browser
|
||||
* to that account and trigger a Sync (on stage servers.)
|
||||
*
|
||||
* You can also use the convenience function that does everything:
|
||||
*
|
||||
* await triggerSync(username, password, "https://accounts.stage.mozaws.net");
|
||||
*
|
||||
*/
|
||||
var EXPORTED_SYMBOLS = ["Sync", "Authentication", "initConfig", "triggerSync"];
|
||||
|
||||
const { XPCOMUtils } = ChromeUtils.import(
|
||||
"resource://gre/modules/XPCOMUtils.jsm"
|
||||
);
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetters(this, {
|
||||
Services: "resource://gre/modules/Services.jsm",
|
||||
Log: "resource://gre/modules/Log.jsm",
|
||||
Weave: "resource://services-sync/main.js",
|
||||
Svc: "resource://services-sync/util.js",
|
||||
fxAccounts: "resource://gre/modules/FxAccounts.jsm",
|
||||
FxAccountsClient: "resource://gre/modules/FxAccountsClient.jsm",
|
||||
FxAccountsConfig: "resource://gre/modules/FxAccountsConfig.jsm",
|
||||
OS: "resource://gre/modules/osfile.jsm",
|
||||
setTimeout: "resource://gre/modules/Timer.jsm",
|
||||
clearTimeout: "resource://gre/modules/Timer.jsm",
|
||||
});
|
||||
|
||||
XPCOMUtils.defineLazyGlobalGetters(this, ["fetch"]);
|
||||
|
||||
const AUTOCONFIG_PREF = "identity.fxaccounts.autoconfig.uri";
|
||||
|
||||
/*
|
||||
* Log helpers.
|
||||
*/
|
||||
// Buffer of every message logged so far; drained by dumpLogs().
var _LOG = [];

/*
 * Records a message — and, when provided, a JSON-serialized error —
 * both to the console and to the in-memory buffer returned by
 * dumpLogs().
 */
function LOG(msg, error) {
  const entries = [msg];
  if (error) {
    entries.push(JSON.stringify(error));
  }
  for (const entry of entries) {
    console.debug(entry);
    _LOG.push(entry);
  }
}

/*
 * Returns everything logged since the previous call, joined with
 * newlines, and resets the buffer.
 */
function dumpLogs() {
  const res = _LOG.join("\n");
  _LOG = [];
  return res;
}
|
||||
|
||||
/*
 * Returns a promise that resolves when the aEventName observer topic
 * fires, or after a 3 second timeout — whichever comes first.
 *
 * Fix: the original never cleared the fallback timer, so after the
 * observer fired the timeout handler still ran, removed the observer a
 * second time, logged a spurious "handler timed out" and resolved the
 * (already settled) promise again. The timer is now cancelled as soon
 * as the event arrives.
 */
function promiseObserver(aEventName) {
  LOG("wait for " + aEventName);
  return new Promise(resolve => {
    let timeoutID;
    let handler = () => {
      Svc.Obs.remove(aEventName, handler);
      clearTimeout(timeoutID);
      resolve();
    };
    let handlerTimeout = () => {
      Svc.Obs.remove(aEventName, handler);
      LOG("handler timed out " + aEventName);
      resolve();
    };
    Svc.Obs.add(aEventName, handler);
    timeoutID = setTimeout(handlerTimeout, 3000);
  });
}
|
||||
|
||||
/*
|
||||
* Authentication
|
||||
*
|
||||
* Used to sign in an FxA account, takes care of
|
||||
* the e-mail verification flow.
|
||||
*
|
||||
* Usage:
|
||||
*
|
||||
* await Authentication.signIn(username, password);
|
||||
*/
|
||||
var Authentication = {
|
||||
async isLoggedIn() {
|
||||
return !!(await this.getSignedInUser());
|
||||
},
|
||||
|
||||
async isReady() {
|
||||
let user = await this.getSignedInUser();
|
||||
if (user) {
|
||||
LOG("current user " + JSON.stringify(user));
|
||||
}
|
||||
return user && user.verified;
|
||||
},
|
||||
|
||||
async getSignedInUser() {
|
||||
try {
|
||||
return await fxAccounts.getSignedInUser();
|
||||
} catch (error) {
|
||||
LOG("getSignedInUser() failed", error);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async shortWaitForVerification(ms) {
|
||||
LOG("shortWaitForVerification");
|
||||
let userData = await this.getSignedInUser();
|
||||
let timeoutID;
|
||||
LOG("set a timeout");
|
||||
let timeoutPromise = new Promise(resolve => {
|
||||
timeoutID = setTimeout(() => {
|
||||
LOG(`Warning: no verification after ${ms}ms.`);
|
||||
resolve();
|
||||
}, ms);
|
||||
});
|
||||
LOG("set a fxAccounts.whenVerified");
|
||||
await Promise.race([
|
||||
fxAccounts.whenVerified(userData).finally(() => clearTimeout(timeoutID)),
|
||||
timeoutPromise,
|
||||
]);
|
||||
LOG("done");
|
||||
return this.isReady();
|
||||
},
|
||||
|
||||
async _confirmUser(uri) {
|
||||
LOG("Open new tab and load verification page");
|
||||
let mainWindow = Services.wm.getMostRecentWindow("navigator:browser");
|
||||
let newtab = mainWindow.gBrowser.addWebTab(uri);
|
||||
let win = mainWindow.gBrowser.getBrowserForTab(newtab);
|
||||
win.addEventListener("load", function(e) {
|
||||
LOG("load");
|
||||
});
|
||||
|
||||
win.addEventListener("loadstart", function(e) {
|
||||
LOG("loadstart");
|
||||
});
|
||||
|
||||
win.addEventListener("error", function(msg, url, lineNo, columnNo, error) {
|
||||
var string = msg.toLowerCase();
|
||||
var substring = "script error";
|
||||
if (string.indexOf(substring) > -1) {
|
||||
LOG("Script Error: See Browser Console for Detail");
|
||||
} else {
|
||||
var message = [
|
||||
"Message: " + msg,
|
||||
"URL: " + url,
|
||||
"Line: " + lineNo,
|
||||
"Column: " + columnNo,
|
||||
"Error object: " + JSON.stringify(error),
|
||||
].join(" - ");
|
||||
|
||||
LOG(message);
|
||||
}
|
||||
});
|
||||
|
||||
LOG("wait for page to load");
|
||||
await new Promise(resolve => {
|
||||
let handlerTimeout = () => {
|
||||
LOG("timed out ");
|
||||
resolve();
|
||||
};
|
||||
var timer = setTimeout(handlerTimeout, 10000);
|
||||
win.addEventListener("loadend", function() {
|
||||
resolve();
|
||||
clearTimeout(timer);
|
||||
});
|
||||
});
|
||||
LOG("Page Loaded");
|
||||
let didVerify = await this.shortWaitForVerification(10000);
|
||||
LOG("remove tab");
|
||||
mainWindow.gBrowser.removeTab(newtab);
|
||||
return didVerify;
|
||||
},
|
||||
|
||||
/*
|
||||
* This whole verification process may be bypassed if the
|
||||
* account is whitelisted.
|
||||
*/
|
||||
async _completeVerification(username) {
|
||||
LOG("Fetching mail (from restmail) for user " + username);
|
||||
let restmailURI = `https://www.restmail.net/mail/${encodeURIComponent(
|
||||
username
|
||||
)}`;
|
||||
let triedAlready = new Set();
|
||||
const tries = 10;
|
||||
const normalWait = 4000;
|
||||
for (let i = 0; i < tries; ++i) {
|
||||
let resp = await fetch(restmailURI);
|
||||
let messages = await resp.json();
|
||||
// Sort so that the most recent emails are first.
|
||||
messages.sort((a, b) => new Date(b.receivedAt) - new Date(a.receivedAt));
|
||||
for (let m of messages) {
|
||||
// We look for a link that has a x-link that we haven't yet tried.
|
||||
if (!m.headers["x-link"] || triedAlready.has(m.headers["x-link"])) {
|
||||
continue;
|
||||
}
|
||||
if (!m.headers["x-verify-code"]) {
|
||||
continue;
|
||||
}
|
||||
let confirmLink = m.headers["x-link"];
|
||||
triedAlready.add(confirmLink);
|
||||
LOG("Trying confirmation link " + confirmLink);
|
||||
try {
|
||||
if (await this._confirmUser(confirmLink)) {
|
||||
LOG("confirmation done");
|
||||
return true;
|
||||
}
|
||||
LOG("confirmation failed");
|
||||
} catch (e) {
|
||||
LOG(
|
||||
"Warning: Failed to follow confirmation link: " +
|
||||
Log.exceptionStr(e)
|
||||
);
|
||||
}
|
||||
}
|
||||
if (i === 0) {
|
||||
// first time through after failing we'll do this.
|
||||
LOG("resendVerificationEmail");
|
||||
await fxAccounts.resendVerificationEmail();
|
||||
}
|
||||
if (await this.shortWaitForVerification(normalWait)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// One last try.
|
||||
return this.shortWaitForVerification(normalWait);
|
||||
},
|
||||
|
||||
async signIn(username, password) {
|
||||
LOG("Login user: " + username);
|
||||
try {
|
||||
// Required here since we don't go through the real login page
|
||||
LOG("Calling FxAccountsConfig.ensureConfigured");
|
||||
await FxAccountsConfig.ensureConfigured();
|
||||
let client = new FxAccountsClient();
|
||||
LOG("Signing in");
|
||||
let credentials = await client.signIn(username, password, true);
|
||||
LOG("Signed in, setting up the signed user in fxAccounts");
|
||||
await fxAccounts._internal.setSignedInUser(credentials);
|
||||
|
||||
// If the account is not whitelisted for tests, we need to verify it
|
||||
if (!credentials.verified) {
|
||||
LOG("We need to verify the account");
|
||||
await this._completeVerification(username);
|
||||
} else {
|
||||
LOG("Credentials already verified");
|
||||
}
|
||||
return true;
|
||||
} catch (error) {
|
||||
LOG("signIn() failed", error);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async signOut() {
|
||||
if (await Authentication.isLoggedIn()) {
|
||||
// Note: This will clean up the device ID.
|
||||
await fxAccounts.signOut();
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/*
|
||||
* Sync
|
||||
*
|
||||
* Used to trigger sync.
|
||||
*
|
||||
* usage:
|
||||
*
|
||||
* await Sync.triggerSync();
|
||||
*/
|
||||
var Sync = {
|
||||
getSyncLogsDirectory() {
|
||||
return OS.Path.join(OS.Constants.Path.profileDir, ...["weave", "logs"]);
|
||||
},
|
||||
|
||||
async init() {
|
||||
Svc.Obs.add("weave:service:sync:error", this);
|
||||
Svc.Obs.add("weave:service:setup-complete", this);
|
||||
Svc.Obs.add("weave:service:tracking-started", this);
|
||||
// Delay the automatic sync operations, so we can trigger it manually
|
||||
Weave.Svc.Prefs.set("scheduler.immediateInterval", 7200);
|
||||
Weave.Svc.Prefs.set("scheduler.idleInterval", 7200);
|
||||
Weave.Svc.Prefs.set("scheduler.activeInterval", 7200);
|
||||
Weave.Svc.Prefs.set("syncThreshold", 10000000);
|
||||
// Wipe all the logs
|
||||
await this.wipeLogs();
|
||||
},
|
||||
|
||||
observe(subject, topic, data) {
|
||||
LOG("Event received " + topic);
|
||||
},
|
||||
|
||||
async configureSync() {
|
||||
// todo, enable all sync engines here
|
||||
// the addon engine requires kinto creds...
|
||||
LOG("configuring sync");
|
||||
console.assert(await Authentication.isReady(), "You are not connected");
|
||||
await Weave.Service.configure();
|
||||
if (!Weave.Status.ready) {
|
||||
await promiseObserver("weave:service:ready");
|
||||
}
|
||||
if (Weave.Service.locked) {
|
||||
await promiseObserver("weave:service:resyncs-finished");
|
||||
}
|
||||
},
|
||||
|
||||
/*
|
||||
* triggerSync() runs the whole process of Syncing.
|
||||
*
|
||||
* returns 1 on success, 0 on failure.
|
||||
*/
|
||||
async triggerSync() {
|
||||
if (!(await Authentication.isLoggedIn())) {
|
||||
LOG("Not connected");
|
||||
return 1;
|
||||
}
|
||||
await this.init();
|
||||
let result = 1;
|
||||
try {
|
||||
await this.configureSync();
|
||||
LOG("Triggering a sync");
|
||||
await Weave.Service.sync();
|
||||
|
||||
// wait a second for things to settle...
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
LOG("Sync done");
|
||||
result = 0;
|
||||
} catch (error) {
|
||||
LOG("triggerSync() failed", error);
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
async wipeLogs() {
|
||||
let outputDirectory = this.getSyncLogsDirectory();
|
||||
if (!(await OS.File.exists(outputDirectory))) {
|
||||
return;
|
||||
}
|
||||
LOG("Wiping existing Sync logs");
|
||||
try {
|
||||
let iterator = new OS.File.DirectoryIterator(outputDirectory);
|
||||
await iterator.forEach(async entry => {
|
||||
try {
|
||||
await OS.File.remove(entry.path);
|
||||
} catch (error) {
|
||||
LOG("wipeLogs() could not remove " + entry.path, error);
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
LOG("wipeLogs() failed", error);
|
||||
}
|
||||
},
|
||||
|
||||
async getLogs() {
|
||||
let outputDirectory = this.getSyncLogsDirectory();
|
||||
let entries = [];
|
||||
|
||||
if (await OS.File.exists(outputDirectory)) {
|
||||
// Iterate through the directory
|
||||
let iterator = new OS.File.DirectoryIterator(outputDirectory);
|
||||
|
||||
await iterator.forEach(async entry => {
|
||||
let info = await OS.File.stat(entry.path);
|
||||
entries.push({
|
||||
path: entry.path,
|
||||
name: entry.name,
|
||||
lastModificationDate: info.lastModificationDate,
|
||||
});
|
||||
});
|
||||
entries.sort(function(a, b) {
|
||||
return b.lastModificationDate - a.lastModificationDate;
|
||||
});
|
||||
}
|
||||
|
||||
const promises = entries.map(async entry => {
|
||||
let content = await OS.File.read(entry.path, {
|
||||
encoding: "utf-8",
|
||||
});
|
||||
return {
|
||||
name: entry.name,
|
||||
content,
|
||||
};
|
||||
});
|
||||
return Promise.all(promises);
|
||||
},
|
||||
};
|
||||
|
||||
/*
 * Points the browser at the given FxA autoconfig server by writing the
 * identity.fxaccounts.autoconfig.uri preference.
 */
function initConfig(autoconfig) {
  const { prefs } = Services;
  prefs.setCharPref(AUTOCONFIG_PREF, autoconfig);
}
|
||||
|
||||
/*
 * Convenience wrapper that runs the whole flow: configure the FxA
 * server, sign in, trigger a sync, sign out.
 *
 * Returns { result, logs } where result comes from Sync.triggerSync()
 * (0 on success, 1 on failure) and logs bundles the Sync log files plus
 * the in-memory console buffer.
 */
async function triggerSync(username, password, autoconfig) {
  initConfig(autoconfig);
  await Authentication.signIn(username, password);
  const result = await Sync.triggerSync();
  await Authentication.signOut();
  const syncLogs = await Sync.getLogs();
  const logs = {
    sync: syncLogs,
    condprof: [{ name: "console.txt", content: dumpLogs() }],
  };
  return { result, logs };
}
|
|
@ -0,0 +1,14 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
with Files('**'):
|
||||
BUG_COMPONENT = ('Firefox', 'Services Automation')
|
||||
|
||||
EXTRA_COMPONENTS += [
|
||||
'AutomationComponents.manifest',
|
||||
]
|
||||
|
||||
EXTRA_JS_MODULES['services-automation'] += [
|
||||
'ServicesAutomation.jsm',
|
||||
]
|
|
@ -13,6 +13,12 @@ DIRS += [
|
|||
'settings',
|
||||
]
|
||||
|
||||
# The automation dir is only included in nightlies and debug
|
||||
if not CONFIG['RELEASE_OR_BETA'] or CONFIG['MOZ_DEBUG']:
|
||||
DIRS += [
|
||||
'automation'
|
||||
]
|
||||
|
||||
if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android':
|
||||
DIRS += [
|
||||
'fxaccounts',
|
||||
|
|
|
@ -22,8 +22,11 @@ job-defaults:
|
|||
using: run-task
|
||||
checkout: false
|
||||
tooltool-downloads: public
|
||||
scopes:
|
||||
- secrets:get:project/releng/gecko/build/level-{level}/conditioned-profiles
|
||||
worker:
|
||||
max-run-time: 5400
|
||||
taskcluster-proxy: true
|
||||
max-run-time: 10800
|
||||
env:
|
||||
MOZ_FETCHES_DIR: fetches
|
||||
CONDPROF_ARCHIVES: /builds/worker/archive
|
||||
|
|
|
@ -28,7 +28,7 @@ import os
|
|||
from arsenic import get_session
|
||||
from arsenic.browsers import Firefox
|
||||
|
||||
from condprof.util import fresh_profile, LOG, ERROR
|
||||
from condprof.util import fresh_profile, LOG, ERROR, obfuscate_file, obfuscate
|
||||
from condprof.scenarii import scenarii
|
||||
from condprof.client import get_profile, ProfileNotFoundError
|
||||
from condprof.archiver import Archiver
|
||||
|
@ -59,6 +59,16 @@ class ProfileCreator:
|
|||
|
||||
async def run(self, headless=True):
|
||||
LOG("Building %s x %s" % (self.scenario, self.customization_data["name"]))
|
||||
|
||||
if self.scenario in self.customization_data.get("ignore_scenario", []):
|
||||
LOG("Skipping (ignored scenario in that customization)")
|
||||
return
|
||||
|
||||
filter_by_platform = self.customization_data.get("platforms")
|
||||
if filter_by_platform and self.env.target_platform not in filter_by_platform:
|
||||
LOG("Skipping (ignored platform in that customization)")
|
||||
return
|
||||
|
||||
with self.env.get_device(2828, verbose=True) as device:
|
||||
try:
|
||||
with self.env.get_browser():
|
||||
|
@ -83,9 +93,18 @@ class ProfileCreator:
|
|||
LOG("Archive created at %s" % archive_name)
|
||||
statinfo = os.stat(archive_name)
|
||||
LOG("Current size is %d" % statinfo.st_size)
|
||||
LOG("Extracting logs")
|
||||
if "logs" in metadata:
|
||||
logs = metadata.pop("logs")
|
||||
for prefix, prefixed_logs in logs.items():
|
||||
for log in prefixed_logs:
|
||||
content = obfuscate(log["content"])[1]
|
||||
with open(os.path.join(dir, prefix + "-" + log["name"]), "wb") as f:
|
||||
f.write(content.encode("utf-8"))
|
||||
|
||||
if metadata.get("result", 0) != 0:
|
||||
LOG("The scenario returned a bad exit code")
|
||||
raise Exception("scenario error")
|
||||
raise Exception(metadata.get("result_message", "scenario error"))
|
||||
self.changelog.append("update", **metadata)
|
||||
|
||||
async def build_profile(self, device, headless):
|
||||
|
@ -117,7 +136,8 @@ class ProfileCreator:
|
|||
metadata = Metadata(profile)
|
||||
|
||||
LOG("Starting the Gecko app...")
|
||||
self.env.prepare(logfile=self._log_filename("adb"))
|
||||
adb_logs = self._log_filename("adb")
|
||||
self.env.prepare(logfile=adb_logs)
|
||||
geckodriver_logs = self._log_filename("geckodriver")
|
||||
LOG("Writing geckodriver logs in %s" % geckodriver_logs)
|
||||
step = START
|
||||
|
@ -146,7 +166,9 @@ class ProfileCreator:
|
|||
raise
|
||||
finally:
|
||||
self.env.stop_browser()
|
||||
|
||||
for logfile in (adb_logs, geckodriver_logs):
|
||||
if os.path.exists(logfile):
|
||||
obfuscate_file(logfile)
|
||||
self.env.collect_profile()
|
||||
|
||||
# writing metadata
|
||||
|
|
|
@ -12,14 +12,21 @@ def get_customizations():
|
|||
yield os.path.join(HERE, f)
|
||||
|
||||
|
||||
def get_customization(path):
|
||||
if not path.endswith(".json"):
|
||||
path += ".json"
|
||||
if not os.path.exists(path):
|
||||
def find_customization(path_or_name):
    """Resolve a customization name or path to an existing .json file.

    A missing ".json" suffix is appended first; when the resulting path
    does not exist, a second lookup relative to HERE is attempted.
    Returns the resolved path, or None when nothing exists.
    """
    candidate = path_or_name
    if not candidate.endswith(".json"):
        candidate = candidate + ".json"
    if os.path.exists(candidate):
        return candidate
    # trying relative
    relative = os.path.join(HERE, candidate)
    if os.path.exists(relative):
        return relative
    return None


def get_customization(path_or_name):
    """Load and parse a customization file.

    Raises IOError when find_customization() cannot locate it.
    """
    path = find_customization(path_or_name)
    if path is None:
        raise IOError("Can't find the customization file %r" % path_or_name)
    with open(path) as f:
        return json.loads(f.read())
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
},
|
||||
"scenario": {
|
||||
"heavy": {"max_urls": 50}
|
||||
}
|
||||
"heavy": {"max_urls": 100}
|
||||
},
|
||||
"ignore_scenario": ["synced"]
|
||||
}
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"name": "sync",
|
||||
"addons":{},
|
||||
"prefs":{
|
||||
|
||||
},
|
||||
"scenario": {
|
||||
"synced": {"max_urls": 250}
|
||||
},
|
||||
"platforms": ["macosx64", "win32", "win64", "linux64"],
|
||||
"ignore_scenario": ["heavy", "settled"]
|
||||
}
|
|
@ -3,7 +3,7 @@
|
|||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
""" Helpers to build scenarii
|
||||
"""
|
||||
from condprof.util import ERROR
|
||||
from condprof.util import ERROR, LOG
|
||||
|
||||
|
||||
_SUPPORTED_MOBILE_BROWSERS = "fenix", "gecko", "fennec"
|
||||
|
@ -56,3 +56,50 @@ class TabSwitcher:
|
|||
await self.session.switch_to_window(handle)
|
||||
except Exception:
|
||||
ERROR("Could not switch to handle %s" % str(handle))
|
||||
|
||||
|
||||
# 10 minutes
_SCRIPT_TIMEOUT = 10 * 60 * 1000


async def execute_async_script(session, script, *args):
    """Run ``script`` through webdriver's /execute/async endpoint in the
    "chrome" Marionette context.

    The script is retried up to three times; after the third failure a
    synthetic error payload ({"result": 1, "result_message": ..., ...})
    is returned instead of raising, so callers always get a dict-shaped
    result. Whatever happens, the original context is restored before
    returning.
    """
    # switch to the right context if needed
    current_context = await session._request(url="/moz/context", method="GET")
    if current_context != "chrome":
        LOG("Switching to chrome context")
        await session._request(
            url="/moz/context", method="POST", data={"context": "chrome"}
        )
        switch_back = True
    else:
        switch_back = False
    LOG("Setting up script timeout")
    # Raise the async-script timeout so a long-running sync is not
    # killed by the webdriver default.
    await session._request(
        url="/timeouts", method="POST", data={"script": _SCRIPT_TIMEOUT}
    )
    try:
        attempts = 0
        while True:
            LOG("Running triggerSync()")
            try:
                # Successful run returns the script's resolved value.
                return await session._request(
                    url="/execute/async",
                    method="POST",
                    data={"script": script, "args": list(args)},
                )
            except Exception as e:
                attempts += 1
                LOG("The script failed.")
                LOG("Error:" + str(e))
                if attempts > 2:
                    # Give up after three attempts; report as data, not
                    # as an exception, so the scenario can record it.
                    return {
                        "result": 1,
                        "result_message": str(e),
                        "result_exc": e,
                        "logs": {},
                    }
    finally:
        # Always restore the context we found the session in.
        if switch_back:
            await session._request(
                url="/moz/context", method="POST", data={"context": current_context}
            )
|
||||
|
|
|
@ -29,7 +29,7 @@ from condprof.util import (
|
|||
get_current_platform,
|
||||
extract_from_dmg,
|
||||
) # NOQA
|
||||
from condprof.customization import get_customizations # NOQA
|
||||
from condprof.customization import get_customizations, find_customization # NOQA
|
||||
from condprof.client import read_changelog, ProfileNotFoundError # NOQA
|
||||
|
||||
|
||||
|
@ -44,12 +44,6 @@ def main(args=sys.argv[1:]):
|
|||
parser.add_argument(
|
||||
"--customization", help="Profile customization to use", type=str, default="all"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--fresh-profile",
|
||||
help="Create a fresh profile",
|
||||
action="store_true",
|
||||
default=False,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--visible", help="Don't use headless mode", action="store_true", default=False
|
||||
)
|
||||
|
@ -90,6 +84,14 @@ def main(args=sys.argv[1:]):
|
|||
|
||||
args.android = args.firefox is not None and args.firefox.startswith("org.mozilla")
|
||||
|
||||
# early checks to avoid extra work
|
||||
if args.customization != "all":
|
||||
if find_customization(args.customization) is None:
|
||||
raise IOError("Cannot find customization %r" % args.customization)
|
||||
|
||||
if args.scenario != "all" and args.scenario not in scenarii:
|
||||
raise IOError("Cannot find scenario %r" % args.scenario)
|
||||
|
||||
if not args.android and args.firefox is not None:
|
||||
LOG("Verifying Desktop Firefox binary")
|
||||
# we want to verify we do have a firefox binary
|
||||
|
|
|
@ -2,6 +2,7 @@ from __future__ import absolute_import
|
|||
|
||||
from condprof.scenarii.heavy import heavy
|
||||
from condprof.scenarii.settled import settled
|
||||
from condprof.scenarii.synced import synced
|
||||
|
||||
|
||||
scenarii = {"heavy": heavy, "settled": settled}
|
||||
scenarii = {"heavy": heavy, "settled": settled, "synced": synced}
|
||||
|
|
|
@ -2,7 +2,7 @@ import random
|
|||
import os
|
||||
import asyncio
|
||||
|
||||
from condprof.util import get_logger
|
||||
from condprof.util import LOG
|
||||
from condprof.helpers import TabSwitcher
|
||||
|
||||
|
||||
|
@ -47,7 +47,7 @@ async def heavy(session, options):
|
|||
visited = 0
|
||||
|
||||
for current, url in enumerate(URL_LIST):
|
||||
get_logger().visit_url(index=current + 1, total=max, url=url)
|
||||
LOG("%d/%d %s" % (current + 1, max, url))
|
||||
retries = 0
|
||||
while retries < 3:
|
||||
try:
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
const { triggerSync } = ChromeUtils.import(
  "resource://gre/modules/services-automation/ServicesAutomation.jsm"
);

// Marionette "execute async script" body: arguments[0..2] are the FxA
// username, password and autoconfig URL; arguments[3] is the callback
// that resolves the async script with the scenario result.
let resolve = arguments[3]; // eslint-disable-line
const fail = error => {
  resolve({ logs: {}, result: 1, result_message: error.toString() });
};
try {
  // Fix: a rejected promise is NOT caught by the surrounding
  // try/catch; without .catch() a rejection would never call resolve()
  // and the webdriver script would hang until its timeout.
  triggerSync(arguments[0], arguments[1], arguments[2]) // eslint-disable-line
    .then(res => resolve(res)) // eslint-disable-line
    .catch(fail);
} catch (error) {
  fail(error);
}
|
|
@ -0,0 +1,55 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
import os
|
||||
import asyncio
|
||||
|
||||
from condprof.scenarii.heavy import URL_LIST
|
||||
from condprof.util import LOG, get_credentials
|
||||
from condprof.helpers import TabSwitcher, execute_async_script
|
||||
|
||||
|
||||
# The chrome-context JS payload executed via webdriver to trigger
# an FxA sync (see sync.js next to this module).
with open(os.path.join(os.path.dirname(__file__), "sync.js")) as f:
    SYNC_SCRIPT = f.read()


async def synced(session, options):
    """Scenario: browse a list of URLs, then sync the profile to FxSync.

    Requires FxA credentials (env vars or TC secret via
    get_credentials()); raises ValueError when none are available.
    Returns a metadata dict with the sync script's logs/result and the
    number of URLs actually visited.
    """
    username, password = get_credentials()
    if username is None:
        raise ValueError("The synced scenario needs an fxa username and" " password")
    metadata = {}
    # NOTE(review): "max" shadows the builtin; kept as-is.
    max = options.get("max_urls", 150)
    tabs = TabSwitcher(session, options)
    await tabs.create_windows()
    visited = 0

    for current, url in enumerate(URL_LIST):
        LOG("%d/%d %s" % (current + 1, max, url))
        retries = 0
        # Each page load gets up to 3 attempts of 5 seconds each;
        # a page that never loads is simply skipped.
        while retries < 3:
            try:
                await asyncio.wait_for(session.get(url), 5)
                visited += 1
                break
            except asyncio.TimeoutError:
                retries += 1

        if max != -1 and current + 1 == max:
            break

        # switch to the next tab
        await tabs.switch()

    # now that we've visited all pages, we want to upload to FXSync
    LOG("Syncing profile to FxSync")
    # Credentials are masked by LOG()'s obfuscation in condprof.util —
    # presumably safe to log here; verify if LOG changes.
    LOG("Username is %s, password is %s" % (username, password))
    script_res = await execute_async_script(
        session, SYNC_SCRIPT, username, password, "https://accounts.stage.mozaws.net"
    )
    if script_res is None:
        script_res = {}
    metadata["logs"] = script_res.get("logs", {})
    metadata["result"] = script_res.get("result", 0)
    metadata["result_message"] = script_res.get("result_message", "SUCCESS")
    metadata["visited_url"] = visited
    return metadata
|
@ -4,12 +4,17 @@ import tempfile
|
|||
import shutil
|
||||
import responses
|
||||
import re
|
||||
import json
|
||||
|
||||
from condprof.client import get_profile, ROOT_URL
|
||||
from condprof.util import _DEFAULT_SERVER
|
||||
|
||||
|
||||
PROFILE = re.compile(ROOT_URL + "/.*/.*tgz")
|
||||
with open(os.path.join(os.path.dirname(__file__), "profile.tgz"), "rb") as f:
|
||||
PROFILE_DATA = f.read()
|
||||
SECRETS = re.compile(_DEFAULT_SERVER + "/.*")
|
||||
SECRETS_PROXY = re.compile("http://taskcluster/secrets/.*")
|
||||
|
||||
|
||||
class TestClient(unittest.TestCase):
|
||||
|
@ -35,6 +40,17 @@ class TestClient(unittest.TestCase):
|
|||
status=200,
|
||||
)
|
||||
|
||||
secret = {"secret": {"username": "user", "password": "pass"}}
|
||||
secret = json.dumps(secret)
|
||||
for pattern in (SECRETS, SECRETS_PROXY):
|
||||
responses.add(
|
||||
responses.GET,
|
||||
pattern,
|
||||
body=secret,
|
||||
headers={"content-length": str(len(secret))},
|
||||
status=200,
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.target)
|
||||
shutil.rmtree(self.download_dir)
|
||||
|
@ -52,8 +68,9 @@ class TestClient(unittest.TestCase):
|
|||
# grabbing a profile should generate two files
|
||||
self.assertEqual(len(os.listdir(download_dir)), num_elmts + 2)
|
||||
|
||||
# we do two network calls when getting a file, a HEAD and a GET
|
||||
self.assertEqual(len(responses.calls), 2)
|
||||
# we do at least two network calls when getting a file,
|
||||
# a HEAD and a GET and possibly a TC secret
|
||||
self.assertTrue(len(responses.calls) >= 2)
|
||||
|
||||
# reseting the response counters
|
||||
responses.calls.reset()
|
||||
|
@ -64,7 +81,8 @@ class TestClient(unittest.TestCase):
|
|||
# grabbing a profile should not download new stuff
|
||||
self.assertEqual(len(os.listdir(download_dir)), num_elmts + 2)
|
||||
|
||||
# and do a single extra HEAD call
|
||||
# and do a single extra HEAD call, everything else is cached,
|
||||
# even the TC secret
|
||||
self.assertEqual(len(responses.calls), 1)
|
||||
|
||||
|
||||
|
|
|
@ -86,7 +86,23 @@ class TestRunner(unittest.TestCase):
|
|||
|
||||
@responses.activate
|
||||
def test_runner(self):
|
||||
args = ["--geckodriver", GECKODRIVER, "--firefox", FIREFOX, self.archive_dir]
|
||||
main(args)
|
||||
# XXX we want a bunch of assertions here to check
|
||||
# that the archives dir gets filled correctly
|
||||
if "FXA_USERNAME" not in os.environ:
|
||||
os.environ["FXA_USERNAME"] = "me"
|
||||
if "FXA_PASSWORD" not in os.environ:
|
||||
os.environ["FXA_PASSWORD"] = "password"
|
||||
try:
|
||||
args = [
|
||||
"--geckodriver",
|
||||
GECKODRIVER,
|
||||
"--firefox",
|
||||
FIREFOX,
|
||||
self.archive_dir,
|
||||
]
|
||||
main(args)
|
||||
# XXX we want a bunch of assertions here to check
|
||||
# that the archives dir gets filled correctly
|
||||
finally:
|
||||
if os.environ["FXA_USERNAME"] == "me":
|
||||
del os.environ["FXA_USERNAME"]
|
||||
if os.environ["FXA_PASSWORD"] == "password":
|
||||
del os.environ["FXA_PASSWORD"]
|
||||
|
|
|
@ -21,6 +21,7 @@ from datetime import datetime
|
|||
|
||||
import requests
|
||||
from requests.exceptions import ConnectionError
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
from condprof import progress
|
||||
|
||||
|
@ -70,7 +71,7 @@ class NullLogger:
|
|||
traceback.print_exc(file=sys.stdout)
|
||||
|
||||
|
||||
def get_logger():
|
||||
def _get_logger():
|
||||
global _LOGGER
|
||||
if _LOGGER is not None:
|
||||
return _LOGGER
|
||||
|
@ -95,12 +96,12 @@ def get_logger():
|
|||
|
||||
def LOG(msg):
|
||||
msg = "[%s] %s" % (datetime.now().isoformat(), msg)
|
||||
get_logger().msg(msg)
|
||||
_get_logger().msg(obfuscate(msg)[1])
|
||||
|
||||
|
||||
def ERROR(msg):
|
||||
msg = "[%s] %s" % (datetime.now().isoformat(), msg)
|
||||
get_logger().error(msg)
|
||||
_get_logger().error(obfuscate(msg)[1])
|
||||
|
||||
|
||||
def fresh_profile(profile, customization_data):
|
||||
|
@ -111,12 +112,15 @@ def fresh_profile(profile, customization_data):
|
|||
new_profile = create_profile(app="firefox")
|
||||
prefs = customization_data["prefs"]
|
||||
prefs.update(DEFAULT_PREFS)
|
||||
LOG("Setting prefs %s" % str(prefs.items()))
|
||||
new_profile.set_preferences(prefs)
|
||||
extensions = []
|
||||
for name, url in customization_data["addons"].items():
|
||||
LOG("Downloading addon %s" % name)
|
||||
extension = download_file(url)
|
||||
extensions.append(extension)
|
||||
LOG("Installing addons")
|
||||
new_profile.addons.install(extensions, unpack=True)
|
||||
new_profile.addons.install(extensions)
|
||||
shutil.copytree(new_profile.profile, profile)
|
||||
return profile
|
||||
|
@ -279,12 +283,12 @@ def latest_nightly(binary=None):
|
|||
|
||||
|
||||
def write_yml_file(yml_file, yml_data):
|
||||
get_logger().info("writing %s to %s" % (yml_data, yml_file))
|
||||
LOG("writing %s to %s" % (yml_data, yml_file))
|
||||
try:
|
||||
with open(yml_file, "w") as outfile:
|
||||
yaml.dump(yml_data, outfile, default_flow_style=False)
|
||||
except Exception as e:
|
||||
get_logger().critical("failed to write yaml file, exeption: %s" % e)
|
||||
ERROR("failed to write yaml file, exeption: %s" % e)
|
||||
|
||||
|
||||
def get_version(firefox):
|
||||
|
@ -357,3 +361,70 @@ class BaseEnv:
|
|||
|
||||
def stop_browser(self):
|
||||
pass
|
||||
|
||||
|
||||
# Taskcluster secrets endpoint template: {0} is the server root, {1} the
# path separator ("%2F" is passed at the call site), {2} the SCM level.
_URL = (
    "{0}/secrets/v1/secret/project"
    "{1}releng{1}gecko{1}build{1}level-{2}{1}conditioned-profiles"
)
_DEFAULT_SERVER = "https://firefox-ci-tc.services.mozilla.com"


def get_tc_secret():
    """Fetch the conditioned-profiles secret from Taskcluster.

    Raises OSError when not running inside a Taskcluster task (no
    TASK_ID in the environment). Transient 5xx responses are retried up
    to 5 times; a final HTTP error raises via raise_for_status().
    Returns the "secret" payload of the service's JSON response.
    """
    if "TASK_ID" not in os.environ:
        raise OSError("Not running in Taskcluster")
    session = requests.Session()
    retry = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504])
    http_adapter = requests.adapters.HTTPAdapter(max_retries=retry)
    session.mount("https://", http_adapter)
    session.mount("http://", http_adapter)
    # Prefer the in-task proxy when available; fall back to the public
    # Taskcluster server.
    secrets_url = _URL.format(
        os.environ.get("TASKCLUSTER_PROXY_URL", _DEFAULT_SERVER),
        "%2F",
        os.environ.get("MOZ_SCM_LEVEL", "1"),
    )
    res = session.get(secrets_url)
    res.raise_for_status()
    return res.json()["secret"]
|
||||
|
||||
|
||||
# Process-wide cache for the FxA credentials (key: "creds").
_CACHED = {}


def obfuscate(text):
    """Mask the FxA username and password inside ``text``.

    Returns (hit, text) where ``hit`` is True when a replacement was
    performed; otherwise the text comes back untouched.
    """
    username, password = get_credentials()
    if username is None:
        return False, text
    if username not in text and password not in text:
        return False, text
    cleaned = text.replace(password, "<PASSWORD>").replace(username, "<USERNAME>")
    return True, cleaned


def obfuscate_file(path):
    """Rewrite ``path`` in place with credentials masked.

    Leaves the file alone when it contains no credentials.
    """
    with open(path) as f:
        data = f.read()
    hit, cleaned = obfuscate(data)
    if not hit:
        return
    with open(path, "w") as f:
        f.write(cleaned)


def get_credentials():
    """Return the FxA (username, password) pair, or (None, None).

    Looks at the FXA_USERNAME/FXA_PASSWORD environment variables first;
    when running under Taskcluster (TASK_ID set) it falls back to the
    TC secret. The result is cached for the life of the process.
    """
    if "creds" in _CACHED:
        return _CACHED["creds"]
    username = os.environ.get("FXA_USERNAME")
    password = os.environ.get("FXA_PASSWORD")
    if username is None or password is None:
        if "TASK_ID" not in os.environ:
            return None, None
        try:
            secret = get_tc_secret()
        except Exception:
            return None, None
        username = secret["username"]
        password = secret["password"]
    _CACHED["creds"] = username, password
    return username, password
|
||||
|
|
Loading…
Open link in new issue