refactor(packages): use workspace references

Danny Coates 2020-05-14 17:48:39 -07:00
Parent d237266cf8
Commit 81575019a9
No known key found for this signature
GPG key ID: 4C442633C62E00CB
195 changed files with 169799 additions and 264839 deletions


@ -22,8 +22,8 @@ package directories.
Packages are tested with `./test-package.sh`. The default action is to run:
```sh
npm ci
npm test
yarn install
yarn test
```
Packages may define a `scripts/test-ci.sh` as a custom test script.
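For reference, a package-level `scripts/test-ci.sh` might look like the minimal sketch below. This is an illustration only, not a file from this commit; the lint and test steps are assumptions, since each package defines its own script.
```sh
#!/bin/bash -ex
# Hypothetical scripts/test-ci.sh for a single package.
# test-package.sh invokes it as ./scripts/test-ci.sh, so cd to the
# package root relative to this script before doing anything else.
DIR=$(dirname "$0")
cd "$DIR/.."

# Illustrative steps; real packages substitute whatever their CI needs.
yarn lint
yarn test
```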


@ -5,12 +5,14 @@ DIR=$(dirname "$0")
cd "$DIR/.."
# npm install just enough to run these scripts
npm ci --ignore-scripts --no-optional --only=prod
npm i --ignore-scripts --no-optional --only=prod --no-package-lock
node .circleci/modules-to-test.js | tee packages/test.list
./.circleci/assert-branch.sh
./_scripts/create-version-json.sh
sudo apt-get install -y graphicsmagick
# only run a full npm install if required
if [[ "$MODULE" == "all" ]] || grep -e "$MODULE" -e 'all' packages/test.list > /dev/null; then
npm ci
if [[ "$MODULE" == "all" ]]; then
yarn install --immutable
fi


@ -23,7 +23,7 @@ commands:
parameters:
package:
type: string
default: all
default: none
steps:
- checkout
- run: ./.circleci/base-install.sh << parameters.package >>
@ -37,7 +37,7 @@ commands:
default: 6
steps:
- base-install:
package: fxa-content-server
package: all
- run:
name: Running test section << parameters.index >> of << parameters.total >>
environment:
@ -54,7 +54,7 @@ commands:
jobs:
test-package:
docker:
- image: mozilla/fxa-circleci
- image: circleci/node:12
- image: redis
- image: memcached
- image: pafortin/goaws
@ -202,6 +202,7 @@ jobs:
steps:
- setup_remote_docker:
version: 18.09.3
- checkout
- run:
name: Build & deploy fxa-circleci
command: ./.circleci/build-and-deploy-fxa-circleci.sh


@ -12,7 +12,7 @@ if grep -e "$MODULE" -e 'all' "$DIR/../packages/test.list" > /dev/null; then
time ./scripts/test-ci.sh
else
# default action
time (npm ci && npm test)
time (yarn workspaces focus "$MODULE" && yarn test)
fi
else
echo -e "\n###################################"


@ -1,6 +1,7 @@
.circleci
.vscode
.git
.DS_Store
**/coverage
**/docs
**/node_modules
@ -9,12 +10,9 @@
**/.nyc_output
**/fxa-content-server-l10n
packages/*/build
packages/*/static/bower_components
packages/fxa-auth-server/.mail_output
!packages/fxa-auth-server/docs/pushpayloads.schema.json
packages/fxa-amplitude-send
packages/fxa-dev-launcher
packages/fxa-email-event-proxy
packages/fxa-email-service
packages/fxa-content-server/dist
packages/fxa-geodb/db

15
.gitignore vendored

@ -34,7 +34,6 @@ storybooks-publish
# Dependencies
**/node_modules
**/browser_modules
**/bower_components
# Logging
*.log*
@ -52,6 +51,11 @@ coverage.html
.DS_Store
Thumbs.db
# Yarn
.yarn/cache
.yarn/build-state.yml
.yarn/install-state.gz
## Package-specific ##
# circleci
@ -61,11 +65,6 @@ Thumbs.db
packages/browserid-verifier/loadtest/venv
packages/browserid-verifier/loadtest/*.pyc
# fxa-amplitude-send
packages/fxa-amplitude-send/data.stage
packages/fxa-amplitude-send/lambda.zip
packages/fxa-amplitude-send/*.parquet
# fxa-auth-db-mysql
packages/fxa-auth-db-mysql/config/dev.js
packages/fxa-auth-db-mysql/sandbox
@ -112,9 +111,7 @@ packages/fxa-email-service/**/*.rs.bk
packages/fxa-email-service/config/local.*
packages/fxa-email-service/fxa-auth-db-mysql
packages/fxa-email-service/target
# fxa-geodb
packages/fxa-geodb/db
packages/fxa-email-service/.sourcehash
# fxa-js-client
packages/fxa-js-client/components

1
.vscode/extensions.json vendored

@ -1,6 +1,5 @@
{
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"rbbit.typescript-hero",
"waderyan.nodejs-extension-pack",
"esbenp.prettier-vscode",

1
.vscode/settings.json vendored

@ -13,7 +13,6 @@
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"editor.codeActionsOnSave": {
"source.fixAll.tslint": true
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"

3351
.yarn/plugins/@yarnpkg/plugin-typescript.js vendored Normal file

Diff not rendered because it is too large.

985
.yarn/plugins/@yarnpkg/plugin-workspace-tools.js vendored Normal file

@ -0,0 +1,985 @@
/* eslint-disable*/
module.exports = {
name: "@yarnpkg/plugin-workspace-tools",
factory: function(require) {
var plugin = /******/ (function(modules) {
// webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {}; // The require function
/******/
/******/ /******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if (installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/
} // Create a new module (and put it into the cache)
/******/ /******/ var module = (installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/
}); // Execute the module function
/******/
/******/ /******/ modules[moduleId].call(
module.exports,
module,
module.exports,
__webpack_require__
); // Flag the module as loaded
/******/
/******/ /******/ module.l = true; // Return the exports of the module
/******/
/******/ /******/ return module.exports;
/******/
} // expose the modules object (__webpack_modules__)
/******/
/******/
/******/ /******/ __webpack_require__.m = modules; // expose the module cache
/******/
/******/ /******/ __webpack_require__.c = installedModules; // define getter function for harmony exports
/******/
/******/ /******/ __webpack_require__.d = function(
exports,
name,
getter
) {
/******/ if (!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
enumerable: true,
get: getter
});
/******/
}
/******/
}; // define __esModule on exports
/******/
/******/ /******/ __webpack_require__.r = function(exports) {
/******/ if (typeof Symbol !== "undefined" && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, {
value: "Module"
});
/******/
}
/******/ Object.defineProperty(exports, "__esModule", { value: true });
/******/
}; // create a fake namespace object // mode & 1: value is a module id, require it // mode & 2: merge all properties of value into the ns // mode & 4: return value when already ns object // mode & 8|1: behave like require
/******/
/******/ /******/ /******/ /******/ /******/ /******/ __webpack_require__.t = function(
value,
mode
) {
/******/ if (mode & 1) value = __webpack_require__(value);
/******/ if (mode & 8) return value;
/******/ if (
mode & 4 &&
typeof value === "object" &&
value &&
value.__esModule
)
return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, "default", {
enumerable: true,
value: value
});
/******/ if (mode & 2 && typeof value != "string")
for (var key in value)
__webpack_require__.d(
ns,
key,
function(key) {
return value[key];
}.bind(null, key)
);
/******/ return ns;
/******/
}; // getDefaultExport function for compatibility with non-harmony modules
/******/
/******/ /******/ __webpack_require__.n = function(module) {
/******/ var getter =
module && module.__esModule
? /******/ function getDefault() {
return module["default"];
}
: /******/ function getModuleExports() {
return module;
};
/******/ __webpack_require__.d(getter, "a", getter);
/******/ return getter;
/******/
}; // Object.prototype.hasOwnProperty.call
/******/
/******/ /******/ __webpack_require__.o = function(object, property) {
return Object.prototype.hasOwnProperty.call(object, property);
}; // __webpack_public_path__
/******/
/******/ /******/ __webpack_require__.p = ""; // Load entry module and return exports
/******/
/******/
/******/ /******/ return __webpack_require__((__webpack_require__.s = 0));
/******/
})(
/************************************************************************/
/******/ [
/* 0 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __importDefault =
(this && this.__importDefault) ||
function(mod) {
return mod && mod.__esModule
? mod
: {
default: mod
};
};
Object.defineProperty(exports, "__esModule", {
value: true
});
const focus_1 = __importDefault(__webpack_require__(1));
const foreach_1 = __importDefault(__webpack_require__(5));
const plugin = {
commands: [focus_1.default, foreach_1.default]
}; // eslint-disable-next-line arca/no-default-export
exports.default = plugin;
/***/
},
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __decorate =
(this && this.__decorate) ||
function(decorators, target, key, desc) {
var c = arguments.length,
r =
c < 3
? target
: desc === null
? (desc = Object.getOwnPropertyDescriptor(target, key))
: desc,
d;
if (
typeof Reflect === "object" &&
typeof Reflect.decorate === "function"
)
r = Reflect.decorate(decorators, target, key, desc);
else
for (var i = decorators.length - 1; i >= 0; i--)
if ((d = decorators[i]))
r =
(c < 3
? d(r)
: c > 3
? d(target, key, r)
: d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", {
value: true
});
const cli_1 = __webpack_require__(2);
const core_1 = __webpack_require__(3);
const core_2 = __webpack_require__(3);
const clipanion_1 = __webpack_require__(4); // eslint-disable-next-line arca/no-default-export
class WorkspacesFocus extends cli_1.BaseCommand {
constructor() {
super(...arguments);
this.workspaces = [];
this.json = false;
this.production = false;
}
async execute() {
const configuration = await core_1.Configuration.find(
this.context.cwd,
this.context.plugins
);
const { project, workspace } = await core_1.Project.find(
configuration,
this.context.cwd
);
const cache = await core_1.Cache.find(configuration);
let requiredWorkspaces;
if (this.workspaces.length === 0) {
if (!workspace)
throw new cli_1.WorkspaceRequiredError(
project.cwd,
this.context.cwd
);
requiredWorkspaces = new Set([workspace]);
} else {
requiredWorkspaces = new Set(
this.workspaces.map(name => {
return project.getWorkspaceByIdent(
core_2.structUtils.parseIdent(name)
);
})
);
} // First we compute the dependency chain to see what workspaces are
// dependencies of the one we're trying to focus on.
//
// Note: remember that new elements can be added in a set even while
// iterating over it (because they're added at the end)
for (const workspace of requiredWorkspaces) {
for (const dependencyType of core_1.Manifest.hardDependencies) {
for (const descriptor of workspace.manifest
.getForScope(dependencyType)
.values()) {
const matchingWorkspace = project.tryWorkspaceByDescriptor(
descriptor
);
if (matchingWorkspace === null) continue;
requiredWorkspaces.add(matchingWorkspace);
}
}
} // Then we go over each workspace that didn't get selected, and remove all
// their dependencies.
for (const workspace of project.workspaces) {
if (requiredWorkspaces.has(workspace)) {
if (this.production) {
workspace.manifest.devDependencies.clear();
}
} else {
workspace.manifest.dependencies.clear();
workspace.manifest.devDependencies.clear();
workspace.manifest.peerDependencies.clear();
}
} // And finally we can run the install, but we have to make sure we don't
// persist the project state on the disk (otherwise all workspaces would
// lose their dependencies!).
const report = await core_1.StreamReport.start(
{
configuration,
json: this.json,
stdout: this.context.stdout,
includeLogs: true
},
async report => {
await project.install({
cache,
report,
persistProject: false
}); // Virtual package references may have changed so persist just the install state.
await project.persistInstallStateFile();
}
);
return report.exitCode();
}
}
WorkspacesFocus.usage = clipanion_1.Command.Usage({
category: `Workspace-related commands`,
description: `install a single workspace and its dependencies`,
details: `
This command will run an install as if the specified workspaces (and all other workspaces they depend on) were the only ones in the project. If no workspaces are explicitly listed, the active one will be assumed.
Note that this command is only very moderately useful when using zero-installs, since the cache will contain all the packages anyway - meaning that the only difference between a full install and a focused install would just be a few extra lines in the \`.pnp.js\` file, at the cost of introducing an extra complexity.
If the \`--production\` flag is set, only regular dependencies will be installed, and dev dependencies will be omitted.
If the \`--json\` flag is set the output will follow a JSON-stream output also known as NDJSON (https://github.com/ndjson/ndjson-spec).
`
});
__decorate(
[clipanion_1.Command.Rest()],
WorkspacesFocus.prototype,
"workspaces",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`--json`)],
WorkspacesFocus.prototype,
"json",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`--production`)],
WorkspacesFocus.prototype,
"production",
void 0
);
__decorate(
[clipanion_1.Command.Path(`workspaces`, `focus`)],
WorkspacesFocus.prototype,
"execute",
null
);
exports.default = WorkspacesFocus;
/***/
},
/* 2 */
/***/ function(module, exports) {
module.exports = require("@yarnpkg/cli");
/***/
},
/* 3 */
/***/ function(module, exports) {
module.exports = require("@yarnpkg/core");
/***/
},
/* 4 */
/***/ function(module, exports) {
module.exports = require("clipanion");
/***/
},
/* 5 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __decorate =
(this && this.__decorate) ||
function(decorators, target, key, desc) {
var c = arguments.length,
r =
c < 3
? target
: desc === null
? (desc = Object.getOwnPropertyDescriptor(target, key))
: desc,
d;
if (
typeof Reflect === "object" &&
typeof Reflect.decorate === "function"
)
r = Reflect.decorate(decorators, target, key, desc);
else
for (var i = decorators.length - 1; i >= 0; i--)
if ((d = decorators[i]))
r =
(c < 3
? d(r)
: c > 3
? d(target, key, r)
: d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __importDefault =
(this && this.__importDefault) ||
function(mod) {
return mod && mod.__esModule
? mod
: {
default: mod
};
};
var __importStar =
(this && this.__importStar) ||
function(mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null)
for (var k in mod)
if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", {
value: true
});
const cli_1 = __webpack_require__(2);
const core_1 = __webpack_require__(3);
const core_2 = __webpack_require__(3);
const core_3 = __webpack_require__(3);
const clipanion_1 = __webpack_require__(4);
const os_1 = __webpack_require__(6);
const p_limit_1 = __importDefault(__webpack_require__(7));
const yup = __importStar(__webpack_require__(9));
/**
* Retrieves all the child workspaces of a given root workspace recursively
*
* @param rootWorkspace root workspace
* @param project project
*
* @returns all the child workspaces
*/
const getWorkspaceChildrenRecursive = (rootWorkspace, project) => {
const workspaceList = [];
for (const childWorkspaceCwd of rootWorkspace.workspacesCwds) {
const childWorkspace = project.workspacesByCwd.get(
childWorkspaceCwd
);
if (childWorkspace) {
workspaceList.push(
childWorkspace,
...getWorkspaceChildrenRecursive(childWorkspace, project)
);
}
}
return workspaceList;
}; // eslint-disable-next-line arca/no-default-export
class WorkspacesForeachCommand extends cli_1.BaseCommand {
constructor() {
super(...arguments);
this.args = [];
this.all = false;
this.verbose = false;
this.parallel = false;
this.interlaced = false;
this.topological = false;
this.topologicalDev = false;
this.include = [];
this.exclude = [];
this.private = true;
}
async execute() {
const configuration = await core_1.Configuration.find(
this.context.cwd,
this.context.plugins
);
const {
project,
workspace: cwdWorkspace
} = await core_1.Project.find(configuration, this.context.cwd);
if (!this.all && !cwdWorkspace)
throw new cli_1.WorkspaceRequiredError(
project.cwd,
this.context.cwd
);
const command = this.cli.process([
this.commandName,
...this.args
]);
const scriptName =
command.path.length === 1 &&
command.path[0] === `run` &&
typeof command.scriptName !== `undefined`
? command.scriptName
: null;
if (command.path.length === 0)
throw new clipanion_1.UsageError(
`Invalid subcommand name for iteration - use the 'run' keyword if you wish to execute a script`
);
const rootWorkspace = this.all
? project.topLevelWorkspace
: cwdWorkspace;
const candidates = [
rootWorkspace,
...getWorkspaceChildrenRecursive(rootWorkspace, project)
];
const workspaces = [];
for (const workspace of candidates) {
if (scriptName && !workspace.manifest.scripts.has(scriptName))
continue; // Prevents infinite loop in the case of configuring a script as such:
// "lint": "yarn workspaces foreach --all lint"
if (
scriptName === process.env.npm_lifecycle_event &&
workspace.cwd === cwdWorkspace.cwd
)
continue;
if (
this.include.length > 0 &&
!this.include.includes(
core_3.structUtils.stringifyIdent(workspace.locator)
)
)
continue;
if (
this.exclude.length > 0 &&
this.exclude.includes(
core_3.structUtils.stringifyIdent(workspace.locator)
)
)
continue;
if (
this.private === false &&
workspace.manifest.private === true
)
continue;
workspaces.push(workspace);
}
let interlaced = this.interlaced; // No need to buffer the output if we're executing the commands sequentially
if (!this.parallel) interlaced = true;
const needsProcessing = new Map();
const processing = new Set();
const concurrency = this.parallel
? Math.max(1, os_1.cpus().length / 2)
: 1;
const limit = p_limit_1.default(this.jobs || concurrency);
let commandCount = 0;
let finalExitCode = null;
const report = await core_2.StreamReport.start(
{
configuration,
stdout: this.context.stdout
},
async report => {
const runCommand = async (workspace, { commandIndex }) => {
if (!this.parallel && this.verbose && commandIndex > 1)
report.reportSeparator();
const prefix = getPrefix(workspace, {
configuration,
verbose: this.verbose,
commandIndex
});
const [stdout, stdoutEnd] = createStream(report, {
prefix,
interlaced
});
const [stderr, stderrEnd] = createStream(report, {
prefix,
interlaced
});
try {
const exitCode =
(await this.cli.run([this.commandName, ...this.args], {
cwd: workspace.cwd,
stdout,
stderr
})) || 0;
stdout.end();
stderr.end();
const emptyStdout = await stdoutEnd;
const emptyStderr = await stderrEnd;
if (this.verbose && emptyStdout && emptyStderr)
report.reportInfo(
null,
`${prefix} Process exited without output (exit code ${exitCode})`
);
return exitCode;
} catch (err) {
stdout.end();
stderr.end();
await stdoutEnd;
await stderrEnd;
throw err;
}
};
for (const workspace of workspaces)
needsProcessing.set(
workspace.anchoredLocator.locatorHash,
workspace
);
while (needsProcessing.size > 0) {
if (report.hasErrors()) break;
const commandPromises = [];
for (const [identHash, workspace] of needsProcessing) {
// If we are already running the command on that workspace, skip
if (
processing.has(
workspace.anchoredDescriptor.descriptorHash
)
)
continue;
let isRunnable = true;
if (this.topological || this.topologicalDev) {
const resolvedSet = this.topologicalDev
? new Map([
...workspace.manifest.dependencies,
...workspace.manifest.devDependencies
])
: workspace.manifest.dependencies;
for (const descriptor of resolvedSet.values()) {
const workspace = project.tryWorkspaceByDescriptor(
descriptor
);
isRunnable =
workspace === null ||
!needsProcessing.has(
workspace.anchoredLocator.locatorHash
);
if (!isRunnable) {
break;
}
}
}
if (!isRunnable) continue;
processing.add(
workspace.anchoredDescriptor.descriptorHash
);
commandPromises.push(
limit(async () => {
const exitCode = await runCommand(workspace, {
commandIndex: ++commandCount
});
needsProcessing.delete(identHash);
processing.delete(
workspace.anchoredDescriptor.descriptorHash
);
return exitCode;
})
); // If we're not executing processes in parallel we can just wait for it
// to finish outside of this loop (it'll then reenter it anyway)
if (!this.parallel) {
break;
}
}
if (commandPromises.length === 0) {
const cycle = Array.from(needsProcessing.values())
.map(workspace => {
return core_3.structUtils.prettyLocator(
configuration,
workspace.anchoredLocator
);
})
.join(`, `);
report.reportError(
core_2.MessageName.CYCLIC_DEPENDENCIES,
`Dependency cycle detected (${cycle})`
);
return;
}
const exitCodes = await Promise.all(commandPromises);
const errorCode = exitCodes.find(code => code !== 0); // The order in which the exit codes will be processed is fairly
// opaque, so better just return a generic "1" for determinism.
finalExitCode =
typeof errorCode !== `undefined` ? 1 : finalExitCode;
if (
(this.topological || this.topologicalDev) &&
typeof errorCode !== `undefined`
) {
report.reportError(
core_2.MessageName.UNNAMED,
`The command failed for workspaces that are depended upon by other workspaces; can't satisfy the dependency graph`
);
}
}
}
);
if (finalExitCode !== null) {
return finalExitCode;
} else {
return report.exitCode();
}
}
}
WorkspacesForeachCommand.schema = yup.object().shape({
jobs: yup.number().min(2),
parallel: yup.boolean().when(`jobs`, {
is: val => val > 1,
then: yup
.boolean()
.oneOf([true], `--parallel must be set when using --jobs`),
otherwise: yup.boolean()
})
});
WorkspacesForeachCommand.usage = clipanion_1.Command.Usage({
category: `Workspace-related commands`,
description: `run a command on all workspaces`,
details: `
This command will run a given sub-command on current and all its descendant workspaces. Various flags can alter the exact behavior of the command:
- If \`-p,--parallel\` is set, the commands will be ran in parallel; they'll by default be limited to a number of parallel tasks roughly equal to half your core number, but that can be overridden via \`-j,--jobs\`.
- If \`-p,--parallel\` and \`-i,--interlaced\` are both set, Yarn will print the lines from the output as it receives them. If \`-i,--interlaced\` wasn't set, it would instead buffer the output from each process and print the resulting buffers only after their source processes have exited.
- If \`-t,--topological\` is set, Yarn will only run the command after all workspaces that depend on it through the \`dependencies\` field have successfully finished executing. If \`--tological-dev\` is set, both the \`dependencies\` and \`devDependencies\` fields will be considered when figuring out the wait points.
- If \`--all\` is set, Yarn will run the command on all the workspaces of a project. By default yarn runs the command only on current and all its descendant workspaces.
- The command may apply to only some workspaces through the use of \`--include\` which acts as a whitelist. The \`--exclude\` flag will do the opposite and will be a list of packages that mustn't execute the script.
Adding the \`-v,--verbose\` flag will cause Yarn to print more information; in particular the name of the workspace that generated the output will be printed at the front of each line.
If the command is \`run\` and the script being run does not exist the child workspace will be skipped without error.
`,
examples: [
[
`Publish current and all descendant packages`,
`yarn workspaces foreach npm publish --tolerate-republish`
],
[
`Run build script on current and all descendant packages`,
`yarn workspaces foreach run build`
],
[
`Run build script on current and all descendant packages in parallel, building dependent packages first`,
`yarn workspaces foreach -pt run build`
]
]
});
__decorate(
[clipanion_1.Command.String()],
WorkspacesForeachCommand.prototype,
"commandName",
void 0
);
__decorate(
[clipanion_1.Command.Proxy()],
WorkspacesForeachCommand.prototype,
"args",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`-a,--all`)],
WorkspacesForeachCommand.prototype,
"all",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`-v,--verbose`)],
WorkspacesForeachCommand.prototype,
"verbose",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`-p,--parallel`)],
WorkspacesForeachCommand.prototype,
"parallel",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`-i,--interlaced`)],
WorkspacesForeachCommand.prototype,
"interlaced",
void 0
);
__decorate(
[clipanion_1.Command.String(`-j,--jobs`)],
WorkspacesForeachCommand.prototype,
"jobs",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`-t,--topological`)],
WorkspacesForeachCommand.prototype,
"topological",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`--topological-dev`)],
WorkspacesForeachCommand.prototype,
"topologicalDev",
void 0
);
__decorate(
[clipanion_1.Command.Array(`--include`)],
WorkspacesForeachCommand.prototype,
"include",
void 0
);
__decorate(
[clipanion_1.Command.Array(`--exclude`)],
WorkspacesForeachCommand.prototype,
"exclude",
void 0
);
__decorate(
[clipanion_1.Command.Boolean(`--private`)],
WorkspacesForeachCommand.prototype,
"private",
void 0
);
__decorate(
[clipanion_1.Command.Path(`workspaces`, `foreach`)],
WorkspacesForeachCommand.prototype,
"execute",
null
);
exports.default = WorkspacesForeachCommand;
function createStream(report, { prefix, interlaced }) {
const streamReporter = report.createStreamReporter(prefix);
const defaultStream = new core_3.miscUtils.DefaultStream();
defaultStream.pipe(streamReporter, {
end: false
});
defaultStream.on(`finish`, () => {
streamReporter.end();
});
const promise = new Promise(resolve => {
streamReporter.on(`finish`, () => {
resolve(defaultStream.active);
});
});
if (interlaced) return [defaultStream, promise];
const streamBuffer = new core_3.miscUtils.BufferStream();
streamBuffer.pipe(defaultStream, {
end: false
});
streamBuffer.on(`finish`, () => {
defaultStream.end();
});
return [streamBuffer, promise];
}
function getPrefix(
workspace,
{ configuration, commandIndex, verbose }
) {
if (!verbose) return null;
const ident = core_3.structUtils.convertToIdent(workspace.locator);
const name = core_3.structUtils.stringifyIdent(ident);
const prefix = `[${name}]:`;
const colors = [
`#2E86AB`,
`#A23B72`,
`#F18F01`,
`#C73E1D`,
`#CCE2A3`
];
const colorName = colors[commandIndex % colors.length];
return configuration.format(prefix, colorName);
}
/***/
},
/* 6 */
/***/ function(module, exports) {
module.exports = require("os");
/***/
},
/* 7 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
const pTry = __webpack_require__(8);
const pLimit = concurrency => {
if (concurrency < 1) {
throw new TypeError(
"Expected `concurrency` to be a number from 1 and up"
);
}
const queue = [];
let activeCount = 0;
const next = () => {
activeCount--;
if (queue.length > 0) {
queue.shift()();
}
};
const run = (fn, resolve, ...args) => {
activeCount++;
const result = pTry(fn, ...args);
resolve(result);
result.then(next, next);
};
const enqueue = (fn, resolve, ...args) => {
if (activeCount < concurrency) {
run(fn, resolve, ...args);
} else {
queue.push(run.bind(null, fn, resolve, ...args));
}
};
const generator = (fn, ...args) =>
new Promise(resolve => enqueue(fn, resolve, ...args));
Object.defineProperties(generator, {
activeCount: {
get: () => activeCount
},
pendingCount: {
get: () => queue.length
}
});
return generator;
};
module.exports = pLimit;
module.exports.default = pLimit;
/***/
},
/* 8 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
module.exports = (callback, ...args) =>
new Promise(resolve => {
resolve(callback(...args));
});
/***/
},
/* 9 */
/***/ function(module, exports) {
module.exports = require("yup");
/***/
}
/******/
]
);
return plugin;
}
};
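For context: the `workspaces focus` command added by this vendored plugin is what the CI and build scripts later in this commit rely on. A couple of illustrative invocations, using package names from this repo:
```sh
# Install only fxa-payments-server plus the workspaces it depends on
yarn workspaces focus fxa-payments-server

# Same, but skip devDependencies (as done for the production Docker image)
yarn workspaces focus --production fxa-payments-server fxa-shared
```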

123952
.yarn/releases/yarn-2.0.0-rc.33.js vendored Executable file

File diff suppressed because one or more lines are too long.

54
.yarnrc.yml Normal file

@ -0,0 +1,54 @@
nodeLinker: node-modules
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-typescript.js
spec: "@yarnpkg/plugin-typescript"
- path: .yarn/plugins/@yarnpkg/plugin-workspace-tools.js
spec: "@yarnpkg/plugin-workspace-tools"
yarnPath: .yarn/releases/yarn-2.0.0-rc.33.js
packageExtensions:
intern@*:
dependencies:
typescript: "3.8.3"
"@storybook/addons@*":
dependencies:
regenerator-runtime: "0.13.5"
"@storybook/api@*":
dependencies:
react-dom: "16.13.1"
"@storybook/addon-actions@*":
dependencies:
react-dom: "16.13.1"
regenerator-runtime: "0.13.5"
"@storybook/addon-links@*":
dependencies:
react-dom: "16.13.1"
regenerator-runtime: "0.13.5"
apollo-link-error@*:
dependencies:
graphql: "14.6.0"
"@apollo/react-common@*":
dependencies:
apollo-client: "2.6.10"
apollo-utilities: "1.3.4"
graphql: "14.6.0"
"@types/react": "16.9.35"
"@apollo/react-components@*":
dependencies:
apollo-cache: "1.3.5"
apollo-client: "2.6.10"
apollo-link: "1.2.14"
apollo-utilities: "1.3.4"
"@apollo/react-ssr@*":
dependencies:
apollo-client: "2.6.10"
graphql: "14.6.0"
"@types/react": "16.9.35"
fxa-pairing-channel@*:
dependencies:
webpack: "4.43.0"
react-document-title@*:
dependencies:
react: "16.13.1"


@ -37,10 +37,12 @@ The Firefox Accounts (fxa) monorepo
```sh
cd fxa
npm install
yarn install
npm start
```
Note: If `yarn install` fails, ensure your `yarn -v` is at least `1.22.0`.
Note this starts up all required services, including Redis, MySQL, and Memcached. It is recommended that you don't run these services yourself, or occupy any of the [server ports](https://github.com/mozilla/fxa/blob/master/mysql_servers.json). Doing so may result in errors.
4. Visit [localhost:3030](http://localhost:3030/).
@ -94,6 +96,18 @@ When you signup for an account using the form on `localhost:3030/signup` the "in
If you get an `error` status for any of the servers please verify that you installed all required dependencies. Otherwise file an issue on this repository or [connect with the team on Firefox Accounts Riot](https://chat.mozilla.org/#/room/#fxa:mozilla.org).
### Managing dependencies
Use `yarn` to add dependencies. To add a dependency to a single package, which is what you'll usually want, run something like:
```sh
yarn workspace fxa-shared add --dev eslint
```
For multiple packages use [workspaces foreach](https://yarnpkg.com/cli/workspaces/foreach).
To update dependencies use [yarn up](https://yarnpkg.com/cli/up) or `yarn workspace <name> up`.
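A few illustrative combinations of the commands above (the package and dependency names are only examples):
```sh
# Run `yarn add node-fetch` in every workspace (from the repo root)
yarn workspaces foreach add node-fetch

# Upgrade a dependency across all workspaces
yarn up prettier

# Upgrade a dependency in a single workspace
yarn workspace fxa-payments-server up react
```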
---
---
@ -111,7 +125,7 @@ If you get an `error` status for any of the servers please verify that you insta
> [libgmp](https://gmplib.org/),
> [graphicsmagick](http://www.graphicsmagick.org/),
> [docker](https://docs.docker.com/),
> [gcloud CLI](https://cloud.google.com/sdk/)
> [gcloud CLI](https://cloud.google.com/sdk/) > [Yarn 2](https://yarnpkg.com)
##### OS X (with [Brew](http://brew.sh/)):


@ -14,6 +14,7 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
COPY --from=fxa-builder:latest --chown=app:app /fxa/_scripts/check-mysql.sh /app/
COPY --from=fxa-builder:latest --chown=app:app /fxa/node_modules /node_modules
COPY --from=fxa-builder:latest --chown=app:app /fxa/packages/fxa-geodb /fxa-geodb
COPY --from=fxa-builder:latest --chown=app:app /fxa/packages/fxa-shared /fxa-shared
USER app


@ -7,7 +7,30 @@ for d in ./packages/*/ ; do
(cd "$d" && mkdir -p config && cp ../version.json . && cp ../version.json config)
done
npm i lerna
npx lerna bootstrap --hoist pm2
npx lerna run --stream build
npx lerna exec --stream --concurrency 2 --no-bail -- npm prune --production
# `npx yarn` because `npm i -g yarn` needs sudo
npx yarn install
SKIP_PREFLIGHT_CHECK=true npx yarn workspaces foreach --topological run build
rm -rf node_modules
rm -rf packages/*/node_modules
npx yarn workspaces focus --production \
123done \
browserid-verifier \
fxa-admin-panel \
fxa-admin-server \
fxa-auth-db-mysql \
fxa-auth-server \
fxa-content-server \
fxa-customs-server \
fxa-event-broker \
fxa-geodb \
fxa-graphql-api \
fxa-js-client \
fxa-metrics-processor \
fxa-payments-server \
fxa-profile-server \
fxa-react \
fxa-settings \
fxa-shared \
fxa-support-panel
npx yarn cache clean --all
rm -rf artifacts


@ -0,0 +1,16 @@
#!/bin/bash -e
if [[ "${npm_execpath:(-4)}" != "yarn" ]]; then
if [[ ! $(command -v yarn) ]]; then
echo -n "install yarn now? [y/n]: "
read -rn 1 ans
printf "\n"
if [[ $ans == "y" ]]; then
npm i -g yarn
fi
fi
echo -e "\n##################################################\n"
echo "please use 'yarn install' instead of 'npm install'"
echo -e "\n##################################################\n"
exit 1
fi


@ -12,13 +12,8 @@ echo "Building docs."
cd packages/fxa-email-service
cargo doc --no-deps
# fxa-payments-server relies on fxa-content-server .scss styles, which in turn
# rely on some modules in package.json
cd ../../packages/fxa-content-server
npm ci
cd ../../packages/fxa-payments-server
npm ci
yarn workspaces focus fxa-payments-server
npm run build-storybook
cd ../..


@ -1,15 +0,0 @@
#!/bin/bash -e
if [ "${SKIP_PACKAGES}" != "true" ]; then
# Set ulimit, need it for npm
ulimit -S -n 2048 || echo "Setting ulimit failed"
if [ "${CI}" = "true" ]; then
# it seems the filesystem on circleci can't handle full concurrency
npx lerna bootstrap --hoist pm2 --concurrency 6 --ignore fxa-amplitude-send
else
npx lerna bootstrap --ci --hoist pm2
fi
ln -sf node_modules/.bin/pm2 pm2
fi


@ -10,17 +10,17 @@ npm test fxa-auth-db-mysql fxa-auth-server
npm test all
"
scopes=""
for scope in "$@"
workspaces=""
for workspace in "$@"
do
scopes="$scopes --scope $scope"
workspaces="$workspaces --include $workspace"
done
if [[ -z "$PACKAGE" ]]; then
>&2 echo "$help"
exit 1
elif [[ "$PACKAGE" == "all" ]]; then
lerna run test --stream --no-prefix --loglevel success --concurrency 1 --ignore fxa-amplitude-send
yarn workspaces foreach --topological-dev run test
else
echo "$scopes" | xargs lerna run test --stream --no-prefix --loglevel success --concurrency 1
echo "$workspaces run test" | xargs yarn workspaces foreach --topological-dev
fi


@ -38,7 +38,7 @@ Chosen options: "CSS variables" and "SCSS", because:
- While using CSS variables with SASS will prevent using mixins (or SASS functions like `darken`) that rely on variable values, they can be used without impeding core functionality FxA uses SASS for like nested selectors, extends, and functions and mixins that don't rely on variable values like the generation and use of our media queries.
- CSS variables are native to the browser and decision to use them now is a prudent one. They have good browser support and can be used without a preprocessor and changed after preprocessor compilation. They are targetable by JavaScript, easily themed, and can be scoped globally or on an element, providing options for usage.
- Scoped component solutions (CSS-in-JS, CSS modules) encourage self-contained components rather than building the UI as a whole. While one goal of FxA is to reuse components across the ecosystem where possible (e.g. the `fxa-components` package), FxA will likely reap more benefits from a class-based approach. This allows for globally shared styles and a loosely coupled stylesheet rather than a tightly coupled CSS-in-JS solution with conditional styles based on props and how the component is used. Classes promote a DRYer and more consistent approach.
- Scoped component solutions (CSS-in-JS, CSS modules) encourage self-contained components rather than building the UI as a whole. While one goal of FxA is to reuse components across the ecosystem where possible (e.g. the `fxa-react` package), FxA will likely reap more benefits from a class-based approach. This allows for globally shared styles and a loosely coupled stylesheet rather than a tightly coupled CSS-in-JS solution with conditional styles based on props and how the component is used. Classes promote a DRYer and more consistent approach.
- CSS-in-JS would add one additional layer of tech to learn (like `styled-components` syntax and best practices) while the Settings Redesign project is already introducing other novel tech, and members of the FxA engineering team also as a whole personally prefer not to use CSS-in-JS.
- This decision doesn't preclude the option of implementing CSS modules at a later time if it's determined that we would prefer component scoped styles with SCSS, and CSS variables could still be used if we later switch to CSS-in-JS.


@ -1,11 +0,0 @@
{
"packages": [
"packages/*"
],
"version": "independent",
"command": {
"bootstrap": {
"ignore": "fxa-amplitude-send"
}
}
}

11351
package-lock.json generated

Diff not rendered because it is too large.


@ -1,18 +1,19 @@
{
"name": "fxa",
"private": true,
"version": "2.0.0",
"description": "Firefox Accounts monorepo",
"scripts": {
"preinstall": "_scripts/check-package-manager.sh",
"authors": "git shortlog -s | cut -c8- | sort -f > AUTHORS",
"postinstall": "_scripts/install-all.sh",
"audit": "lerna run audit --parallel",
"audit": "echo 'audit is currently unavailable'",
"start": "nps --prefix=start",
"stop": "nps --prefix=stop",
"restart": "nps --prefix=restart",
"adb-reverse": "./_scripts/adb-reverse.sh",
"test": "_scripts/test-package.sh",
"config-fxios": "node _scripts/config-fxios.js",
"format": "lerna run format",
"format": "yarn workspaces foreach run format",
"ports": "pm2 jlist | json -d'\t' -a -c 'this.pm2_env.env.PORT' pm2_env.env.PORT name"
},
"homepage": "https://github.com/mozilla/fxa",
@ -29,12 +30,11 @@
"@dannycoates/nps": "^5.9.12",
"diffparser": "^2.0.1",
"husky": "^4.2.3",
"lerna": "^3.20.2",
"lint-staged": "^10.0.8",
"node-fetch": "^2.6.0",
"nodemon": "^2.0.3",
"pm2": "^4.2.3",
"prettier": "^1.19.1",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"replace-in-file": "^5.0.2"
},
"engines": {
@ -95,5 +95,13 @@
"eslint-plugin-jest": "^23.8.2",
"eslint-plugin-react": "^7.19.0",
"json": "^9.0.6"
},
"workspaces": [
"packages/*"
],
"resolutions": {
"gobbledygook": "git://github.com/mozilla-fxa/gobbledygook.git#354042684056e57ca77f036989e907707a36cff2",
"tap/typescript": "3.8.3",
"@types/node": "12.12.38"
}
}


@ -1,3 +0,0 @@
{
"directory": "static/bower_components"
}


@ -5,5 +5,5 @@ Dockerfile
*.ico
*.txt
ansible/*
static/bower_components/*
static/img/*
static/components/*
static/img/*


@ -1,23 +0,0 @@
{
"name": "123done",
"version": "0.0.0",
"homepage": "https://github.com/mozilla/123done",
"authors": ["johngruen <john.gruen@gmail.com>"],
"description": "fxa-oauth-demo",
"license": "MIT",
"ignore": [
"**/.*",
"node_modules",
"bower_components",
"app/bower_components",
"test",
"tests",
"static/bower_components"
],
"private": true,
"dependencies": {
"jquery": "2.1.0",
"normalize-css": "3.0.1",
"modernizr": "2.7.2"
}
}

3555
packages/123done/package-lock.json generated

Diff not rendered because it is too large.


@ -20,7 +20,6 @@
},
"private": true,
"dependencies": {
"bower": "*",
"client-sessions": "0.6.x",
"express": "4.16.4",
"fxa-jwtool": "0.7.x",
@ -35,17 +34,15 @@
},
"devDependencies": {
"audit-filter": "0.5.0",
"eslint": "6.6.0",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "2.0.1",
"npm-run-all": "4.1.5",
"pm2": "^4.2.3",
"prettier": "1.18.2"
"fxa-shared": "workspace:*",
"pm2": "^4.4.0",
"prettier": "^2.0.5"
},
"scripts": {
"lint": "npm-run-all --parallel lint:*",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:eslint": "eslint .",
"postinstall": "bower install --config.interactive=false -s",
"lint": "eslint .",
"start": "pm2 start pm2.config.js",
"stop": "pm2 stop pm2.config.js",
"restart": "pm2 restart pm2.config.js",


@ -14,6 +14,7 @@ module.exports = {
NODE_ENV: 'dev',
PORT: '8080',
},
filter_env: ['npm_'],
min_uptime: '2m',
},
{
@ -26,6 +27,7 @@ module.exports = {
NODE_ENV: 'dev',
PORT: '10139',
},
filter_env: ['npm_'],
min_uptime: '2m',
},
],

5
packages/123done/static/components/jquery.min.js vendored Normal file

File diff suppressed because one or more lines are too long.

Diff not rendered because it is too large.

425
packages/123done/static/components/normalize.css vendored Normal file

@ -0,0 +1,425 @@
/*! normalize.css v3.0.1 | MIT License | git.io/normalize */
/**
* 1. Set default font family to sans-serif.
* 2. Prevent iOS text size adjust after orientation change, without disabling
* user zoom.
*/
html {
font-family: sans-serif; /* 1 */
-ms-text-size-adjust: 100%; /* 2 */
-webkit-text-size-adjust: 100%; /* 2 */
}
/**
* Remove default margin.
*/
body {
margin: 0;
}
/* HTML5 display definitions
========================================================================== */
/**
* Correct `block` display not defined for any HTML5 element in IE 8/9.
* Correct `block` display not defined for `details` or `summary` in IE 10/11 and Firefox.
* Correct `block` display not defined for `main` in IE 11.
*/
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
main,
nav,
section,
summary {
display: block;
}
/**
* 1. Correct `inline-block` display not defined in IE 8/9.
* 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera.
*/
audio,
canvas,
progress,
video {
display: inline-block; /* 1 */
vertical-align: baseline; /* 2 */
}
/**
* Prevent modern browsers from displaying `audio` without controls.
* Remove excess height in iOS 5 devices.
*/
audio:not([controls]) {
display: none;
height: 0;
}
/**
* Address `[hidden]` styling not present in IE 8/9/10.
* Hide the `template` element in IE 8/9/11, Safari, and Firefox < 22.
*/
[hidden],
template {
display: none;
}
/* Links
========================================================================== */
/**
* Remove the gray background color from active links in IE 10.
*/
a {
background: transparent;
}
/**
* Improve readability when focused and also mouse hovered in all browsers.
*/
a:active,
a:hover {
outline: 0;
}
/* Text-level semantics
========================================================================== */
/**
* Address styling not present in IE 8/9/10/11, Safari, and Chrome.
*/
abbr[title] {
border-bottom: 1px dotted;
}
/**
* Address style set to `bolder` in Firefox 4+, Safari, and Chrome.
*/
b,
strong {
font-weight: bold;
}
/**
* Address styling not present in Safari and Chrome.
*/
dfn {
font-style: italic;
}
/**
* Address variable `h1` font-size and margin within `section` and `article`
* contexts in Firefox 4+, Safari, and Chrome.
*/
h1 {
font-size: 2em;
margin: 0.67em 0;
}
/**
* Address styling not present in IE 8/9.
*/
mark {
background: #ff0;
color: #000;
}
/**
* Address inconsistent and variable font size in all browsers.
*/
small {
font-size: 80%;
}
/**
* Prevent `sub` and `sup` affecting `line-height` in all browsers.
*/
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
/* Embedded content
========================================================================== */
/**
* Remove border when inside `a` element in IE 8/9/10.
*/
img {
border: 0;
}
/**
* Correct overflow not hidden in IE 9/10/11.
*/
svg:not(:root) {
overflow: hidden;
}
/* Grouping content
========================================================================== */
/**
* Address margin not present in IE 8/9 and Safari.
*/
figure {
margin: 1em 40px;
}
/**
* Address differences between Firefox and other browsers.
*/
hr {
-moz-box-sizing: content-box;
box-sizing: content-box;
height: 0;
}
/**
* Contain overflow in all browsers.
*/
pre {
overflow: auto;
}
/**
* Address odd `em`-unit font size rendering in all browsers.
*/
code,
kbd,
pre,
samp {
font-family: monospace, monospace;
font-size: 1em;
}
/* Forms
========================================================================== */
/**
* Known limitation: by default, Chrome and Safari on OS X allow very limited
* styling of `select`, unless a `border` property is set.
*/
/**
* 1. Correct color not being inherited.
* Known issue: affects color of disabled elements.
* 2. Correct font properties not being inherited.
* 3. Address margins set differently in Firefox 4+, Safari, and Chrome.
*/
button,
input,
optgroup,
select,
textarea {
color: inherit; /* 1 */
font: inherit; /* 2 */
margin: 0; /* 3 */
}
/**
* Address `overflow` set to `hidden` in IE 8/9/10/11.
*/
button {
overflow: visible;
}
/**
* Address inconsistent `text-transform` inheritance for `button` and `select`.
* All other form control elements do not inherit `text-transform` values.
* Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera.
* Correct `select` style inheritance in Firefox.
*/
button,
select {
text-transform: none;
}
/**
* 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`
* and `video` controls.
* 2. Correct inability to style clickable `input` types in iOS.
* 3. Improve usability and consistency of cursor style between image-type
* `input` and others.
*/
button,
html input[type="button"], /* 1 */
input[type="reset"],
input[type="submit"] {
-webkit-appearance: button; /* 2 */
cursor: pointer; /* 3 */
}
/**
* Re-set default cursor for disabled elements.
*/
button[disabled],
html input[disabled] {
cursor: default;
}
/**
* Remove inner padding and border in Firefox 4+.
*/
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
/**
* Address Firefox 4+ setting `line-height` on `input` using `!important` in
* the UA stylesheet.
*/
input {
line-height: normal;
}
/**
* It's recommended that you don't attempt to style these elements.
* Firefox's implementation doesn't respect box-sizing, padding, or width.
*
* 1. Address box sizing set to `content-box` in IE 8/9/10.
* 2. Remove excess padding in IE 8/9/10.
*/
input[type="checkbox"],
input[type="radio"] {
box-sizing: border-box; /* 1 */
padding: 0; /* 2 */
}
/**
* Fix the cursor style for Chrome's increment/decrement buttons. For certain
* `font-size` values of the `input`, it causes the cursor style of the
* decrement button to change from `default` to `text`.
*/
input[type="number"]::-webkit-inner-spin-button,
input[type="number"]::-webkit-outer-spin-button {
height: auto;
}
/**
* 1. Address `appearance` set to `searchfield` in Safari and Chrome.
* 2. Address `box-sizing` set to `border-box` in Safari and Chrome
* (include `-moz` to future-proof).
*/
input[type="search"] {
-webkit-appearance: textfield; /* 1 */
-moz-box-sizing: content-box;
-webkit-box-sizing: content-box; /* 2 */
box-sizing: content-box;
}
/**
* Remove inner padding and search cancel button in Safari and Chrome on OS X.
* Safari (but not Chrome) clips the cancel button when the search input has
* padding (and `textfield` appearance).
*/
input[type="search"]::-webkit-search-cancel-button,
input[type="search"]::-webkit-search-decoration {
-webkit-appearance: none;
}
/**
* Define consistent border, margin, and padding.
*/
fieldset {
border: 1px solid #c0c0c0;
margin: 0 2px;
padding: 0.35em 0.625em 0.75em;
}
/**
* 1. Correct `color` not being inherited in IE 8/9/10/11.
* 2. Remove padding so people aren't caught out if they zero out fieldsets.
*/
legend {
border: 0; /* 1 */
padding: 0; /* 2 */
}
/**
* Remove default vertical scrollbar in IE 8/9/10/11.
*/
textarea {
overflow: auto;
}
/**
* Don't inherit the `font-weight` (applied by a rule above).
* NOTE: the default cannot safely be changed in Chrome and Safari on OS X.
*/
optgroup {
font-weight: bold;
}
/* Tables
========================================================================== */
/**
* Remove most spacing between table cells.
*/
table {
border-collapse: collapse;
border-spacing: 0;
}
td,
th {
padding: 0;
}


@ -9,12 +9,12 @@
<meta name="firefox-accounts" content="supported" />
<link
rel="stylesheet"
href="/bower_components/normalize-css/normalize.css"
href="/components/normalize.css"
type="text/css"
/>
<link rel="stylesheet" href="/css/main.css" type="text/css" />
<script src="/bower_components/modernizr/modernizr.js"></script>
<script src="/bower_components/jquery/dist/jquery.min.js"></script>
<script src="/components/modernizr.js"></script>
<script src="/components/jquery.min.js"></script>
</head>
<body>

5128
packages/browserid-verifier/package-lock.json generated

Diff not rendered because it is too large.


@ -30,21 +30,20 @@
},
"devDependencies": {
"audit-filter": "0.5.0",
"eslint": "6.6.0",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "2.0.1",
"fxa-shared": "workspace:*",
"mocha": "5.2.0",
"npm-run-all": "4.1.5",
"pm2": "^4.2.3",
"prettier": "1.18.2",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"request": "2.88.0",
"should": "13.2.3",
"temp": "0.9.0",
"walk": "2.3.14"
},
"scripts": {
"lint": "npm-run-all --parallel lint:*",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:eslint": "eslint .",
"lint": "eslint .",
"pretest": "npm run lint",
"test": "mocha --exit -t 5000 -R spec tests/*.js",
"format": "prettier '**' --write",


@ -13,6 +13,7 @@ module.exports = {
IP_ADDRESS: '0.0.0.0',
FORCE_INSECURE_LOOKUP_OVER_HTTP: 'true',
},
filter_env: ['npm_'],
max_restarts: '1',
min_uptime: '2m',
},


@ -5,5 +5,4 @@ Dockerfile
*.ico
*.txt
ansible/*
static/bower_components/*
static/img/*
static/img/*

9425
packages/fortress/package-lock.json generated

Diff not rendered because it is too large.


@ -1,5 +1,5 @@
{
"name": "firefox-fortress",
"name": "fortress",
"description": "A simple tasklist app that demonstrates FxA Sign-In",
"version": "0.0.2",
"author": {
@ -32,17 +32,18 @@
"devDependencies": {
"audit-filter": "0.5.0",
"browser-sync": "^2.26.7",
"eslint": "6.6.0",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "2.0.1",
"fxa-shared": "workspace:*",
"nodemon": "^2.0.3",
"npm-run-all": "4.1.5",
"pm2": "^4.2.3",
"prettier": "1.18.2",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"stylelint": "^13.3.3",
"stylelint-config-standard": "^20.0.0"
},
"scripts": {
"lint": "npm-run-all --parallel lint:*",
"lint": "npm-run-all --parallel lint:eslint lint:style",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:eslint": "eslint .",
"lint:style": "stylelint static/*.css",


@ -14,6 +14,7 @@ module.exports = {
NODE_ENV: 'dev',
PORT: '9292',
},
filter_env: ['npm_'],
min_uptime: '2m',
},
],

24556
packages/fxa-admin-panel/package-lock.json generated

Diff not rendered because it is too large.


@ -6,23 +6,25 @@
"start": "pm2 start pm2.config.js",
"stop": "pm2 stop pm2.config.js",
"restart": "pm2 restart pm2.config.js",
"lint:eslint": "eslint .",
"lint": "eslint .",
"build": "npm-run-all build:client build:server",
"build:client": "PUBLIC_URL=/ INLINE_RUNTIME_CHUNK=false rescripts build",
"build:client": "SKIP_PREFLIGHT_CHECK=true PUBLIC_URL=/ INLINE_RUNTIME_CHUNK=false CI=false rescripts build",
"build:server": "tsc -p server/tsconfig.json",
"test": "npm-run-all test:*",
"test:frontend": "rescripts test --coverage --verbose",
"test": "npm-run-all test:frontend test:server",
"test:frontend": "SKIP_PREFLIGHT_CHECK=true PUBLIC_URL=/ INLINE_RUNTIME_CHUNK=false CI=true rescripts test --coverage --verbose",
"test:server": "jest --coverage --verbose --config server/jest.config.js",
"format": "prettier '**' --write",
"eject": "react-scripts eject"
},
"dependencies": {
"apollo-boost": "^0.4.7",
"apollo-boost": "^0.4.9",
"apollo-client": "^2.6.4",
"body-parser": "^1.19.0",
"convict": "^5.2.0",
"dateformat": "^3.0.3",
"express": "^4.17.1",
"express-http-proxy": "^1.6.0",
"fxa-react": "workspace:*",
"graphql": "^14.6.0",
"helmet": "^3.21.3",
"mozlog": "^2.2.0",
@ -33,7 +35,7 @@
"react-apollo": "^3.1.3",
"react-dom": "^16.13.0",
"react-router-dom": "^5.1.2",
"react-scripts": "3.4.0",
"react-scripts": "^3.4.1",
"serve-static": "^1.14.1"
},
"eslintConfig": {
@ -54,32 +56,40 @@
"devDependencies": {
"@apollo/react-testing": "3.0.0",
"@rescripts/cli": "0.0.14",
"@testing-library/dom": "~5.0.0",
"@testing-library/jest-dom": "^4.2.4",
"@testing-library/react": "^9.4.1",
"@testing-library/user-event": "^7.2.1",
"@types/chance": "^1.0.10",
"@types/convict": "^4.2.1",
"@types/dateformat": "^3.0.1",
"@types/eslint": "6.8.1",
"@types/express": "^4.17.2",
"@types/helmet": "0.0.45",
"@types/jest": "^25.1.3",
"@types/jsdom": "^12.2.4",
"@types/on-headers": "^1.0.0",
"@types/react": "^16.8.0",
"@types/react-router-dom": "^5.1.3",
"@types/serve-static": "^1.13.3",
"@types/supertest": "^2.0.8",
"@typescript-eslint/eslint-plugin": "^2.21.0",
"@typescript-eslint/parser": "^2.21.0",
"apollo-cache-inmemory": "^1.6.2",
"apollo-link": "^1.2.12",
"apollo-utilities": "^1.3.2",
"chance": "^1.1.4",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "^2.0.1",
"eslint-plugin-jest": "^23.8.1",
"eslint-plugin-react": "^7.18.3",
"fxa-react": "file:../fxa-react",
"pm2": "^4.2.3",
"prettier": "^1.19.1",
"fxa-shared": "workspace:*",
"jest": "^25.1.3",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"supertest": "^4.0.2",
"ts-jest": "^25.2.1",
"ts-node": "^8.6.2",
"typescript": "^3.8.3"
"ts-node": "^8.10.1",
"typescript": "3.8.3"
}
}


@ -2,12 +2,14 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
const { resolve } = require('path');
module.exports = {
apps: [
{
name: 'admin',
cwd: __dirname,
script: 'ts-node -P server/tsconfig.json server/bin/fxa-admin-panel.ts',
cwd: resolve(__dirname, 'server'),
script: 'node -r ts-node/register bin/fxa-admin-panel.ts',
max_restarts: '1',
min_uptime: '2m',
env: {
@ -15,22 +17,28 @@ module.exports = {
NODE_ENV: 'development',
NODE_OPTIONS: '--inspect=9140',
PROXY_STATIC_RESOURCES_FROM: 'http://localhost:8092',
CONFIG_FILES: 'config/secrets.json',
CONFIG_FILES: '../config/secrets.json',
PORT: '8091',
},
filter_env: ['npm_'],
},
{
name: 'admin-react',
cwd: __dirname,
script: 'rescripts start',
script: 'yarn rescripts start',
max_restarts: '1',
min_uptime: '2m',
env: {
SKIP_PREFLIGHT_CHECK: 'true',
NODE_ENV: 'development',
PUBLIC_URL: 'http://localhost:8091',
BROWSER: 'NONE',
PORT: '8092',
PATH: process.env.PATH.split(':')
.filter(p => !p.includes(process.env.TMPDIR))
.join(':'),
},
filter_env: ['npm_', 'BERRY_BIN_FOLDER'],
},
],
};


@ -1,14 +1,5 @@
#!/bin/bash -ex
DIR=$(dirname "$0")
cd "$DIR/../../../"
npx lerna bootstrap \
--scope fxa-react \
--scope fxa-admin-panel
cd packages/fxa-admin-panel
PUBLIC_URL=/ INLINE_RUNTIME_CHUNK=false CI=false npm run build
CI=yes npm test
yarn workspaces focus fxa-admin-panel
yarn build
yarn test


@ -13,7 +13,8 @@
"forceConsistentCasingInFileNames": true,
"moduleResolution": "node",
"resolveJsonModule": true,
"typeRoots": ["../types", "../node_modules/@types"]
"typeRoots": ["../types", "../node_modules/@types", "../../../node_modules/@types"],
"types": ["jest", "mozlog"]
},
"include": ["."]
}


@ -25,7 +25,8 @@
"jsx": "preserve",
"typeRoots": [
"./types",
"node_modules/@types"
"node_modules/@types",
"../../node_modules/@types"
]
},
"include": [

7114
packages/fxa-admin-server/package-lock.json generated

Diff not rendered because it is too large.


@ -3,15 +3,14 @@
"version": "1.154.0",
"description": "FxA GraphQL Admin Server",
"scripts": {
"build": "./node_modules/typescript/bin/tsc",
"lint": "npm-run-all --parallel lint:*",
"build": "tsc",
"lint": "eslint *",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:tslint": "./node_modules/tslint/bin/tslint -p .",
"watch": "tsc -w",
"start": "pm2 start pm2.config.js",
"stop": "pm2 stop pm2.config.js",
"restart": "pm2 restart pm2.config.js",
"test": "./node_modules/mocha/bin/mocha -r ts-node/register src/test/**/*.spec.ts src/test/**/**/*.spec.ts src/test/**/**/**/*.spec.ts",
"test": "mocha -r ts-node/register src/test/**/*.spec.ts src/test/**/**/*.spec.ts src/test/**/**/**/*.spec.ts",
"email-bounce": "ts-node ./scripts/email-bounce.ts"
},
"repository": {
@ -48,6 +47,7 @@
"@types/chai": "^4.2.10",
"@types/chance": "^1.0.9",
"@types/convict": "^4.2.1",
"@types/eslint": "6.8.1",
"@types/graphql": "^14.5.0",
"@types/mocha": "^7.0.2",
"@types/node": "^13.9.1",
@ -58,20 +58,18 @@
"audit-filter": "^0.5.0",
"chai": "^4.2.0",
"chance": "^1.1.4",
"eslint": "^6.8.0",
"fxa-shared": "workspace:*",
"mocha": "^7.1.2",
"nodemon": "^2.0.2",
"npm-run-all": "^4.1.5",
"pm2": "^4.2.3",
"prettier": "^1.19.1",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"proxyquire": "^2.1.3",
"sinon": "^9.0.1",
"supertest": "^4.0.2",
"ts-node": "^8.8.2",
"ts-node": "^8.10.1",
"ts-sinon": "^1.0.25",
"tslint": "^6.1.0",
"tslint-config-prettier": "^1.18.0",
"tslint-plugin-prettier": "^2.1.0",
"typescript": "^3.8.3",
"typescript": "3.8.3",
"yargs": "^15.3.1"
}
}

View File

@ -6,7 +6,7 @@ module.exports = {
apps: [
{
name: 'admin-server',
script: 'ts-node src/bin/main.ts',
script: 'node -r ts-node/register src/bin/main.ts',
cwd: __dirname,
max_restarts: '1',
min_uptime: '2m',
@ -17,6 +17,7 @@ module.exports = {
TS_NODE_FILES: 'true',
PORT: '8090' // TODO: this needs to get added to src/config.ts
},
filter_env: ['npm_'],
watch: ['src']
}
]

View File

@ -38,7 +38,6 @@ async function addBounceToDB() {
await knex.destroy();
// tslint:disable-next-line: no-console
console.log(
`=> Created ${count} email ${count === 1 ? 'bounce' : 'bounces'} for ${bounce.email}`
);

View File

@ -15,7 +15,7 @@
"forceConsistentCasingInFileNames": true,
"noEmitHelpers": true,
"importHelpers": true,
"typeRoots": ["./types", "node_modules/@types"]
"typeRoots": ["./types", "node_modules/@types", "../../node_modules/@types"]
},
"include": ["./src"],
"exclude": ["node_modules"]

View File

@ -1,9 +0,0 @@
{
"extends": ["tslint:recommended", "tslint-config-prettier"],
"rulesDirectory": ["tslint-plugin-prettier"],
"rules": {
"interface-name": [true, "never-prefix"],
"interface-over-type-literal": false,
"prettier": [true, ".prettierrc"]
}
}

View File

@ -1,62 +0,0 @@
version: 2
jobs:
build:
docker:
- image: docker:stable-git
steps:
- checkout
- setup_remote_docker:
docker_layer_caching: true
- run:
name: Build Docker image
command: docker build -f Dockerfile-node -t ci:latest-node .
- run:
name: Save Docker image
command: mkdir -p /cache; docker save -o /cache/docker.tar "ci:latest-node"
- save_cache:
key: v1-{{ .Branch }}-{{ .Revision }}
paths:
- /cache/docker.tar
deploy:
docker:
- image: docker:stable-git
steps:
- setup_remote_docker:
docker_layer_caching: true
- restore_cache:
key: v1-{{ .Branch }}-{{ .Revision }}
- run:
name: Load Docker image
command: docker load -i /cache/docker.tar
- run:
name : Push to Docker Hub
command: |
echo $DOCKER_PASS | docker login -u $DOCKER_USER --password-stdin
if [ "${CIRCLE_BRANCH}" == "master" ]; then
docker tag ci:latest-node ${DOCKERHUB_REPO}:latest-node
docker images
docker push ${DOCKERHUB_REPO}:latest-node
elif [ ! -z "${CIRCLE_TAG}" ]; then
echo "${DOCKERHUB_REPO}:${CIRCLE_TAG}"
docker tag ci:latest-node "${DOCKERHUB_REPO}:${CIRCLE_TAG}-node"
docker images
docker push "${DOCKERHUB_REPO}:${CIRCLE_TAG}-node"
fi
workflows:
version: 2
build-deploy:
jobs:
- build:
filters:
tags:
only: /.*/
- deploy:
requires:
- build
filters:
tags:
only: /.*/
branches:
only: master

View File

@ -1,194 +0,0 @@
parserOptions:
ecmaVersion: 2018
plugins:
- fxa
extends:
- plugin:fxa/server
rules:
accessor-pairs: [ 2, { "setWithoutGet": true } ]
array-bracket-spacing: [ 2, "always" ]
array-callback-return: 0
arrow-body-style: 0
arrow-parens: 0
arrow-spacing: [ 2, { "before": true, "after": true } ]
block-scoped-var: 1
block-spacing: [ 2, "always" ]
brace-style: [ 2, "1tbs" ]
callback-return: 0
comma-spacing: [ 2, { "before": false, "after": true } ]
computed-property-spacing: [ 2, "never" ]
consistent-this: 0
constructor-super: 0
curly: [ 2, "all" ]
default-case: 0
dot-location: [ 2, "property" ]
dot-notation: [ 2, { "allowKeywords": true } ]
eqeqeq: 2
func-names: 0
func-style: [ 2, "declaration" ]
generator-star-spacing: [ 2, "after" ]
global-require: 0
guard-for-in: 2
handle-callback-err: [ 2, "error" ]
id-blacklist: 0
id-length: 0
id-match: 0
indent: [ 2, 2, { "SwitchCase": 1 } ]
init-declarations: 0
jsx-quotes: 0
key-spacing: [ 2, { "beforeColon": false, "afterColon": true } ]
keyword-spacing: [ 2, { "before": true, "after": true } ]
linebreak-style: [ 2, "unix" ]
lines-around-comment: 0
max-depth: [ 1, 4 ]
max-nested-callbacks: [ 1, 3 ]
max-params: 0
max-statements: 0
new-cap: 2
new-parens: 2
newline-after-var: 0
newline-before-return: 0
newline-per-chained-call: 1
no-alert: 2
no-array-constructor: 2
no-bitwise: 1
no-caller: 2
no-case-declarations: 2
no-catch-shadow: 2
no-class-assign: 2
no-cond-assign: [ 2, "always" ]
no-confusing-arrow: 0
no-console: 0
no-constant-condition: 2
no-const-assign: 2
no-continue: 0
no-control-regex: 2
no-delete-var: 2
no-div-regex: 0
no-dupe-args: 2
no-dupe-class-members: 2
no-dupe-keys: 2
no-duplicate-case: 2
no-else-return: 2
no-empty: 2
no-empty-character-class: 2
no-empty-function: 0
no-empty-pattern: 2
no-eq-null: 2
no-ex-assign: 2
no-extend-native: 2
no-extra-bind: 2
no-extra-boolean-cast: 2
no-extra-label: 2
no-extra-parens: [ 2, "all", { "nestedBinaryExpressions": false } ]
no-extra-semi: 2
no-fallthrough: 1
no-floating-decimal: 2
no-func-assign: 2
no-implicit-coercion: 0
no-implicit-globals: 0
no-implied-eval: 2
no-inline-comments: 2
no-inner-declarations: [ 2, "both" ]
no-invalid-regexp: 2
no-invalid-this: 0
no-iterator: 2
no-labels: 2
no-label-var: 2
no-lonely-if: 2
no-lone-blocks: 1
no-loop-func: 2
no-magic-numbers: 0
no-mixed-requires: 0
no-mixed-spaces-and-tabs: 2
no-multiple-empty-lines: [ 2, { "max": 1, "maxEOF": 1, "maxBOF": 0 } ]
no-multi-spaces: 2
no-multi-str: 2
no-native-reassign: 2
no-negated-condition: 2
no-negated-in-lhs: 2
no-nested-ternary: 2
no-new-func: 2
no-new-object: 2
no-new-require: 2
no-new-symbol: 2
no-new-wrappers: 2
no-obj-calls: 2
no-octal: 2
no-octal-escape: 2
no-param-reassign: 0
no-path-concat: 2
no-plusplus: 0
no-process-env: 0
no-proto: 2
no-redeclare: [ 2, { "builtinGlobals": true } ]
no-regex-spaces: 2
no-restricted-globals: 2
no-restricted-imports: 0
no-restricted-modules: 0
no-restricted-syntax: 0
no-return-assign: 0
no-self-assign: 2
no-self-compare: 2
no-shadow: [ 2, { "hoist": "all" } ]
no-shadow-restricted-names: 2
no-spaced-func: 2
no-sparse-arrays: 2
no-sync: 0
no-this-before-super: 2
no-throw-literal: 1
no-undef-init: 2
no-undefined: 0
no-underscore-dangle: 2
no-unexpected-multiline: 2
no-unmodified-loop-condition: 2
no-unneeded-ternary: 2
no-unreachable: 2
no-unused-expressions: 2
no-unused-labels: 2
no-unused-vars: [ 2, "all" ]
no-useless-call: 2
no-useless-concat: 2
no-useless-constructor: 2
no-use-before-define: 0
no-var: 1
no-void: 2
no-warning-comments: 0
no-whitespace-before-property: 2
object-curly-spacing: [ 2, "always" ]
object-shorthand: [ 2, "always" ]
one-var: 0
one-var-declaration-per-line: 0
operator-assignment: [ 2, "always" ]
operator-linebreak: [ 2, "after" ]
padded-blocks: [ 2, "never" ]
prefer-arrow-callback: 2
prefer-const: 2
prefer-reflect: 0
prefer-rest-params: 2
prefer-spread: 2
prefer-template: 2
quote-props: [ 2, "as-needed" ]
radix: 0
require-jsdoc: 0
require-yield: 2
semi-spacing: 0
sort-imports: 0
sort-vars: 0
space-before-blocks: [ 2, "always" ]
space-before-function-paren: [ 2, { "anonymous": "always", "named": "always" } ]
space-infix-ops: 2
space-unary-ops: 0
spaced-comment: 0
strict: [ 2, "global" ]
template-curly-spacing: [ 2, "never" ]
use-isnan: 2
valid-jsdoc: 0
vars-on-top: 0
wrap-iife: 2
wrap-regex: 0
yield-star-spacing: [ 2, { "before": false, "after": true } ]
yoda: [ 2, "never" ]

View File

@ -1 +0,0 @@
2018-02-14

View File

@ -1 +0,0 @@
2018-02-22

View File

@ -1,12 +0,0 @@
language: node_js
node_js:
- "8"
branches:
only:
- master
before_install:
- sudo apt-get update -qq
- sudo apt-get install libboost-all-dev
script:
- npm run lint

View File

@ -1,15 +0,0 @@
# Community Participation Guidelines
This repository is governed by Mozilla's code of conduct and etiquette guidelines.
For more details, please read the
[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/).
## How to Report
For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page.
<!--
## Project Specific Etiquette
In some cases, there will be additional project etiquette i.e.: (https://bugzilla.mozilla.org/page.cgi?id=etiquette.html).
Please update for your project.
-->

View File

@ -1,15 +0,0 @@
FROM amazonlinux:latest
RUN yum install -y gcc zlib zlib-devel openssl openssl-devel wget zip && \
wget https://www.python.org/ftp/python/2.7.13/Python-2.7.13.tgz && \
tar -xzvf Python-2.7.13.tgz && \
cd Python-2.7.13 && ./configure && make && make install
RUN python -m ensurepip --upgrade
RUN mkdir /app
WORKDIR /app
COPY requirements.txt /app/
RUN pip install -r requirements.txt -t lambda_package
COPY amplitude.py /app/
RUN zip lambda *.py && \
cd lambda_package && zip -r ../lambda.zip .

View File

@ -1,17 +0,0 @@
FROM node:8-stretch AS node-builder
RUN apt-get update && \
apt-get install -y bison cmake flex libboost-dev libboost-filesystem-dev libboost-regex-dev libboost-system-dev
USER node
RUN mkdir /home/node/fxa-amplitude-send
WORKDIR /home/node/fxa-amplitude-send
COPY package*.json ./
RUN npm install
FROM node:8-stretch-slim
RUN apt-get update && \
apt-get install -y libboost-regex-dev
USER node
RUN mkdir /home/node/fxa-amplitude-send
WORKDIR /home/node/fxa-amplitude-send
COPY --chown=node:node --from=node-builder /home/node/fxa-amplitude-send .
COPY --chown=node:node . .

View File

@ -1,373 +0,0 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View File

@ -1,22 +0,0 @@
SYSTEMPYTHON = `which python2 python | head -n 1`
VIRTUALENV = virtualenv --python=$(SYSTEMPYTHON)
ENV = ./build
PIP_INSTALL = $(ENV)/bin/pip install
DEPS = $(ENV)/.done
.PHONY: test package
package:
docker build . -t fxa-amplitude-send:latest
$(eval CONTAINER_ID := $(shell docker create fxa-amplitude-send:latest))
docker cp $(CONTAINER_ID):/app/lambda.zip .
docker rm $(CONTAINER_ID)
test: $(DEPS)
./test.sh
$(DEPS):
$(VIRTUALENV) --no-site-packages $(ENV)
$(PIP_INSTALL) -r requirements.txt
touch $(DEPS)

View File

@ -1,257 +0,0 @@
from base64 import b64decode
from Queue import Queue
import boto3
import hashlib
import hmac
import json
import os
import requests
import sys
import threading
import zlib
AMPLITUDE_API_KEY = os.environ["FXA_AMPLITUDE_API_KEY"]
HMAC_KEY = os.environ["FXA_AMPLITUDE_HMAC_KEY"]
THREAD_COUNT = int(os.environ["FXA_AMPLITUDE_THREAD_COUNT"])
# For crude pre-emptive rate-limit obedience.
MAX_EVENTS_PER_BATCH = 10
MAX_BATCHES_PER_SECOND = 100
IDENTIFY_VERBS = ("$set", "$setOnce", "$add", "$append", "$unset")
# Cargo-culted from the internet. zlib >= 1.2.3.5 apparently supports
# specifying wbits=0 but that didn't work for me locally. This did.
ZLIB_WBITS = 32 + zlib.MAX_WBITS
def kms_decrypt_env(key):
"""Decrypt environment variable"""
return kms_decrypt(b64decode(os.environ[key]))
def kms_decrypt(encrypted_data):
"""Decrypt KMS variables"""
res = boto3.client("kms").decrypt(
CiphertextBlob=encrypted_data,
)
return res["Plaintext"].decode("utf-8")
if "LAMBDA_TASK_ROOT" in os.environ:
AMPLITUDE_API_KEY = str(kms_decrypt_env("FXA_AMPLITUDE_API_KEY"))
HMAC_KEY = str(kms_decrypt_env("FXA_AMPLITUDE_HMAC_KEY"))
def handle (message, context):
# http://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
if (type(message) is str):
message = json.loads(message)
records = message["Records"]
for record in records:
if record["eventSource"] != "aws:s3":
continue
print record["s3"]["bucket"]["name"], record["s3"]["object"]["key"]
s3 = boto3.resource("s3", region_name=record["awsRegion"])
s3_object = s3.Object(record["s3"]["bucket"]["name"], record["s3"]["object"]["key"])
with SenderThreadPool() as pool:
# This will fail if the data is not compressed.
process_compressed(pool, s3_object.get()["Body"].read())
def process_compressed (pool, data):
events = ""
batches = None
for chunk in decompress(data):
events += chunk
partitioned_events = partition_available_events(events)
if is_partitioned(partitioned_events):
events = partitioned_events[2]
batches = process(pool, partitioned_events[0], batches, False)
process(pool, events, batches)
def decompress (data):
decompressor = zlib.decompressobj(ZLIB_WBITS)
for chunk in data:
decompressed = decompressor.decompress(chunk)
if decompressed:
yield decompressed
remaining = decompressor.flush()
if len(remaining) > 0:
yield remaining
def partition_available_events (events):
partitioned_events = events.rpartition("\n")
if not is_partitioned(partitioned_events):
partitioned_events = events.rpartition("\r")
return partitioned_events
def is_partitioned (partition):
return partition[1] != ""
def process (pool, events, batches = None, is_last_call = True):
if batches is None:
batches = {"identify": [], "event": []}
for event_string in events.splitlines():
event = json.loads(event_string)
if "Fields" in event:
# Auth server events are wrapped inside a `Fields` property.
event = event["Fields"]
if "op" in event and "data" in event:
# Mailer events have an extra layer of indirection.
event = json.loads(event["data"])
else:
# Non-mailer events have stringified `event_properties` and `user_properties`.
if "event_properties" in event:
event["event_properties"] = json.loads(event["event_properties"])
if "user_properties" in event:
event["user_properties"] = json.loads(event["user_properties"])
if not is_event_ok(event):
print "skipping malformed event", event
continue
user_id = device_id = None
insert_id_hmac = hmac.new(HMAC_KEY, digestmod=hashlib.sha256)
if "user_id" in event:
user_id_hmac = hmac.new(HMAC_KEY, event["user_id"], hashlib.sha256)
user_id = event["user_id"] = user_id_hmac.hexdigest()
insert_id_hmac.update(user_id)
if "device_id" in event:
device_id = event["device_id"]
insert_id_hmac.update(device_id)
if "session_id" in event:
insert_id_hmac.update(str(event["session_id"]))
insert_id_hmac.update(event["event_type"])
insert_id_hmac.update(str(event["time"]))
event["insert_id"] = insert_id_hmac.hexdigest()
if contains_identify_verbs(event["user_properties"]):
result = process_identify_verbs(event["user_properties"])
batches["identify"].append({"user_id": user_id, "device_id": device_id,
"user_properties": result["identify"]})
event["user_properties"] = result["pruned"]
batches["event"].append(event)
if len(batches["event"]) == MAX_EVENTS_PER_BATCH:
pool.send(batches)
batches = {"identify": [], "event": []}
if not is_last_call:
return batches
if len(batches["event"]) > 0:
send(batches)
def is_event_ok (event):
# https://amplitude.zendesk.com/hc/en-us/articles/204771828#keys-for-the-event-argument
return ("device_id" in event or "user_id" in event) and "event_type" in event and "time" in event
def contains_identify_verbs (user_properties):
return reduce(lambda contains, verb: contains or verb in user_properties, IDENTIFY_VERBS, False)
def process_identify_verbs (user_properties):
def split (payloads, key):
payloads["identify" if key in IDENTIFY_VERBS else "pruned"][key] = user_properties[key]
return payloads
return reduce(split, user_properties.keys(), {"identify": {}, "pruned": {}})
def send (batches):
if len(batches["identify"]) > 0:
# https://amplitude.zendesk.com/hc/en-us/articles/205406617-Identify-API-Modify-User-Properties#request-format
response = requests.post("https://api.amplitude.com/identify",
data={"api_key": AMPLITUDE_API_KEY, "identification": json.dumps(batches["identify"])})
response.raise_for_status()
# https://amplitude.zendesk.com/hc/en-us/articles/204771828#request-format
response = requests.post("https://api.amplitude.com/httpapi",
data={"api_key": AMPLITUDE_API_KEY, "event": json.dumps(batches["event"])})
# For want of a better error-handling mechanism,
# one failed request fails an entire dump from S3.
response.raise_for_status()
class SenderThreadPool:
"""A simple single-producer multi-consumer thread pool to send batches.
This class manages a pool of background threads to send event batches.
Use it like so:
with SenderThreadPool() as p:
for batches in do_some_stuff_to_generate_batches():
p.send(batches)
The call to send() will push the batch onto an internal queue where it
will get picked up async by the worker threads. The `with` statement
will join all threads before exiting, to ensure that the send gets
completed.
"""
def __init__(self):
self._queue = Queue()
self._threads = []
self._err = None
def __enter__(self):
for _ in xrange(THREAD_COUNT):
t = threading.Thread(target=self._worker_thread)
self._threads.append(t)
t.start()
return self
def __exit__(self, exc_typ, exc_val, exc_tb):
# Push a sentinel so each thread will shut down cleanly.
for t in self._threads:
self._queue.put(None)
# Wait for all the threads to shut down.
for t in self._threads:
t.join()
# If we're exiting successfully, but there was an error
# in one of the worker threads, raise it now.
if exc_typ is None and self._err is not None:
raise self._err
def send(self, batches):
# If one of the worker threads raised an error,
# re-raise it in the main thread.
if self._err is not None:
raise self._err
self._queue.put(batches)
def _worker_thread(self):
try:
batches = self._queue.get()
while batches is not None:
send(batches)
batches = self._queue.get()
except Exception as err:
self._err = err
if __name__ == "__main__":
argc = len(sys.argv)
if argc == 1:
with SenderThreadPool() as pool:
process(pool, sys.stdin.read())
elif argc == 2:
with SenderThreadPool() as pool:
with open(sys.argv[1]) as f:
process_compressed(pool, f)
else:
sys.exit("Usage: {} <path-to-gzipped-log-file>\nOR pipe uncompressed logs via stdin".format(sys.argv[0]))

View File

@ -1,22 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const fs = require('fs')
const marketing = require('../marketing')
if (process.argv.length !== 3) {
console.error(`Usage: ${process.argv[1]} file`)
process.exit(1)
}
main()
async function main () {
const local_file_stream = fs.createReadStream(process.argv[2])
await marketing.processStream(local_file_stream)
}

View File

@ -1,57 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const AWS = require('aws-sdk')
const S3 = new AWS.S3({})
const SQS = new AWS.SQS({})
const marketing = require('../marketing')
const queue_url = process.env.SQS_QUEUE_URL
main()
async function main () {
console.log(`Fetching message from ${queue_url}`)
const messages = await SQS.receiveMessage({
MaxNumberOfMessages: 1,
QueueUrl: queue_url,
WaitTimeSeconds: 20
}).promise()
if (! messages.Messages) {
return console.log('No messages in queue')
}
for (const message of messages.Messages) {
const receipt_handle = message.ReceiptHandle
const s3_notification = JSON.parse(message.Body)
if (! s3_notification.Records) {
break
}
for (const s3_object of s3_notification.Records) {
const s3_bucket = s3_object.s3.bucket.name
const s3_key = s3_object.s3.object.key
console.log(`Fetching file from s3://${s3_bucket}/${s3_key}`)
const remote_file_stream = S3.getObject({
Bucket: s3_bucket,
Key: s3_key
}).createReadStream()
const eventCount = await marketing.processStream(remote_file_stream)
console.log(`Done sending ${eventCount} events to Amplitude`)
const delete_response = await SQS.deleteMessage({
QueueUrl: queue_url,
ReceiptHandle: receipt_handle
}).promise()
console.log(delete_response)
}
}
}

View File

@ -1,97 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const aws = require('aws-sdk')
const Promise = require('bluebird')
const { SQS_QUEUE_URL: QUEUE_URL } = process.env
const IMPL_PATTERN = /^(?:events|summary)$/
const [ IMPL, PATH ] = parseArgs(process.argv)
const impl = require(`../sync-${IMPL}`)
run()
function parseArgs (argv) {
const argc = argv.length
if (argc < 3 || argc > 4 || ! IMPL_PATTERN.test(argv[2])) {
console.error(`Usage: ${argv[1]} events|summary [path]`)
console.error('Specify either `events` or `summary` to indicate which Sync import job you wish to run.')
console.error('[path] may be on the file system or S3. S3 paths must be prefixed with `s3://`.')
console.error('Omit [path] to process messages from SQS.')
process.exit(1)
}
[ 'FXA_AMPLITUDE_API_KEY', 'SYNC_INSERTID_HMAC_KEY' ].forEach(key => {
if (! process.env[key]) {
console.error(`Error: You must set the ${key} environment variable`)
process.exit(1)
}
})
if (argc === 3 && ! QUEUE_URL) {
console.error('Error: You must set the SQS_QUEUE_URL environment variable')
process.exit(1)
}
return [ argv[2], argv[3] ]
}
async function run () {
try {
let paths
if (PATH) {
paths = [ PATH ]
} else {
paths = await processQueue()
}
paths.forEach(path => impl.run(path))
} catch (error) {
console.error(error.stack)
process.exit(1)
}
}
async function processQueue () {
console.log(`Fetching message from ${QUEUE_URL}`)
const sqs = new aws.SQS({})
const { Messages: messages } = await sqs.receiveMessage({
MaxNumberOfMessages: 10,
QueueUrl: QUEUE_URL,
WaitTimeSeconds: 20
}).promise()
if (! messages) {
console.log('Empty queue')
return []
}
return Promise.all(messages.reduce((paths, message) => {
const { records } = JSON.parse(message.Body)
if (records) {
records.forEach(record => {
const { s3 } = record
if (s3) {
paths.push(`s3://${s3.bucket.name}/${s3.object.key}`)
}
})
}
sqs.deleteMessage({
QueueUrl: QUEUE_URL,
ReceiptHandle: message.ReceiptHandle
})
return paths
}, []))
}

View File

@ -1,237 +0,0 @@
#!/usr/bin/env node
'use strict'
const fs = require('fs')
const path = require('path')
const zlib = require('zlib')
const TIME_FORMAT = /(201[78])-([0-9]{2})-([0-9]{2})-([0-9]{2})-([0-9]{2})/
const CATEGORY_FORMAT = /logging\.s3\.fxa\.([a-z]+)_server/
const VERBOSE = false
const args = process.argv.slice(2)
const fileNames = args.map(dir => fs.readdirSync(dir).map((file) => path.join(dir, file)))
.reduce((result, fileName) => result.concat(fileName), [])
const missingUserAndDeviceAndSessionIds = createStat()
const missingUserAndDeviceIds = createStat()
const missingUserAndSessionIds = createStat()
const missingDeviceAndSessionIds = createStat()
const missingUserIds = createStat()
const missingDeviceIds = createStat()
const missingSessionIds = createStat()
const futureSessionIds = createStat()
const futureTimes = createStat()
const users = new Map()
const devices = new Map()
const events = fileNames.reduce((previousEvents, fileName) => {
let time = TIME_FORMAT.exec(fileName)
if (! time) {
return
}
time = time.slice(1)
let category = CATEGORY_FORMAT.exec(fileName)
if (! category) {
return
}
category = category[1]
if (! previousEvents[category]) {
return
}
const target = timestamp(time)
const isContentServerEvent = category === 'content'
const fileBuffer = fs.readFileSync(fileName)
let text
if (fileBuffer[0] === 0x1f && fileBuffer[1] === 0x8b && fileBuffer[2] === 0x8) {
text = zlib.gunzipSync(fileBuffer).toString('utf8')
} else {
text = fileBuffer.toString('utf8')
}
const lines = text.split('\n')
const data = lines
.filter(line => line.indexOf('amplitudeEvent') !== -1)
.map((line, index) => {
let event
try {
event = JSON.parse(line)
if (event.Fields) {
event = event.Fields
}
} catch (_) {
event = {}
}
const datum = {
file: fileName,
line: index + 1,
event
}
const uid = event.user_id
const deviceId = event.device_id
const sessionId = event.session_id
if (! uid) {
if (! deviceId) {
if (sessionId) {
missingUserAndDeviceIds[category].push(datum)
} else {
missingUserAndDeviceAndSessionIds[category].push(datum)
}
} else if (sessionId) {
missingUserIds[category].push(datum)
} else {
missingUserAndSessionIds[category].push(datum)
}
} else if (! deviceId) {
if (sessionId) {
missingDeviceIds[category].push(datum)
} else {
missingDeviceAndSessionIds[category].push(datum)
}
} else if (! sessionId) {
missingSessionIds[category].push(datum)
}
if (sessionId > target) {
futureSessionIds[category].push(datum)
}
if (event.time > target) {
futureTimes[category].push(datum)
}
if (isContentServerEvent && uid && deviceId && sessionId) {
const user = getUser(uid)
multiMapSet(user.deviceSessions, deviceId, sessionId)
multiMapSet(user.sessionDevices, sessionId, deviceId)
users.set(uid, user)
const device = getDevice(deviceId)
multiMapSet(device.sessionUsers, sessionId, uid)
devices.set(deviceId, device)
}
return datum
})
previousEvents[category] = previousEvents[category].concat(data)
return previousEvents
}, createStat())
displayStat(events, 'EVENTS')
displayStatVerbose(missingUserAndDeviceAndSessionIds, 'MISSING user_id AND device_id AND session_id')
displayStatVerbose(missingUserAndDeviceIds, 'MISSING user_id AND device_id')
displayStatVerbose(missingUserAndSessionIds, 'MISSING user_id AND session_id')
displayStatVerbose(missingDeviceAndSessionIds, 'MISSING device_id AND session_id')
displayStatVerbose(missingUserIds, 'MISSING user_id')
displayStatVerbose(missingDeviceIds, 'MISSING device_id')
displayStatVerbose(missingSessionIds, 'MISSING session_id')
displayStatVerbose(futureSessionIds, 'FUTURE session_id')
displayStatVerbose(futureTimes, 'FUTURE time')
const conflictingUserIds = []
const conflictingDeviceIds = []
const conflictingSessionIds = []
events.auth.forEach(datum => {
const event = datum.event
const uid = event.user_id
const deviceId = event.device_id
const sessionId = event.session_id
const user = getUser(uid)
const device = getDevice(deviceId)
optionallySetConflict(conflictingSessionIds, datum, user.deviceSessions, deviceId, sessionId)
optionallySetConflict(conflictingDeviceIds, datum, user.sessionDevices, sessionId, deviceId)
optionallySetConflict(conflictingUserIds, datum, device.sessionUsers, sessionId, uid)
})
displayConflict('user_id', conflictingUserIds)
displayConflict('device_id', conflictingDeviceIds)
displayConflict('session_id', conflictingSessionIds)
function createStat () {
return {
content: [],
auth: []
}
}
function timestamp (time) {
return Date.parse(`${time[0]}-${time[1]}-${time[2]}T${time[3]}:${time[4]}:59.999`)
}
function getUser (uid) {
return users.get(uid) || {
deviceSessions: new Map(),
sessionDevices: new Map()
}
}
function getDevice (deviceId) {
return devices.get(deviceId) || {
sessionUsers: new Map()
}
}
function multiMapSet (map, key, value) {
const set = map.get(key) || new Set()
set.add(value)
map.set(key, set)
}
function displayStat (stat, description) {
const categories = Object.keys(stat).map(key => ({
category: key,
count: stat[key].length,
percentage: Math.round(stat[key].length / events[key].length * 100)
}))
const count = categories.reduce((sum, item) => sum + item.count, 0)
const eventCount = Object.keys(events).reduce((sum, key) => sum + events[key].length, 0)
const percentage = Math.round(count / eventCount * 100)
console.log(`${description}: ${count} (${percentage}%)`)
categories.forEach(item => console.log(` ${item.category}: ${item.count} (${item.percentage}%)`))
}
function displayStatVerbose (stat, description) {
displayStat(stat, description)
if (VERBOSE) {
Object.keys(stat).forEach(key => stat[key].forEach(datum => console.log(datum)))
}
}
function optionallySetConflict (conflicts, datum, map, key, value) {
const set = map.get(key)
if (set && ! set.has(value)) {
conflicts.push(datum)
}
}
function displayConflict (property, conflicts) {
const count = conflicts.length
const percentage = Math.round(count / events.auth.length * 100)
console.log(`CONFLICTING ${property}: ${count} (${percentage}%)`)
if (VERBOSE) {
conflicts.forEach(datum => console.log(datum))
}
}

View File

@ -1,15 +0,0 @@
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - view","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - engage","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - submit","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - success","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England","$append":{"fxa_services_used":"sync"}},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - email_confirmed","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England","$append":{"fxa_services_used":"sync"}},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_activity - cert_signed","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England","$append":{"fxa_services_used":"sync"}},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - complete","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England","$append":{"fxa_services_used":"sync"}},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_pref - logout","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - view","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - engage","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","event_type":"fxa_login - submit","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","entrypoint":"menupanel","service":"sync"},"user_properties":{"flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - success","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - email_confirmed","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_activity - cert_signed","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England"},"app_version":"96","language":"en-GB"}
{"time":__TIME__,"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","user_id":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","event_type":"fxa_login - complete","session_id":__SESSION__,"event_properties":{"device_id":"deadbeefdeadbeefdeadbeefdeadbeef","service":"sync"},"user_properties":{"fxa_uid":"baadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfoodbaadfood","flow_id":"cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd","ua_browser":"Firefox","ua_version":"57","ua_os":"Linux","user_country":"United Kingdom","user_locale":"en-GB","user_state":"England"},"app_version":"96","language":"en-GB"}

View File

@ -1,141 +0,0 @@
'use strict'
const async = require('async')
const AutoDetectDecoderStream = require('autodetect-decoder-stream')
const crypto = require('crypto')
const csv = require('csv-parser')
const { lookup } = require('lookup-dns-cache')
const moment = require('moment-timezone')
const Promise = require('bluebird')
const request = require('request-promise')
const EVENT = /^mktg-([a-z]+-[a-z]+)$/
const MAX_EVENTS_PER_BATCH = 10
const HMAC_KEY = process.env.FXA_AMPLITUDE_HMAC_KEY
const API_KEY = process.env.FXA_AMPLITUDE_API_KEY
const WORKERS = process.env.FXA_AMPLITUDE_WORKERS || 8
if (! HMAC_KEY || ! API_KEY) {
console.error('Error: You must set FXA_AMPLITUDE_HMAC_KEY and FXA_AMPLITUDE_API_KEY environment variables')
process.exit(1)
}
module.exports.processStream = function processStream (stream) {
let eventCount = 0
const cargo = async.cargo(async tasks => await send(tasks), MAX_EVENTS_PER_BATCH)
cargo.concurrency = WORKERS
return new Promise(resolve => {
cargo.drain = () => {
resolve(eventCount)
}
stream
.pipe(new AutoDetectDecoderStream())
.pipe(csv())
.on('data', async (row) => {
const event = createEvent(row)
if (! event) {
return
}
cargo.push(event)
})
})
async function send (localBatch) {
const body = await sendBatch(localBatch)
if (body === 'success') {
eventCount += localBatch.length
} else {
console.log(body)
}
}
}
function createEvent (row) {
const eventType = getEventType(row)
if (! eventType) {
return
}
const time = getTime(row)
if (! time || time < 0) {
return
}
const user_id = row.FXA_ID
const email_id = row.EmailName
return {
event_type: `mktg - ${eventType}`,
time,
user_id: hash(user_id),
session_id: -1,
insert_id: hash(user_id, eventType, time, email_id),
event_properties: getEventProperties(row)
}
}
function getEventType (row) {
const eventParts = EVENT.exec(row.Event)
if (eventParts && eventParts.length === 2) {
return eventParts[1].replace(/-/g, '_')
}
}
function getTime (row) {
const time = moment.tz(row.EventDate, 'MMM D YYYY H:mmA', 'America/Los_Angeles')
if (time.isValid()) {
return time.unix()
}
}
function hash (...properties) {
const hmac = crypto.createHmac('sha256', HMAC_KEY)
properties.forEach(property => hmac.update(`${property}`))
return hmac.digest('hex')
}
// Event properties are parsed from the EmailName column, which has
// a somewhat arcane format defined here:
//
// https://docs.google.com/spreadsheets/d/11rvrVdF4fj5GaKOvlnLcNjnWB7U7yKV_MHmlBwRE-WA/edit#gid=1626564614
//
// You can see how this regex breaks it down, using the examples from
// that spreadsheet, here:
//
// https://regex101.com/r/Ih5SL4/3/
const EVENT_PROPERTIES = /^([A-Za-z]+)_([A-Z]+)_[A-Z]*_[0-9]{4}_[A-Z]+_([A-Z]+|DESK[_ ][A-Z]+)_(.+?)_(ALL|[A-Z]{2})_([A-Z]{2,4})_([A-Z-]+)(?:_[A-Za-z0-9]*)?$/
function getEventProperties (row) {
const properties = EVENT_PROPERTIES.exec(row.EmailName)
if (properties && properties.length === 8) {
return {
email_sender: properties[1],
email_region: properties[2],
email_format: properties[3],
email_id: properties[4],
email_country: properties[5],
email_language: properties[6],
email_channel: properties[7]
}
}
}
function sendBatch (batch) {
return request('https://api.amplitude.com/httpapi', {
method: 'POST',
lookup,
formData: {
api_key: API_KEY,
event: JSON.stringify(batch)
}
})
}

1798
packages/fxa-amplitude-send/package-lock.json generated

File diff suppressed because it is too large. Load Diff

View File

@ -1,39 +0,0 @@
{
"name": "fxa-amplitude-send",
"version": "1.1.0",
"dependencies": {
"async": "^2.6.2",
"autodetect-decoder-stream": "^1.0.0",
"aws-sdk": "^2.270.1",
"bluebird": "^3.5.4",
"csv-parser": "^1.12.0",
"lookup-dns-cache": "^2.1.0",
"moment": "^2.22.2",
"moment-timezone": "^0.5.21",
"node-parquet": "^0.2.6",
"request": "^2.88.0",
"request-promise": "^4.2.4",
"s3": "^4.4.0"
},
"devDependencies": {
"eslint": "6.6.0",
"eslint-plugin-fxa": "2.0.1"
},
"engines": {
"node": ">=12"
},
"scripts": {
"build-node-docker-image": "docker build -f Dockerfile-nodejs -t mozilla/fxa-amplitude-send:`git describe`-node .",
"lint": "eslint *.js bin/*.js",
"audit": "echo 'dependency checking not yet enabled for fxa-amplitude-send'"
},
"repository": {
"type": "git",
"url": "git+https://github.com/mozilla/fxa-amplitude-send.git"
},
"license": "MPL-2.0",
"bugs": {
"url": "https://github.com/mozilla/fxa-amplitude-send/issues"
},
"homepage": "https://github.com/mozilla/fxa-amplitude-send"
}

View File

@ -1,13 +0,0 @@
FROM node:12 AS node-builder
USER node
RUN mkdir /home/node/fxa-amplitude-send
WORKDIR /home/node/fxa-amplitude-send
COPY package*.json ./
RUN npm install
FROM node:12-slim
USER node
RUN mkdir /home/node/fxa-amplitude-send
WORKDIR /home/node/fxa-amplitude-send
COPY --chown=node:node --from=node-builder /home/node/fxa-amplitude-send .
COPY --chown=node:node . .

View File

@ -1,315 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, you can obtain one at https://mozilla.org/MPL/2.0/.
'use strict'
const async = require('async')
const crypto = require('crypto')
const is = require('check-types')
const { lookup } = require('lookup-dns-cache')
const { PubSub } = require('@google-cloud/pubsub')
const request = require('request-promise')
const { AMPLITUDE_API_KEY, HMAC_KEY, PUBSUB_PROJECT, PUBSUB_TOPIC, PUBSUB_SUBSCRIPTION } = process.env
if (! AMPLITUDE_API_KEY || ! HMAC_KEY || ! PUBSUB_PROJECT || ! PUBSUB_TOPIC || ! PUBSUB_SUBSCRIPTION) {
console.log(timestamp(), 'Error: You must set AMPLITUDE_API_KEY, HMAC_KEY, PUBSUB_PROJECT, PUBSUB_TOPIC and PUBSUB_SUBSCRIPTION environment variables')
process.exit(1)
}
const SECOND = 1000;
const MINUTE = SECOND * 60;
const HOUR = MINUTE * 60;
const DAY = HOUR * 24;
// If TIMEOUT_THRESHOLD milliseconds pass with no messages arriving, the script will abort
const TIMEOUT_THRESHOLD = parseInt(process.env.TIMEOUT_THRESHOLD || MINUTE);
// If a message older than WARNING_THRESHOLD milliseconds arrives, the script will log a warning
const WARNING_THRESHOLD = parseInt(process.env.WARNING_THRESHOLD || DAY * 3);
const IGNORED_EVENTS = new Map()
if (process.env.IGNORED_EVENTS) {
// process.env.IGNORED_EVENTS is a JSON object of event_type:criteria, e.g.:
//
// {
// "fxa_activity - access_token_checked": [
// {
// "event_properties": {
// "oauth_client_id": "deadbeef"
// }
// },
// {
// "event_properties": {
// "oauth_client_id": "baadf00d"
// }
// }
// ],
// "fxa_activity - access_token_created": [
// {
// "event_properties": {
// "oauth_client_id": "deadbeef"
// }
// },
// {
// "event_properties": {
// "oauth_client_id": "baadf00d"
// }
// }
// ]
// }
Object.entries(JSON.parse(process.env.IGNORED_EVENTS)).forEach(([ type, criteria ]) => {
IGNORED_EVENTS.set(type, criteria)
})
}
const ENDPOINTS = {
HTTP_API: 'https://api.amplitude.com/httpapi',
IDENTIFY_API: 'https://api.amplitude.com/identify',
}
const KEYS = {
HTTP_API: 'event',
IDENTIFY_API: 'identification',
}
const IDENTIFY_VERBS = [ '$set', '$setOnce', '$add', '$append', '$unset' ]
const IDENTIFY_VERBS_SET = new Set(IDENTIFY_VERBS)
const MAX_EVENTS_PER_BATCH = 10
const WORKER_COUNT = process.env.WORKER_COUNT ? parseInt(process.env.WORKER_COUNT) : 1
const MESSAGES = new Map()
main()
.catch(error => {
console.log(timestamp(), error.stack)
process.exit(1)
})
async function main () {
const pubsub = new PubSub({
projectId: PUBSUB_PROJECT
})
const topic = pubsub.topic(PUBSUB_TOPIC)
const subscraption = topic.subscription(PUBSUB_SUBSCRIPTION)
const [ exists ] = await subscraption.exists()
const [ subscription ] = await (exists ? subscraption.get(PUBSUB_SUBSCRIPTION) : subscraption.create(PUBSUB_SUBSCRIPTION))
const cargo = {
httpapi: setupCargo(ENDPOINTS.HTTP_API, KEYS.HTTP_API),
identify: setupCargo(ENDPOINTS.IDENTIFY_API, KEYS.IDENTIFY_API),
}
let timeout;
subscription.on('message', message => {
if (timeout) {
clearTimeout(timeout);
}
timeout = setTimeout(onTimeout, TIMEOUT_THRESHOLD);
processMessage(cargo, message)
})
subscription.on('error', error => {
console.log(timestamp(), error.stack)
})
subscription.on('close', () => {
console.log(timestamp(), 'Error: subscription closed')
process.exit(1)
})
}
function timestamp () {
return new Date().toISOString()
}
function setupCargo (endpoint, key) {
const cargo = async.cargo(async payload => {
try {
await sendPayload(payload, endpoint, key)
clearMessages(payload, message => message.ack())
console.log(timestamp(), 'Success!', endpoint, payload.length)
} catch (error) {
console.log(timestamp(), endpoint, error.stack)
clearMessages(payload, message => message.nack(), true)
}
}, MAX_EVENTS_PER_BATCH)
cargo.concurrency = WORKER_COUNT
return cargo
}
function processMessage (cargo, message) {
const { httpapi, identify } = parseMessage(message)
if (message.publishTime < Date.now() - WARNING_THRESHOLD) {
console.log(timestamp(), 'Warning: Old message', { httpapi, identify })
}
if (httpapi) {
MESSAGES.set(httpapi.insert_id, { message, payloadCount: identify ? 2 : 1 })
cargo.httpapi.push(httpapi)
}
if (identify) {
cargo.identify.push(identify)
}
}
function parseMessage (message) {
let { jsonPayload: event } = JSON.parse(Buffer.from(message.data, 'base64').toString())
if (event.Fields) {
event = event.Fields
if (event.op && event.data) {
event = JSON.parse(event.data)
} else {
if (is.nonEmptyString(event.event_properties)) {
event.event_properties = JSON.parse(event.event_properties)
}
if (is.nonEmptyString(event.user_properties)) {
event.user_properties = JSON.parse(event.user_properties)
}
}
}
if (isIgnoredEvent(event)) {
return {}
}
if (! isEventOk(event)) {
console.log(timestamp(), 'Warning: Skipping malformed event', event)
return {}
}
if (event.user_id) {
event.user_id = hash(event.user_id)
}
event.insert_id = hash(event.user_id, event.device_id, event.session_id, event.event_type, event.time)
let identify
if (IDENTIFY_VERBS.some(verb => is.assigned(event.user_properties[verb]))) {
identify = {
device_id: event.device_id,
user_id: event.user_id,
user_properties: splitIdentifyPayload(event.user_properties),
// _insert_id is only here so we can uniquely identify each payload and
// link it back to its message. It's not actually sent to Amplitude.
_insert_id: event.insert_id,
}
}
return {
httpapi: event,
identify,
}
}
function isIgnoredEvent (event) {
if (! IGNORED_EVENTS.has(event.event_type)) {
return false
}
const criteria = IGNORED_EVENTS.get(event.event_type)
if (Array.isArray(criteria)) {
return criteria.some(criterion => deepMatch(event, criterion))
}
return deepMatch(event, criteria)
}
function deepMatch (object, criteria) {
if (criteria === undefined) {
return true
}
return Object.entries(criteria).every(([ key, value ]) => {
if (typeof value === 'object') {
return deepMatch(object[key], value)
}
return object[key] === value
})
}
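// Illustrative sketch only (the oauth_client_id values are hypothetical): with an
// IGNORED_EVENTS entry shaped like the JSON documented above, deepMatch treats each
// criterion as a subset match against the incoming event:
//
//   IGNORED_EVENTS.set('fxa_activity - access_token_checked', [
//     { event_properties: { oauth_client_id: 'deadbeef' } }
//   ])
//
//   isIgnoredEvent({
//     event_type: 'fxa_activity - access_token_checked',
//     event_properties: { oauth_client_id: 'deadbeef', service: 'sync' }
//   })
//   // => true, the criterion matches a subset of the event's properties
//
//   isIgnoredEvent({
//     event_type: 'fxa_activity - access_token_checked',
//     event_properties: { oauth_client_id: 'cafef00d' }
//   })
//   // => false, no criterion matches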
function isEventOk (event) {
return (
is.nonEmptyString(event.device_id) ||
is.nonEmptyString(event.user_id)
) &&
is.nonEmptyString(event.event_type) &&
is.positive(event.time)
}
function hash (...properties) {
const hmac = crypto.createHmac('sha256', HMAC_KEY)
properties.forEach(property => {
if (property) {
hmac.update(`${property}`)
}
})
return hmac.digest('hex')
}
function splitIdentifyPayload (properties) {
return Object.entries(properties).reduce((payload, [ key, value ]) => {
if (IDENTIFY_VERBS_SET.has(key)) {
payload[key] = value
properties[key] = undefined
}
return payload
}, {})
}
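// Illustrative sketch only (property values are hypothetical): splitIdentifyPayload
// moves the Identify API verbs out of user_properties, leaving the plain properties
// behind for the HTTP API payload:
//
//   const props = { $set: { plan: 'free' }, utm_source: 'newsletter' }
//   splitIdentifyPayload(props)  // => { $set: { plan: 'free' } }
//   props                        // => { $set: undefined, utm_source: 'newsletter' }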
function sendPayload (payload, endpoint, key) {
return request(endpoint, {
method: 'POST',
lookup,
formData: {
api_key: AMPLITUDE_API_KEY,
[key]: JSON.stringify(payload.map(item => ({ ...item, _insert_id: undefined })))
},
timeout: 5 * 1000
})
}
function clearMessages (payload, action, forceAction = false) {
payload.forEach(event => {
// eslint-disable-next-line no-underscore-dangle
const id = event.insert_id || event._insert_id
const item = MESSAGES.get(id)
if (! item) {
// In this case the message has already been cleared due to an earlier failure
return
}
const { message, payloadCount } = item
if (! message) {
return
}
if (forceAction || payloadCount === 1) {
action(message)
MESSAGES.delete(id)
} else {
MESSAGES.set(id, { message, payloadCount: payloadCount - 1 })
}
})
}
function onTimeout () {
console.log(timestamp(), `Error: no messages received in ${TIMEOUT_THRESHOLD / SECOND} seconds`)
process.exit(1)
}

1711
packages/fxa-amplitude-send/pubsub/package-lock.json generated

File diff not shown because of its large size.

View file

@ -1,14 +0,0 @@
{
"name": "fxa-amplitude-pubsub",
"version": "0.0.0",
"dependencies": {
"@google-cloud/pubsub": "^0.29.1",
"async": "^2.6.2",
"check-types": "^8.0.2",
"lookup-dns-cache": "^2.1.0",
"request": "^2.88.0",
"request-promise": "^4.2.4"
},
"license": "MPL-2.0",
"private": true
}

View file

@ -1,2 +0,0 @@
boto3
requests

View file

@ -1,3 +0,0 @@
#!/bin/bash -ex
exit 0

View file

@ -1,3 +0,0 @@
#!/bin/bash -ex
exit 0

View file

@ -1,260 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const assert = require('assert')
const crypto = require('crypto')
const fs = require('fs')
const { ParquetReader } = require('node-parquet')
const path = require('path')
const Promise = require('bluebird')
const request = require('request-promise')
const s3 = require('s3')
fs.unlinkAsync = Promise.promisify(fs.unlink)
const PARQUET_BATCH_SIZE = 16384
const AWS_ACCESS_KEY = process.env.FXA_AWS_ACCESS_KEY
const AWS_SECRET_KEY = process.env.FXA_AWS_SECRET_KEY
const MAX_EVENTS_PER_BATCH = 10
const API_KEY = process.env.FXA_AMPLITUDE_API_KEY
const S3_PATH = /^s3:\/\/([\w.-]+)\/(.+)$/
const AMPLITUDE_BACKOFF = 30000
const AMPLITUDE_RETRY_LIMIT = 3
module.exports = { run, hash, getOs }
function run (dataPath, impl) {
const { createEventCounts, createEvent } = impl
assertFunction(createEventCounts)
assertFunction(createEvent)
return Promise.resolve()
.then(() => {
const parts = S3_PATH.exec(dataPath)
if (parts && parts.length === 3) {
return processDataFromS3(parts[1], parts[2])
}
// HACK: Use a made-up submissionDate for local testing
const submissionDate = new Date()
.toJSON()
.split('T')[0]
return processData(readLocalData(dataPath), submissionDate)
})
.then(eventCounts => {
let sum = 0
Object.entries(eventCounts).forEach(entry => {
const [ key, eventCount ] = entry
console.log(`${key}: ${eventCount}`)
sum += eventCount
})
console.log('sum:', sum)
})
.catch(error => {
console.error(error)
process.exit(1)
})
function readLocalData (fileName) {
const reader = new ParquetReader(fileName)
const metadata = reader.info()
const schema = parseSchema(metadata.spark_schema)
return { count: metadata.rows, reader, schema }
}
function parseSchema (source, shift = 0) {
return Object.keys(source).reduce((target, key, index) => {
const column = source[key]
// HACK: I'm ignoring list types for now because so far they're always
// at the end of the schema and there's no data in them we want
if (! column.list && ! column.type) {
const nestedKeys = getInterestingKeys(column)
target[key] = nestedKeys.reduce((parentColumn, nestedKey, nestedIndex) => {
const nestedColumn = column[nestedKey]
if (nestedColumn.type) {
// This branch parses sync_summary's failure_reason and status columns
parentColumn[nestedKey] = index + shift
if (nestedIndex < nestedKeys.length - 1) {
shift += 1
}
} else {
// This branch parses sync_event's `event_map_values` key/value columns
Object.assign(parentColumn, parseSchema(nestedColumn, index + shift))
shift += getInterestingKeys(nestedColumn).length - 1
}
return parentColumn
}, {})
} else {
target[key] = index + shift
}
return target
}, {})
}
function getInterestingKeys (column) {
return Object.keys(column).filter(key => key !== 'optional')
}
function processData ({
count,
reader,
schema,
eventCounts = createEventCounts(),
index = 0
}, submissionDate) {
if (index >= count) {
reader.close()
return Promise.resolve(eventCounts)
}
const rows = reader.rows(PARQUET_BATCH_SIZE)
let batch = []
return rows.reduce(async (promise, row) => {
await promise
const event = createEvent(schema, row, submissionDate)
if (! event) {
eventCounts.skipped += 1
return
}
eventCounts[event.event_type.split(' ')[2]] += 1
batch.push(event)
if (batch.length < MAX_EVENTS_PER_BATCH) {
return
}
const localBatch = batch.slice()
batch = []
return sendBatch(localBatch)
}, Promise.resolve())
.then(() => {
if (batch.length > 0) {
return sendBatch(batch)
}
})
.then(() => processData({ count, reader, schema, eventCounts, index: index + PARQUET_BATCH_SIZE }))
}
async function sendBatch (batch, iteration = 0) {
try {
return await request('https://api.amplitude.com/httpapi', {
simple: true,
method: 'POST',
formData: {
api_key: API_KEY,
event: JSON.stringify(batch)
}
}).promise()
} catch (error) {
iteration += 1
if (iteration === AMPLITUDE_RETRY_LIMIT) {
throw error
}
if (error.statusCode === 429) {
return new Promise((resolve, reject) => {
setTimeout(() => {
sendBatch(batch, iteration)
.then(resolve, reject)
}, AMPLITUDE_BACKOFF)
})
}
return sendBatch(batch, iteration)
}
}
async function processDataFromS3 (bucket, key) {
const submissionDate = formatDate(key.split('=')[1].substr(0, 8))
const fileName = await downloadFileFromS3(bucket, key)
const data = readLocalData(fileName)
const [ eventCounts ] = await Promise.all([ processData(data, submissionDate), fs.unlinkAsync(fileName) ])
return eventCounts
}
function downloadFileFromS3 (bucket, key) {
return new Promise((resolve, reject) => {
const fileName = path.resolve(`${key.split('=')[1].replace('/', '-')}`)
const client = s3.createClient({
s3Options: {
accessKeyId: AWS_ACCESS_KEY,
secretKey: AWS_SECRET_KEY,
}
})
const emitter = client.downloadFile({
localFile: fileName,
s3Params: {
Bucket: bucket,
Key: key,
}
})
emitter.on('error', error => reject(error))
emitter.on('end', () => resolve(fileName))
})
}
}
function assertFunction (argument) {
assert.equal(typeof argument, 'function', 'Invalid argument `createEvent`')
}
function formatDate (date) {
return `${date.substr(0, 4)}-${date.substr(4, 2)}-${date.substr(6)}`
}
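// Illustrative sketch only (the S3 key layout is assumed from the key.split('=')
// handling above):
//
//   const key = 'submission_date_s3=20180701/part-00000.parquet'
//   key.split('=')[1].substr(0, 8)  // => '20180701'
//   formatDate('20180701')          // => '2018-07-01'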
function hash (key, ...properties) {
const hmac = crypto.createHmac('sha256', key)
properties.forEach(property => hmac.update(`${property}`))
return hmac.digest('hex')
}
function getOs (deviceOsName, deviceOsVersion) {
if (! deviceOsName) {
return
}
switch (deviceOsName) {
case 'Windows_NT':
case 'WINNT':
return {
os_name: 'Windows',
os_version: deviceOsVersion
}
case 'Darwin':
return {
os_name: 'Mac OS X',
os_version: getMacOsVersion(deviceOsVersion)
}
default:
return {
os_name: deviceOsName,
os_version: deviceOsVersion
}
}
}
function getMacOsVersion (deviceOsVersion) {
const parts = deviceOsVersion.split('.')
if (parts.length < 2) {
return
}
const major = parseInt(parts[0])
const minor = parseInt(parts[1])
if (major >= 5 && minor >= 0) {
// https://en.wikipedia.org/wiki/Darwin_(operating_system)#Release_history
return `10.${major - 4}.${minor}`
}
}
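// Illustrative sketch only (version strings chosen for illustration) of what these
// helpers return:
//
//   getOs('WINNT', '10.0')     // => { os_name: 'Windows', os_version: '10.0' }
//   getOs('Darwin', '17.7.0')  // => { os_name: 'Mac OS X', os_version: '10.13.7' }
//   getMacOsVersion('16')      // => undefined, needs at least major.minor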

View file

@ -1,6 +0,0 @@
#!/bin/sh
cd /home/ec2-user/sync-amplitude
. ./.env
/usr/local/bin/node sync-events
/usr/local/bin/node sync-summary

View file

@ -1,100 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const common = require('./sync-common')
const HMAC_KEY = process.env.SYNC_INSERTID_HMAC_KEY
module.exports.run = path => common.run(path, { createEventCounts, createEvent })
function createEventCounts () {
return {
tab_sent: 0,
tab_received: 0,
repair_triggered: 0,
repair_success: 0,
skipped: 0
}
}
function createEvent (schema, row) {
const eventType = getEventType(row[schema.event_method], row[schema.event_object])
if (! eventType) {
return
}
// serverTime is not at all accurate as an event timing, but it's the best thing we have
const time = getServerTime(row[schema.event_map_values.key], row[schema.event_map_values.value])
if (! time || time < 0) {
return
}
const uid = row[schema.uid]
const syncFlowId = row[schema.event_flow_id]
const appName = row[schema.app_name]
const appVersion = row[schema.app_version]
return Object.assign({
event_type: `sync - ${eventType}`,
time,
// user_id is already hashed in Sync telemetry data
user_id: uid,
// TODO: include device_id when we have a plan for matching it to the other events
session_id: -1,
insert_id: common.hash(HMAC_KEY, uid, row[schema.device_id], syncFlowId, time, row[schema.event_timestamp], eventType),
app_version: appVersion,
language: row[schema.device_os_locale],
event_properties: {
ua_browser: appName,
ua_version: appVersion,
flow_id: syncFlowId
}
}, common.getOs(row[schema.device_os_name], row[schema.device_os_version]))
}
function getEventType (method, object) {
if (method === 'displayURI') {
return getSendTabEventType(object)
}
if (object === 'repair') {
return getRepairEventType(method)
}
}
function getSendTabEventType (object) {
switch (object) {
case 'sendcommand':
return 'tab_sent'
case 'processcommand':
return 'tab_received'
}
}
function getRepairEventType (method) {
switch (method) {
case 'started':
return 'repair_triggered'
case 'finished':
return 'repair_success'
}
}
function getServerTime (keys, values) {
let serverTimeIndex
if (keys.some((key, index) => {
if (key === 'serverTime') {
serverTimeIndex = index
return true
}
})) {
return parseInt(values[serverTimeIndex] * 1000)
}
}

View file

@ -1,66 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
const common = require('./sync-common')
const HMAC_KEY = process.env.SYNC_INSERTID_HMAC_KEY
module.exports.run = path => common.run(path, { createEventCounts, createEvent })
function createEventCounts () {
return {
sync_complete: 0,
skipped: 0
}
}
function createEvent (schema, row, submissionDate) {
const eventType = getEventType(schema, row)
if (! eventType) {
return
}
// submissionDate is not at all accurate as an event timing, but it's the best thing we have
const time = getSubmissionTime(submissionDate)
if (! time || time < 0) {
return
}
const uid = row[schema.uid]
const appVersion = row[schema.app_version]
return Object.assign({
event_type: `sync - ${eventType}`,
time,
// user_id is already hashed in Sync telemetry data
user_id: uid,
// TODO: include device_id when we have a plan for matching it to the other events
session_id: -1,
insert_id: common.hash(HMAC_KEY, uid, row[schema.device_id], time, row[schema.when], eventType),
app_version: appVersion,
language: row[schema.os_locale],
event_properties: {
ua_browser: row[schema.app_name],
ua_version: appVersion
}
}, common.getOs(row[schema.os], row[schema.os_version]))
}
function getEventType (schema, row) {
const status = {
sync: row[schema.status.sync],
service: row[schema.status.service]
}
if (! status.sync && ! status.service) {
return 'sync_complete'
}
}
function getSubmissionTime (submissionDate) {
return Date.parse(`${submissionDate}T12:00`)
}

View file

@ -1,22 +0,0 @@
#!/bin/sh
TIME=`expr $(date +%s) \* 1000 - 60000`
SESSION="$TIME"
while read -r EVENT || [[ -n "$EVENT" ]]; do
EVENT=`sed "s/__TIME__/$TIME/g" <<< "$EVENT"`
EVENT=`sed "s/__SESSION__/$SESSION/g" <<< "$EVENT"`
if [ "$EVENTS" = "" ]; then
EVENTS="$EVENT"
else
EVENTS=`printf "$EVENTS\n$EVENT"`
fi
TIME=`expr $TIME + 1000`
done < ./fixtures.txt
export FXA_AMPLITUDE_HMAC_KEY="wibble"
./build/bin/python amplitude.py <<< "$EVENTS"

View file

@ -1,8 +0,0 @@
#!/bin/sh
export FXA_AMPLITUDE_HMAC_KEY="wibble"
for FILE in data.stage/*.gz; do
./build/bin/python amplitude.py "$FILE"
done

7059
packages/fxa-auth-db-mysql/package-lock.json generated

File diff not shown because of its large size.

View file

@ -7,16 +7,12 @@
"type": "git",
"url": "https://github.com/mozilla/fxa.git"
},
"bin": {
"fxa-auth-db-mysql": "bin/db_patcher.js"
},
"bin": "bin/db_patcher.js",
"scripts": {
"outdated": "npm outdated --depth 0 || exit 0",
"lint": "npm-run-all --parallel lint:*",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:eslint": "eslint .",
"prestart": "../../_scripts/check-mysql.sh && node ./bin/db_patcher.js >/dev/null",
"start": "pm2 start pm2.config.js",
"lint": "eslint .",
"start": "../../_scripts/check-mysql.sh && node ./bin/db_patcher.js >/dev/null && pm2 start pm2.config.js",
"stop": "pm2 stop pm2.config.js",
"restart": "pm2 restart pm2.config.js",
"test": "npm run test-mysql && npm run test-server && npm run lint",
@ -36,6 +32,7 @@
"bluebird": "3.5.2",
"convict": "4.4.0",
"fxa-jwtool": "0.7.2",
"fxa-shared": "workspace:*",
"ip": "1.1.5",
"mozlog": "2.2.0",
"mysql": "^2.17.1",
@ -47,14 +44,13 @@
"devDependencies": {
"audit-filter": "0.5.0",
"chai": "4.2.0",
"eslint": "6.6.0",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "2.0.1",
"mocha": "^6.2.2",
"nock": "8.0.0",
"npm-run-all": "4.1.5",
"nyc": "^14.1.1",
"pm2": "^4.2.3",
"prettier": "1.18.2",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"proxyquire": "1.7.10",
"restify-clients": "^2.6.7",
"sinon": "^7.5.0"

View file

@ -12,6 +12,7 @@ module.exports = {
NODE_ENV: 'dev',
PORT: '8000',
},
filter_env: ['npm_'],
max_restarts: '2',
min_uptime: '2m',
},

View file

@ -10,16 +10,14 @@ const path = require('path');
const spawn = require('child_process').spawn;
const MOCHA_BIN = path.join(
path.dirname(__dirname),
'node_modules',
'.bin',
path.dirname(require.resolve('mocha')),
'bin',
'mocha'
);
const NYC_BIN = path.join(
path.dirname(__dirname),
'node_modules',
'.bin',
'nyc'
path.dirname(require.resolve('nyc')),
'bin',
'nyc.js'
);
const bin = NYC_BIN;

0
packages/fxa-auth-server/bin/key_server.js Normal file → Executable file
Просмотреть файл

16827
packages/fxa-auth-server/package-lock.json generated

File diff not shown because of its large size.

View file

@ -11,12 +11,10 @@
"scripts": {
"build": "NODE_ENV=dev npm run gen-keys && tsc",
"bump-template-versions": "node scripts/template-version-bump",
"lint": "npm-run-all --parallel lint:*",
"audit": "npm audit --json | audit-filter --nsp-config=.nsprc --audit=-",
"lint:eslint": "eslint .",
"lint": "eslint .",
"postinstall": "scripts/download_l10n.sh",
"prestart": "NODE_ENV=dev npm run gen-keys && ../../_scripts/check-mysql.sh",
"start": "pm2 start pm2.config.js",
"start": "NODE_ENV=dev npm run gen-keys && ../../_scripts/check-mysql.sh && pm2 start pm2.config.js",
"stop": "pm2 stop pm2.config.js",
"restart": "pm2 restart pm2.config.js",
"test": "VERIFIER_VERSION=0 scripts/test-local.sh",
@ -58,8 +56,10 @@
"commander": "2.18.0",
"convict": "4.3.1",
"email-addresses": "2.0.2",
"fxa-geodb": "workspace:*",
"fxa-jwtool": "0.7.2",
"fxa-notifier-aws": "1.0.0",
"fxa-shared": "workspace:*",
"google-libphonenumber": "^3.2.7",
"handlebars": "^4.5.3",
"hapi-error": "^2.3.0",
@ -75,7 +75,7 @@
"mozlog": "2.2.0",
"mysql": "2.15.0",
"mysql-patcher": "0.7.0",
"node-uap": "git+https://github.com/dannycoates/node-uap.git#96dc1f9f224422ec184395b6408cd1fc40ee452a",
"node-uap": "git://github.com/mozilla-fxa/node-uap.git#96dc1f9f224422ec184395b6408cd1fc40ee452a",
"node-zendesk": "^1.4.0",
"nodemailer": "2.7.2",
"otplib": "11.0.1",
@ -86,6 +86,7 @@
"punycode.js": "2.1.0",
"qrcode": "^1.4.4",
"request": "2.88.0",
"safe-regex": "^1.1.0",
"safe-url-assembler": "1.3.5",
"sandboxed-regexp": "^0.3.0",
"stripe": "^8.1.0",
@ -98,7 +99,7 @@
"devDependencies": {
"@types/chai": "^4.2.4",
"@types/convict": "^4.2.1",
"@types/hapi": "^18.0.3",
"@types/hapi__hapi": "^19.0.3",
"@types/ioredis": "^4.14.8",
"@types/joi": "^14.3.3",
"@types/jsonwebtoken": "^8.3.5",
@ -108,46 +109,47 @@
"@types/nock": "^11.1.0",
"@types/node": "^12.11.7",
"@types/request": "^2.48.3",
"@types/safe-regex": "1.1.2",
"@types/verror": "^1.10.3",
"acorn": "^5.7.3",
"audit-filter": "^0.5.0",
"binary-split": "0.1.2",
"chai": "4.1.2",
"eslint": "6.6.0",
"eslint": "^6.8.0",
"eslint-plugin-fxa": "2.0.1",
"fxa-geodb": "file:../fxa-geodb",
"fxa-shared": "file:../fxa-shared",
"grunt": "1.0.4",
"fxa-auth-db-mysql": "workspace:*",
"fxa-shared": "workspace:*",
"grunt": "^1.1.0",
"grunt-cli": "1.2.0",
"grunt-contrib-copy": "1.0.0",
"grunt-copyright": "0.3.0",
"grunt-eslint": "19.0.0",
"grunt-eslint": "^22.0.0",
"grunt-newer": "1.2.0",
"hawk": "7.0.7",
"jsxgettext-recursive-next": "1.1.0",
"jws": "3.1.5",
"keypair": "1.0.1",
"leftpad": "0.0.0",
"load-grunt-tasks": "3.5.2",
"load-grunt-tasks": "^5.1.0",
"lodash.chunk": "4.2.0",
"lodash.pick": "4.4.0",
"mailparser": "0.6.1",
"mkdirp": "0.5.1",
"mocha": "5.2.0",
"moment": "^2.24.0",
"nock": "10.0.2",
"nodemon": "^2.0.3",
"npm-run-all": "4.1.5",
"nyc": "^14.1.1",
"pm2": "^4.2.3",
"prettier": "^1.18.2",
"pm2": "^4.4.0",
"prettier": "^2.0.5",
"proxyquire": "2.0.0",
"read": "1.0.7",
"rimraf": "2.6.2",
"simplesmtp": "0.3.35",
"sinon": "7.0.0",
"through": "2.3.8",
"ts-node": "^8.8.1",
"typescript": "^3.6.4",
"ts-node": "^8.10.1",
"typescript": "3.8.3",
"ws": "5.2.2"
}
}

View file

@ -6,7 +6,7 @@ module.exports = {
apps: [
{
name: 'auth',
script: 'ts-node bin/key_server.js',
script: 'node -r ts-node/register bin/key_server.js',
cwd: __dirname,
env: {
DB: 'mysql',
@ -20,18 +20,20 @@ module.exports = {
CONFIG_FILES: 'config/secrets.json',
PORT: '9000',
},
filter_env: ['npm_'],
watch: ['bin', 'config', 'lib'],
max_restarts: '1',
min_uptime: '2m',
},
{
name: 'inbox',
script: 'ts-node test/mail_helper.js',
script: 'node -r ts-node/register test/mail_helper.js',
cwd: __dirname,
env: {
NODE_ENV: 'dev',
MAILER_PORT: '9001',
},
filter_env: ['npm_'],
max_restarts: '1',
min_uptime: '2m',
},

View file

@ -14,9 +14,9 @@ DOWNLOAD_PATH="https://github.com/mozilla/fxa-content-server-l10n.git"
# Download L10N using git
if [ ! -d "fxa-content-server-l10n" ]; then
echo "Downloading L10N files from $DOWNLOAD_PATH..."
git clone --depth=20 $DOWNLOAD_PATH
git clone --depth 1 $DOWNLOAD_PATH
fi
cd fxa-content-server-l10n
cd fxa-content-server-l10n || exit 1
echo "Updating L100N files"
git checkout -- .
git checkout $FXA_L10N_SHA

View file

@ -66,7 +66,9 @@ function addKeyProperties(key) {
console.log('Generating keypair');
cp.exec(
'openssl genrsa 2048 | ../node_modules/pem-jwk/bin/pem-jwk.js',
`openssl genrsa 2048 | ${require
.resolve('pem-jwk')
.replace('index.js', 'bin/pem-jwk.js')}`,
{
cwd: __dirname,
},

View file

@ -10,16 +10,14 @@ const path = require('path');
const spawn = require('child_process').spawn;
const MOCHA_BIN = path.join(
path.dirname(__dirname),
'node_modules',
'.bin',
path.dirname(require.resolve('mocha')),
'bin',
'mocha'
);
const NYC_BIN = path.join(
path.dirname(__dirname),
'node_modules',
'.bin',
'nyc'
path.dirname(require.resolve('nyc')),
'bin',
'nyc.js'
);
const bin = NYC_BIN;

View file

@ -1,15 +1,5 @@
#!/bin/bash -ex
DIR=$(dirname "$0")
cd "$DIR/../../../"
npx lerna bootstrap \
--scope fxa-shared \
--scope fxa-geodb \
--scope fxa-auth-db-mysql \
--scope fxa-auth-server \
--concurrency 2
cd packages/fxa-auth-server
npm run test-ci
yarn workspaces focus fxa-auth-server
yarn workspace fxa-shared run build
yarn run test-ci

View file

@ -16,8 +16,7 @@ npx ts-node ./scripts/gen_keys.js
npx ts-node ./scripts/gen_vapid_keys.js
npx ts-node ./scripts/oauth_gen_keys.js
node ../fxa-auth-db-mysql/bin/db_patcher > /dev/null
npm run lint:eslint
grunt copyright
npm run lint
GLOB=$*
if [ -z "$GLOB" ]; then

View file

@ -11,7 +11,7 @@ const error = require('../../../lib/error');
const getRoute = require('../../routes_helpers').getRoute;
const isA = require('@hapi/joi');
const mocks = require('../../mocks');
const moment = require('../../../../fxa-shared/node_modules/moment'); // Ensure consistency with production code
const moment = require('moment'); // Ensure consistency with production code
const P = require('../../../lib/promise');
const proxyquire = require('proxyquire');
const uuid = require('uuid');

View file

@ -7,7 +7,7 @@
const sinon = require('sinon');
const assert = { ...sinon.assert, ...require('chai').assert };
const mocks = require('../../../mocks');
const moment = require('../../../../../fxa-shared/node_modules/moment');
const moment = require('moment');
const EARLIEST_SANE_TIMESTAMP = 31536000000;

View file

@ -28,7 +28,7 @@ describe('the signing-key management scripts', function() {
return execFileSync(
process.execPath,
[
path.join(base, '../node_modules/.bin/ts-node'),
require.resolve('ts-node').replace('index.js', 'bin.js'),
'-P',
path.join(base, '../tsconfig.json'),
path.join(base, name),

Some files were not shown because too many files changed in this diff.