Merge branch 'main' into fix-fixed-exposures-dashbaard-calculation

This commit is contained in:
Florian Zia 2023-10-25 00:02:12 +02:00 committed by GitHub
Parents bb9f235e1e a1a0ceca75
Commit 5dab9ab1b4
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
52 changed files: 1481 additions and 770 deletions

View file

@ -2,7 +2,6 @@
# These are still used, but ignored for Prettier to avoid a big-bang reformatting.
# We can do such a reformatting later, or format them if/when we convert them to TypeScript:
src/utils/**/*.js
src/scripts/
src/e2e/**/*.js
src/e2e/**/*.json
src/db/**/*.js

View file

@ -8,6 +8,7 @@ import type { Preview } from "@storybook/react";
import { action } from "@storybook/addon-actions";
import { linkTo } from "@storybook/addon-links";
import "../src/app/globals.css";
import { SessionProvider } from "../src/contextProviders/session";
import { L10nProvider } from "../src/contextProviders/localization";
import { metropolis } from "../src/app/fonts/Metropolis/metropolis";
import { ReactAriaI18nProvider } from "../src/contextProviders/react-aria";
@ -16,7 +17,7 @@ import { getEnL10nBundlesSync } from "../src/app/functions/server/mockL10n";
const inter = Inter({ subsets: ["latin"], variable: "--font-inter" });
const AppDecorator: Exclude<Preview["decorators"], undefined>[0] = (
storyFn
storyFn,
) => {
const l10nBundles = getEnL10nBundlesSync();
@ -32,7 +33,9 @@ const AppDecorator: Exclude<Preview["decorators"], undefined>[0] = (
return (
<L10nProvider bundleSources={l10nBundles}>
<ReactAriaI18nProvider locale="en">{storyFn()}</ReactAriaI18nProvider>
<SessionProvider session={null}>
<ReactAriaI18nProvider locale="en">{storyFn()}</ReactAriaI18nProvider>
</SessionProvider>
</L10nProvider>
);
};
@ -76,7 +79,7 @@ const preview: Preview = {
if (path === "/redesign/user/dashboard") {
linkTo(
"Pages/Dashboard",
"US user, without Premium, with unresolved scan results, with unresolved breaches"
"US user, without Premium, with unresolved scan results, with unresolved breaches",
)();
}
@ -93,7 +96,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/1b. Scan results",
"With a few unresolved scan results (free)"
"With a few unresolved scan results (free)",
)();
}
@ -109,7 +112,7 @@ const preview: Preview = {
"/redesign/user/dashboard/fix/data-broker-profiles/automatic-remove"
) {
linkTo(
"Pages/Guided resolution/1d. Automatically resolve brokers"
"Pages/Guided resolution/1d. Automatically resolve brokers",
)();
}
@ -118,7 +121,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/2. High-risk data breaches",
"2a. Social Security Number"
"2a. Social Security Number",
)();
}
@ -128,7 +131,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/2. High-risk data breaches",
"2b. Credit card"
"2b. Credit card",
)();
}
@ -138,7 +141,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/2. High-risk data breaches",
"2c. Bank account"
"2c. Bank account",
)();
}
@ -147,7 +150,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/2. High-risk data breaches",
"2d. PIN"
"2d. PIN",
)();
}
@ -156,7 +159,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/3. Leaked passwords",
"3a. Passwords"
"3a. Passwords",
)();
}
@ -166,7 +169,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/3. Leaked passwords",
"3b. Security questions"
"3b. Security questions",
)();
}
@ -176,7 +179,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/4. Security recommendations",
"4a. Phone number"
"4a. Phone number",
)();
}
@ -186,7 +189,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/4. Security recommendations",
"4b. Email address"
"4b. Email address",
)();
}
@ -195,7 +198,7 @@ const preview: Preview = {
) {
linkTo(
"Pages/Guided resolution/4. Security recommendations",
"4c. IP address"
"4c. IP address",
)();
}
},

View file

@ -279,7 +279,6 @@ dashboard-top-banner-your-data-scan-in-progress-all-fixed-description =
[one] Great work fixing { $exposures_resolved_num } exposure so far! We’re still scanning sites that sell your personal info. This should be done within a few minutes.
*[other] Great work fixing { $exposures_resolved_num } exposures so far! We’re still scanning sites that sell your personal info. This should be done within a few minutes.
}
dashboard-top-banner-your-data-is-protected-all-fixed-cta = Get continuous protection
dashboard-top-banner-non-us-no-exposures-found-description = Great news! We searched all known data breaches and found no exposures. We’ll keep monitoring your email address and will alert you if a new breach occurs.
# Variables:

View file

@ -724,7 +724,11 @@ ad-unit-6-before-you-complete = Maskér din mailadresse for at beskytte dine opl
# “account” can be localized, “Firefox” must be treated as a brand,
# and kept in English.
# Deprecated - to be replaced by -brand-mozilla-account
-brand-fx-account = Firefox-konto
# “account” can be localized, “Mozilla” must be treated as a brand,
# and kept in English.
-brand-mozilla-account = Mozilla-konto
## Search Engine Optimization
@ -740,7 +744,7 @@ sign-in = Log ind
site-nav-breaches-link = Løste datalæk
site-nav-settings-link = Indstillinger
site-nav-help-link = Hjælp og support
# This call-out is above 2 image links for Firefox Relay and Mozilla VPN
# This call-out is above 2 image links for Firefox Relay and Mozilla VPN
site-nav-ad-callout = Prøv vores andre sikkerhedsværktøjer:
brand-relay = { -brand-relay }
brand-mozilla-vpn = { -brand-mozilla-vpn }
@ -750,8 +754,12 @@ brand-mozilla-vpn = { -brand-mozilla-vpn }
menu-button-title = Brugermenu
menu-button-alt = Åbn brugermenuen
menu-list-accessible-label = Åbn kontomenuen
# Deprecated
menu-item-fxa = Håndter din { -brand-fx-account }
menu-item-fxa-2 = Håndter din { -brand-mozilla-account }
# Deprecated
menu-item-fxa-alt = Åbn siden { -brand-fx-account }
menu-item-fxa-alt-2 = Åbn siden { -brand-mozilla-account }
menu-item-settings = Indstillinger
menu-item-settings-alt = Åbn siden Indstillinger
menu-item-help = Hjælp og support

View file

@ -50,8 +50,12 @@ settings-cancel-premium-subscription-link-label = Annuller fra din { -brand-fx-a
## Deactivate account
settings-deactivate-account-title = Deaktiver konto
# Deprecated
settings-deactivate-account-info = Du kan deaktivere { -product-short-name } ved at slette din { -brand-fx-account }.
settings-deactivate-account-info-2 = Du kan deaktivere { -product-short-name } ved at slette din { -brand-mozilla-account }.
# Deprecated
settings-fxa-link-label = Gå til { -brand-firefox }-indstillinger
settings-fxa-link-label-3 = Gå til indstillingerne for { -brand-mozilla-account }
## Add email dialog

View file

@ -47,7 +47,7 @@ settings-deactivate-account-info = Bạn có thể hủy kích hoạt { -product
settings-deactivate-account-info-2 = Bạn có thể vô hiệu hóa { -product-short-name } bằng cách xoá { -brand-mozilla-account } của bạn.
# Deprecated
settings-fxa-link-label = Đi đến cài đặt { -brand-firefox }
settings-fxa-link-label-2 = Đi đến cài đặt { -brand-mozilla }
settings-fxa-link-label-3 = Đi đến cài đặt { -brand-mozilla-account }
## Add email dialog

28
package-lock.json generated
View file

@ -16,7 +16,7 @@
"@fluent/react": "^0.15.2",
"@google-cloud/logging-winston": "^6.0.0",
"@google-cloud/pubsub": "^4.0.6",
"@grpc/grpc-js": "1.9.6",
"@grpc/grpc-js": "1.9.7",
"@leeoniya/ufuzzy": "^1.0.11",
"@mozilla/glean": "2.0.5",
"@sentry/nextjs": "^7.74.1",
@ -35,7 +35,7 @@
"knex": "^3.0.1",
"next": "^13.5.6",
"next-auth": "^4.24.3",
"nodemailer": "^6.9.6",
"nodemailer": "^6.9.7",
"patch-package": "^8.0.0",
"pg": "^8.11.3",
"react": "^18.2.0",
@ -4257,9 +4257,9 @@
}
},
"node_modules/@grpc/grpc-js": {
"version": "1.9.6",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.9.6.tgz",
"integrity": "sha512-yq3qTy23u++8zdvf+h4mz4ohDFi681JAkMZZPTKh8zmUVh0AKLisFlgxcn22FMNowXz15oJ6pqgwT7DJ+PdJvg==",
"version": "1.9.7",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.9.7.tgz",
"integrity": "sha512-yMaA/cIsRhGzW3ymCNpdlPcInXcovztlgu/rirThj2b87u3RzWUszliOqZ/pldy7yhmJPS8uwog+kZSTa4A0PQ==",
"dependencies": {
"@grpc/proto-loader": "^0.7.8",
"@types/node": ">=12.12.47"
@ -23327,9 +23327,9 @@
"devOptional": true
},
"node_modules/nodemailer": {
"version": "6.9.6",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.6.tgz",
"integrity": "sha512-s7pDtWwe5fLMkQUhw8TkWB/wnZ7SRdd9HRZslq/s24hlZvBP3j32N/ETLmnqTpmj4xoBZL9fOWyCIZ7r2HORHg==",
"version": "6.9.7",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.7.tgz",
"integrity": "sha512-rUtR77ksqex/eZRLmQ21LKVH5nAAsVicAtAYudK7JgwenEDZ0UIQ1adUGqErz7sMkWYxWTTU1aeP2Jga6WQyJw==",
"engines": {
"node": ">=6.0.0"
}
@ -32474,9 +32474,9 @@
}
},
"@grpc/grpc-js": {
"version": "1.9.6",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.9.6.tgz",
"integrity": "sha512-yq3qTy23u++8zdvf+h4mz4ohDFi681JAkMZZPTKh8zmUVh0AKLisFlgxcn22FMNowXz15oJ6pqgwT7DJ+PdJvg==",
"version": "1.9.7",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.9.7.tgz",
"integrity": "sha512-yMaA/cIsRhGzW3ymCNpdlPcInXcovztlgu/rirThj2b87u3RzWUszliOqZ/pldy7yhmJPS8uwog+kZSTa4A0PQ==",
"requires": {
"@grpc/proto-loader": "^0.7.8",
"@types/node": ">=12.12.47"
@ -46807,9 +46807,9 @@
"devOptional": true
},
"nodemailer": {
"version": "6.9.6",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.6.tgz",
"integrity": "sha512-s7pDtWwe5fLMkQUhw8TkWB/wnZ7SRdd9HRZslq/s24hlZvBP3j32N/ETLmnqTpmj4xoBZL9fOWyCIZ7r2HORHg=="
"version": "6.9.7",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.7.tgz",
"integrity": "sha512-rUtR77ksqex/eZRLmQ21LKVH5nAAsVicAtAYudK7JgwenEDZ0UIQ1adUGqErz7sMkWYxWTTU1aeP2Jga6WQyJw=="
},
"normalize-package-data": {
"version": "3.0.3",

View file

@ -53,7 +53,7 @@
"@fluent/react": "^0.15.2",
"@google-cloud/logging-winston": "^6.0.0",
"@google-cloud/pubsub": "^4.0.6",
"@grpc/grpc-js": "1.9.6",
"@grpc/grpc-js": "1.9.7",
"@leeoniya/ufuzzy": "^1.0.11",
"@mozilla/glean": "2.0.5",
"@sentry/nextjs": "^7.74.1",
@ -72,7 +72,7 @@
"knex": "^3.0.1",
"next": "^13.5.6",
"next-auth": "^4.24.3",
"nodemailer": "^6.9.6",
"nodemailer": "^6.9.7",
"patch-package": "^8.0.0",
"pg": "^8.11.3",
"react": "^18.2.0",

View file

@ -7,6 +7,7 @@
import { signIn } from "next-auth/react";
import { usePathname } from "next/navigation";
import { useL10n } from "../../../hooks/l10n";
import { useEffect } from "react";
export type Props = {
autoSignIn?: boolean;
@ -19,10 +20,15 @@ function initSignIn(callbackUrl: string) {
export const SignInButton = ({ autoSignIn }: Props) => {
const l10n = useL10n();
const pathname = usePathname();
const callbackUrl = pathname === "/" ? "/user/breaches" : pathname;
useEffect(() => {
if (autoSignIn) {
initSignIn(callbackUrl);
}
}, [autoSignIn, callbackUrl]);
if (autoSignIn) {
initSignIn(callbackUrl);
return null;
}

View file

@ -909,7 +909,7 @@ it("shows the correct dashboard banner CTA for US users, without Premium, empty
const dashboardTopBanner = screen.getByRole("region", {
name: "Dashboard summary",
});
const dashboardTopBannerCta = getByRole(dashboardTopBanner, "link", {
const dashboardTopBannerCta = getByRole(dashboardTopBanner, "button", {
name: "Get continuous protection",
});
expect(dashboardTopBannerCta).toBeInTheDocument();
@ -1728,3 +1728,19 @@ it("expands one card at a time", async () => {
const afterExpand2 = screen.getAllByRole("button", { name: "Expand" });
expect(afterExpand1.length).toBe(afterExpand2.length);
});
it("closes previously active card onclick", async () => {
const user = userEvent.setup();
const ComposedDashboard = composeStory(
DashboardUsPremiumUnresolvedScanUnresolvedBreaches,
Meta,
);
render(<ComposedDashboard />);
const initialState = screen.getAllByRole("button", { name: "Expand" });
await user.click(initialState[0]);
const afterExpand = screen.getAllByRole("button", { name: "Collapse" });
await user.click(afterExpand[0]);
const afterCollapse = screen.getAllByRole("button", { name: "Expand" });
expect(initialState.length).toBe(afterCollapse.length);
});

View file

@ -188,7 +188,7 @@ export const DashboardTopBannerContent = (props: DashboardTopBannerProps) => {
</p>
<div className={styles.cta}>
<Button
href="/redesign/user/welcome/free-scan"
href="/redesign/user/welcome/free-scan?referrer=dashboard"
small
variant="primary"
>
@ -228,9 +228,7 @@ export const DashboardTopBannerContent = (props: DashboardTopBannerProps) => {
)}
</p>
<div className={styles.cta}>
<Button href={relevantGuidedStep.href} small variant="primary">
{l10n.getString("dashboard-top-banner-no-exposures-found-cta")}
</Button>
<PremiumButton label="dashboard-top-banner-no-exposures-found-cta" />
</div>
</>
);
@ -306,11 +304,7 @@ export const DashboardTopBannerContent = (props: DashboardTopBannerProps) => {
)}
</p>
<div className={styles.cta}>
<PremiumButton
label={
"dashboard-top-banner-your-data-is-protected-all-fixed-cta"
}
/>
<PremiumButton label="dashboard-top-banner-no-exposures-found-cta" />
</div>
</>
);
@ -485,7 +479,7 @@ export const DashboardTopBannerContent = (props: DashboardTopBannerProps) => {
)}
</p>
<div className={styles.cta}>
<Button href={relevantGuidedStep.href} small variant="primary">
<Button href="/redesign/user/settings" small variant="primary">
{l10n.getString(
"dashboard-top-banner-scan-in-progress-no-results-cta",
)}

View file

@ -124,7 +124,13 @@ export const View = (props: Props) => {
<ExposureCard
exposureData={exposure}
isExpanded={exposureCardKey === activeExposureCardKey}
setExpanded={() => setActiveExposureCardKey(exposureCardKey)}
setExpanded={() => {
if (exposureCardKey === activeExposureCardKey) {
setActiveExposureCardKey("");
} else {
setActiveExposureCardKey(exposureCardKey);
}
}}
locale={getLocale(l10n)}
isPremiumBrokerRemovalEnabled={props.enabledFeatureFlags.includes(
"PremiumBrokerRemoval",

View file

@ -90,3 +90,16 @@ it("expands one card at a time", async () => {
const expandButton2 = screen.getAllByRole("button", { name: "Expand" });
expect(expandButton.length).toBe(expandButton2.length);
});
it("closes previously active card onclick", async () => {
const user = userEvent.setup();
global.fetch = jest.fn().mockResolvedValueOnce({ ok: true });
const ComposedManualRemoveView = composeStory(ManualRemoveViewStory, Meta);
render(<ComposedManualRemoveView />);
const initialState = screen.getAllByRole("button", { name: "Expand" });
const afterExpand = screen.getAllByRole("button", { name: "Collapse" });
await user.click(afterExpand[0]);
const afterCollapse = screen.getAllByRole("button", { name: "Expand" });
expect(initialState.length).toBe(afterCollapse.length - 1);
});

View file

@ -83,7 +83,10 @@
display: flex;
flex-direction: column;
gap: $spacing-md;
padding-inline: $spacing-md;
@media screen and (min-width: $screen-md) {
padding-inline: $spacing-lg;
}
}
}

View file

@ -141,7 +141,13 @@ export function ManualRemoveView(props: Props) {
scanResult={scanResult}
isExpanded={index === activeExposureCardKey}
isPremiumUser={props.isPremiumUser}
setExpanded={() => setActiveExposureCardKey(index)}
setExpanded={() => {
if (index === activeExposureCardKey) {
setActiveExposureCardKey(-1);
} else {
setActiveExposureCardKey(index);
}
}}
/>
);
})}

View file

@ -62,7 +62,10 @@ export function StartFreeScanView(props: Props) {
</p>
</div>
<div className={styles.buttonsWrapper}>
<Button variant="primary" href="/redesign/user/welcome/free-scan">
<Button
variant="primary"
href="/redesign/user/welcome/free-scan?referrer=fix"
>
{l10n.getString(
"fix-flow-data-broker-profiles-start-free-scan-button-start-scan",
)}

View file

@ -5,6 +5,7 @@
import styles from "./welcomeToPremium.module.scss";
import { getL10n } from "../../../../../../../../functions/server/l10n";
import { PercentageChart } from "../../../../../../../../components/client/PercentageChart";
import { SubscriptionCheck } from "../../../../../../../../components/client/SubscriptionCheck";
import {
getDashboardSummary,
getExposureReduction,
@ -85,6 +86,9 @@ export function WelcomeToPremiumView(props: Props) {
<div className={styles.chart}>
<PercentageChart exposureReduction={exposureReduction} />
</div>
<div>
<SubscriptionCheck />
</div>
</div>
</FixView>
);

View file

@ -13,20 +13,15 @@ import { WelcomeToPremiumView } from "./WelcomeToPremiumView";
import { getSubscriberEmails } from "../../../../../../../../functions/server/getSubscriberEmails";
import { StepDeterminationData } from "../../../../../../../../functions/server/getRelevantGuidedSteps";
import { getCountryCode } from "../../../../../../../../functions/server/getCountryCode";
import { hasPremium } from "../../../../../../../../functions/universal/user";
export default async function WelcomeToPremiumPage() {
const session = await getServerSession(authOptions);
// Ensure user is logged in
if (!session?.user?.subscriber?.id) {
redirect("/redesign/user/dashboard/");
}
// The user may have subscribed and just need their session updated - they will be redirected back to try again if it looks valid.
if (!hasPremium(session.user)) {
redirect(`${process.env.NEXTAUTH_URL}/redesign/user/dashboard/subscribed`);
}
const result = await getOnerepProfileId(session.user.subscriber.id);
const profileId = result[0]["onerep_profile_id"] as number;
const scanData = await getLatestOnerepScanResults(profileId);

View file

@ -14,7 +14,7 @@
.fixWrapper {
max-width: $content-xl;
width: 100%;
padding: $layout-sm;
padding: $layout-xs;
background-color: $color-white;
background-size: cover;
background-position: center;

View file

@ -1,51 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use client";
import { useSession } from "next-auth/react";
import { useEffect } from "react";
import { useRouter } from "next/navigation";
import { hasPremium } from "../../../../../../functions/universal/user";
import { captureException } from "@sentry/browser";
import { useL10n } from "../../../../../../hooks/l10n";
/**
* Client-side page to update session info.
*
* Next-Auth does not have a simple way to do this purely from the server-side, so we
* use this page to check and redirect appropriately.
*
* NOTE: this does not replace doing server-side `hasPremium` checks! This is just
* a convenience so users do not need to sign out and back in to refresh their session
* after subscribing.
*/
export default function Subscribed() {
const l10n = useL10n();
const { update } = useSession();
const router = useRouter();
useEffect(() => {
async function updateSession() {
try {
const result = await update();
if (hasPremium(result?.user)) {
router.replace(
`/redesign/user/dashboard/fix/data-broker-profiles/welcome-to-premium`,
);
} else {
router.replace(`/`);
}
} catch (ex) {
console.error(ex);
captureException(ex);
router.replace(`/`);
}
}
void updateSession();
}, [update, router]);
return <div>{l10n.getString("subscription-check-loading")}</div>;
}

View file

@ -54,9 +54,16 @@ export type Props = {
onScanStarted: () => void;
onGoBack: () => void;
user: Session["user"];
skipInitialStep: boolean;
previousRoute: string | null;
};
export const EnterInfo = ({ onScanStarted, onGoBack }: Props) => {
export const EnterInfo = ({
onScanStarted,
onGoBack,
skipInitialStep,
previousRoute,
}: Props) => {
const [firstName, setFirstName] = useState("");
const [lastName, setLastName] = useState("");
const [location, setLocation] = useState("");
@ -360,14 +367,16 @@ export const EnterInfo = ({ onScanStarted, onGoBack }: Props) => {
)}
</div>
<div className={styles.stepButtonWrapper}>
<Button
variant="secondary"
onPress={() => onGoBack()}
className={styles.startButton}
type="button"
>
{l10n.getString("onboarding-steps-enter-info-back")}
</Button>
{(!skipInitialStep || (skipInitialStep && previousRoute)) && (
<Button
variant="secondary"
onPress={() => onGoBack()}
className={styles.startButton}
type="button"
>
{l10n.getString("onboarding-steps-enter-info-back")}
</Button>
)}
<Button
{...confirmDialogTrigger.triggerProps}
variant="primary"

View file

@ -13,6 +13,7 @@ import { useL10n } from "../../../../../hooks/l10n";
export type Props = {
dataBrokerCount: number;
breachesTotalCount: number;
previousRoute: string;
};
const getCurrentScanCountForRange = ({
@ -43,6 +44,7 @@ const getCurrentScanCountForRange = ({
export const FindExposures = ({
dataBrokerCount,
breachesTotalCount,
previousRoute,
}: Props) => {
const [scanProgress, setScanProgress] = useState(0);
const [scanFinished, setScanFinished] = useState(false);
@ -92,7 +94,7 @@ export const FindExposures = ({
// TODO: Add unit test when changing this code:
/* c8 ignore next 3 */
if (scanProgress >= maxProgress) {
router.push("/redesign/user/dashboard/");
router.push(previousRoute);
}
return () => clearTimeout(timeoutId);
@ -102,6 +104,7 @@ export const FindExposures = ({
checkingScanProgress,
scanFinished,
percentageSteps,
previousRoute,
]);
function ProgressLabel() {

View file

@ -20,6 +20,7 @@ export const Onboarding: Story = {
user={{ email: "example@example.com" }}
dataBrokerCount={190}
breachesTotalCount={678}
previousRoute={props.previousRoute}
/>
),
};

View file

@ -12,6 +12,7 @@ import Meta, { Onboarding } from "./Onboarding.stories";
jest.mock("next/navigation", () => ({
useRouter: () => ({
back: jest.fn(),
push: jest.fn(),
}),
}));
@ -227,11 +228,26 @@ it("shows a condensed version of the onboarding skipping step “Get started”"
expect(proceedButton).toBeInTheDocument();
});
it("does not navigate back to step 1 of the onboarding when directly linking to the `enterInfo` step", async () => {
const user = userEvent.setup();
it("does not show the go back button on step 2 of the onboarding when there is no previous route", () => {
const ComposedOnboarding = composeStory(Onboarding, Meta);
render(<ComposedOnboarding stepId="enterInfo" />);
const backButton = screen.queryByRole("button", {
name: "Go back",
});
expect(backButton).not.toBeInTheDocument();
});
it("does not navigate back to step 1 of the onboarding when directly linking to the `enterInfo` step if there is previous route", async () => {
const user = userEvent.setup();
const ComposedOnboarding = composeStory(Onboarding, Meta);
render(
<ComposedOnboarding
stepId="enterInfo"
previousRoute="/redesign/user/dashboard/"
/>,
);
const backButton = screen.getByRole("button", {
name: "Go back",
});

View file

@ -27,6 +27,7 @@ export type Props = {
dataBrokerCount: number;
breachesTotalCount: number;
stepId?: StepId;
previousRoute: string | null;
};
export const View = ({
@ -34,6 +35,7 @@ export const View = ({
dataBrokerCount,
breachesTotalCount,
stepId = "getStarted",
previousRoute,
}: Props) => {
const l10n = useL10n();
const skipInitialStep = stepId === "enterInfo";
@ -45,6 +47,7 @@ export const View = ({
<FindExposures
dataBrokerCount={dataBrokerCount}
breachesTotalCount={breachesTotalCount}
previousRoute={previousRoute ?? "/redesign/user/dashboard"}
/>
) : currentStep === "enterInfo" ? (
<EnterInfo
@ -52,9 +55,15 @@ export const View = ({
// TODO: Add unit test when changing this code:
/* c8 ignore next */
onScanStarted={() => setCurrentStep("findExposures")}
onGoBack={() =>
skipInitialStep ? router.back() : setCurrentStep("getStarted")
}
previousRoute={previousRoute}
skipInitialStep={skipInitialStep}
onGoBack={() => {
if (skipInitialStep && previousRoute) {
router.push(previousRoute);
} else {
setCurrentStep("getStarted");
}
}}
/>
) : (
<GetStarted

View file

@ -4,13 +4,14 @@
import { getServerSession } from "next-auth";
import { SignInButton } from "../../../../../../(nextjs_migration)/components/client/SignInButton";
import { notFound, redirect } from "next/navigation";
import { notFound } from "next/navigation";
import { isEligibleForFreeScan } from "../../../../../../functions/server/onerep";
import { View } from "../View";
import { getAllBreachesCount } from "../../../../../../../db/tables/breaches";
import { getCountryCode } from "../../../../../../functions/server/getCountryCode";
import { headers } from "next/headers";
import { authOptions } from "../../../../../../api/utils/auth";
import { getReferrerUrl } from "../../../../../../functions/server/getReferrerUrl";
const FreeScanSlug = "free-scan" as const;
@ -18,9 +19,12 @@ type Props = {
params: {
slug: string[] | undefined;
};
searchParams: {
referrer?: string;
};
};
export default async function Onboarding({ params }: Props) {
export default async function Onboarding({ params, searchParams }: Props) {
const session = await getServerSession(authOptions);
if (!session) {
return <SignInButton autoSignIn={true} />;
@ -42,22 +46,28 @@ export default async function Onboarding({ params }: Props) {
);
if (!userIsEligible) {
return redirect("/");
throw new Error(
`Subscriber not eligible for free scan, ID: ${session?.user?.subscriber?.id}`,
);
}
const allBreachesCount = await getAllBreachesCount();
const headersList = headers();
const previousRoute = getReferrerUrl({
headers: headersList,
referrerParam: searchParams.referrer,
});
return (
<>
<View
user={session.user}
dataBrokerCount={parseInt(
process.env.NEXT_PUBLIC_ONEREP_DATA_BROKER_COUNT as string,
10,
)}
breachesTotalCount={allBreachesCount}
stepId={firstSlug === FreeScanSlug ? "enterInfo" : "getStarted"}
/>
</>
<View
user={session.user}
dataBrokerCount={parseInt(
process.env.NEXT_PUBLIC_ONEREP_DATA_BROKER_COUNT as string,
10,
)}
breachesTotalCount={allBreachesCount}
stepId={firstSlug === FreeScanSlug ? "enterInfo" : "getStarted"}
previousRoute={previousRoute}
/>
);
}

View file

@ -25,7 +25,12 @@
padding: 0;
cursor: pointer;
color: rgba($color-purple-70, 0.7);
height: 20px; //height of down chevron
width: 15px; // size of the chevron on mobile
height: 20px; // height of down chevron
@media screen and (min-width: $screen-md) {
width: 20px; // size of the chevron on larger devices
}
.isOpen {
transition: transform 0.2s ease-out;
@ -74,6 +79,10 @@
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
&.makeFontSmaller {
font: $text-body-2xs;
}
}
.exposureImageWrapper {
@ -176,17 +185,18 @@
margin-left: 0;
padding-left: 0;
display: grid;
grid-template-columns: 1fr 1fr;
grid-template-columns: 1fr;
grid-row-gap: $spacing-sm;
@media screen and (min-width: $screen-sm) {
@media screen and (min-width: $screen-md) {
grid-template-columns: 1fr 1fr 1fr;
gap: $spacing-sm;
}
@media screen and (min-width: $screen-lg) {
display: flex;
flex-direction: row;
align-items: start;
align-items: center;
gap: $layout-xs;
flex-wrap: wrap;
justify-content: flex-start;

View file

@ -137,6 +137,10 @@ const ScanResultCard = (props: ScanResultCardProps) => {
);
}
const COMPANY_NAME_MAX_CHARACTER_COUNT = 20;
const isCompanyNameTooLong =
scanResult.data_broker.length > COMPANY_NAME_MAX_CHARACTER_COUNT;
const exposureCard = (
<div>
<div className={styles.exposureCard}>
@ -155,7 +159,10 @@ const ScanResultCard = (props: ScanResultCardProps) => {
</dt>
<dd>
<span
className={`${styles.exposureCompanyTitle} ${styles.companyNameArea}`}
className={`${styles.exposureCompanyTitle} ${
styles.companyNameArea
}
${isCompanyNameTooLong ? styles.makeFontSmaller : ""}`}
>
{scanResult.data_broker}
</span>
@ -246,7 +253,9 @@ const ScanResultCard = (props: ScanResultCardProps) => {
</div>
</div>
{isPremiumBrokerRemovalEnabled &&
props.scanResult.status === "new" ? (
// The status of automatically removed data brokers vs. manually resolved ones is handled differently
props.scanResult.status === "new" &&
!props.scanResult.manually_resolved ? (
<span className={styles.fixItBtn}>{props.resolutionCta}</span>
) : null}
</div>

View file

@ -4,7 +4,7 @@
"use client";
import { useContext } from "react";
import { useContext, useEffect } from "react";
import Image from "next/image";
import { usePathname } from "next/navigation";
import { Session } from "next-auth";
@ -21,6 +21,7 @@ import ShieldIcon from "./assets/shield-icon.svg";
import styles from "./PremiumBadge.module.scss";
import { useGa } from "../../hooks/useGa";
import { CountryCodeContext } from "../../../contextProviders/country-code";
import { useSession } from "next-auth/react";
export type Props = {
user: Session["user"];
@ -56,6 +57,18 @@ export default function PremiumBadge({
dialogState,
);
const { update } = useSession();
useEffect(() => {
async function updateSession() {
await update();
}
void updateSession();
// This should only run once per page load - `update` will always appear to be changed.
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
if (hasPremium(user)) {
return (
<div className={styles.badge}>

View file

@ -0,0 +1,29 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use client";
import { getSession, useSession } from "next-auth/react";
import { useEffect } from "react";
import { hasPremium } from "../../functions/universal/user";
export const SubscriptionCheck = () => {
const { update } = useSession();
useEffect(() => {
async function updateSession() {
const session = await getSession();
if (!hasPremium(session?.user)) {
await update();
}
}
void updateSession();
// This should only run once per page load - `update` will always appear to be changed.
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
return <></>;
};

View file

@ -1,12 +1,14 @@
@import "../../tokens";
.pill {
font: $text-body-xs;
font: $text-body-2xs;
font-weight: 600;
display: inline-block;
min-width: 90px; // keep the width fixed
min-width: 80px; // keep the width fixed
@media screen and (min-width: $screen-md) {
font: $text-body-xs;
font-weight: 600;
min-width: 120px; // keep the width fixed
}
text-align: center;

View file

@ -0,0 +1,32 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import type { headers as headersGetter } from "next/headers";
type ReferrerUrlProps = {
headers: ReturnType<typeof headersGetter>;
referrerParam: string | undefined;
};
export function getReferrerUrl({
headers,
referrerParam,
}: ReferrerUrlProps): string | null {
const referrer = headers.get("referer");
const serverUrl = process.env.SERVER_URL as string;
const isRouteWithinMonitor = referrer && referrer.includes(serverUrl);
if (isRouteWithinMonitor) {
return referrer.replace(serverUrl, "");
}
switch (referrerParam) {
case "dashboard":
return "/redesign/user/dashboard";
case "fix":
return "/redesign/user/dashboard/fix/data-broker-profiles/start-free-scan";
default:
return null;
}
}
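
A minimal usage sketch for this helper, illustrative only: the import path and sample values are assumptions, SERVER_URL stands in for the deployment origin, and a plain Headers object substitutes for the one returned by next/headers (only .get() is used at runtime).

// sketch.js — not part of the commit
import { getReferrerUrl } from "./getReferrerUrl"; // assumed relative path
process.env.SERVER_URL = "https://monitor.example.com"; // assumed origin

// Referer points back into Monitor: the origin is stripped and the path returned.
getReferrerUrl({
  headers: new Headers({ referer: "https://monitor.example.com/redesign/user/dashboard" }),
  referrerParam: undefined,
}); // -> "/redesign/user/dashboard"

// No usable referer header: fall back to the ?referrer= query parameter.
getReferrerUrl({ headers: new Headers(), referrerParam: "fix" });
// -> "/redesign/user/dashboard/fix/data-broker-profiles/start-free-scan"

// Neither source matches: the caller gets null and picks its own default.
getReferrerUrl({ headers: new Headers(), referrerParam: undefined }); // -> null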

View file

@ -14,10 +14,8 @@ const loggingWinston = new LoggingWinston({
export const logger = createLogger({
level: "info",
transports: [new transports.Console()],
// In GCP environments, use cloud logging instead of stdout.
transports: ["stage", "production"].includes(process.env.APP_ENV ?? "local")
? [loggingWinston]
: [new transports.Console()],
});
// In GCP environments, use cloud logging instead of stdout.
if (["stage", "production"].includes(process.env.APP_ENV ?? "local")) {
logger.transports = [loggingWinston];
}

View file

@ -262,6 +262,9 @@ $text-body-sm:
$text-body-xs:
400 12px / 1.5 var(--font-inter),
sans-serif;
$text-body-2xs:
400 10px / 1.5 var(--font-inter),
sans-serif;
$tab-bar-height: 100px;
$width-first-column-filter-bar: 90px;

View file

@ -0,0 +1,33 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
export async function up(knex) {
await knex.schema
.createTable("email_notifications", table => {
table.increments("id").primary();
table.integer("subscriber_id").references("subscribers.id").notNullable();
table.integer("breach_id").references("breaches.id").notNullable();
table.boolean("appeared").notNullable().defaultTo(true);
table.boolean("notified").notNullable().defaultTo(false);
table.string("email").notNullable()
table.string("notification_type").notNullable().defaultTo("incident"); // incident, monthly
table.timestamp("created_at").defaultTo(knex.fn.now());
table.timestamp("updated_at").defaultTo(knex.fn.now());
table.index("subscriber_id");
table.index("breach_id");
table.unique(["subscriber_id", "breach_id", "email"]); // unique composite key
});
}
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
export async function down(knex) {
await knex.schema.dropTable("email_notifications");
}
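
To illustrate the dedupe guarantee the unique composite key gives this table, a hedged sketch (assumes a knex instance built from the project's knexfile, and that subscriber 1 and breach 42 already exist so the foreign keys are satisfied; all values are made up):

// sketch.js — illustrative only
import initKnex from "knex";
import knexConfig from "../knexfile.js"; // assumed relative path
const knex = initKnex(knexConfig);

// First insert succeeds; appeared, notified, and notification_type fall back to their defaults.
await knex("email_notifications").insert({
  subscriber_id: 1,
  breach_id: 42,
  email: "jane@example.com",
});

// A second insert for the same (subscriber_id, breach_id, email) triple violates the
// unique composite key, so the returned promise rejects with a database error.
await knex("email_notifications")
  .insert({ subscriber_id: 1, breach_id: 42, email: "jane@example.com" })
  .catch((e) => console.error("duplicate notification row rejected:", e.message));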

View file

@ -0,0 +1,127 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import initKnex from "knex";
import knexConfig from "../knexfile.js";
const knex = initKnex(knexConfig);
/**
* @param {number} subscriberId
*/
async function getAllEmailNotificationsForSubscriber(subscriberId){
console.info("getAllEmailNotificationsForSubscriber: ", subscriberId);
return await knex.transaction(trx => {
return trx('email_notifications')
.forUpdate()
.select()
.where("subscriber_id", subscriberId)
.orderBy("id");
})
}
/**
* @param {number} subscriberId
* @param {number} breachId
* @param {string} email
*/
async function getEmailNotification(
subscriberId,
breachId,
email
){
console.info(
`getEmailNotification for subscriber: ${subscriberId}, breach: ${breachId}`,
);
const res = await knex.transaction(trx => {
return trx('email_notifications')
.forUpdate()
.select()
.where("subscriber_id", subscriberId)
.andWhere("breach_id", breachId)
.andWhere("email", email);
})
if (res.length > 1) {
console.error(
"More than one entry for subscriber/breach email notification: ",
res,
);
}
return res?.[0] || null;
}
/**
* @param {number} breachId
*/
async function getNotifiedSubscribersForBreach(
breachId
){
console.info(
`getEmailNotificationSubscribersForBreach for breach: ${breachId}`,
);
const res = await knex.transaction(trx => {
return trx('email_notifications')
.forUpdate()
.select("subscriber_id")
.where("notified", true)
.andWhere("breach_id", breachId);
})
return res.map((row) => row.subscriber_id);
}
/**
* @param {{ subscriberId: number; breachId: number; notified: boolean; email: string; notificationType: string; }} newNotification
*/
async function addEmailNotification(
newNotification
){
console.info(`addEmailNotification: ${JSON.stringify(newNotification)}`);
const emailNotificationDb = {
subscriber_id: newNotification.subscriberId,
breach_id: newNotification.breachId,
appeared: true,
notified: newNotification.notified || false,
email: newNotification.email,
notification_type: newNotification.notificationType,
};
const res = await knex("emails_notification")
.insert(emailNotificationDb)
.returning("*");
return res[0];
}
/**
* @param {number} subscriberId
* @param {number} breachId
* @param {string} email
*/
async function markEmailAsNotified(
subscriberId,
breachId,
email
) {
console.info(`markEmailAsNotified for breach: ${breachId}`);
await knex("email_notifications")
.where("subscriber_id", subscriberId)
.andWhere("breach_id", breachId)
.andWhere("email", email)
.update({
notified: true,
// @ts-ignore knex.fn.now() results in it being set to a date,
// even if it's not typed as a JS date object:
updated_at: knex.fn.now(),
});
}
export {
getAllEmailNotificationsForSubscriber,
getEmailNotification,
getNotifiedSubscribersForBreach,
addEmailNotification,
markEmailAsNotified
}
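
Condensed, the notify-once flow these helpers support looks like the sketch below; it mirrors the breach-alert cron changes later in this diff, and the identifiers are sample values, not real data.

// sketch.js — illustrative only
import {
  getNotifiedSubscribersForBreach,
  addEmailNotification,
  markEmailAsNotified,
} from "./email_notifications.js"; // assumed sibling path

const breachId = 42;
const subscriberId = 7;
const recipientEmail = "jane@example.com";

const alreadyNotified = await getNotifiedSubscribersForBreach(breachId);
if (!alreadyNotified.includes(subscriberId)) {
  // Record the pending notification first; a duplicate row trips the unique
  // (subscriber_id, breach_id, email) key and aborts the run.
  await addEmailNotification({
    subscriberId,
    breachId,
    notified: false,
    email: recipientEmail,
    notificationType: "incident",
  });
  // ...send the breach alert email here...
  await markEmailAsNotified(subscriberId, breachId, recipientEmail);
}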

View file

@ -89,7 +89,7 @@ async function addOnerepScanResults(
// Create a new scan if it does not already exist. If it already exists:
// Update the status of the scan.
logger.info("new_scan_created", {
logger.info("scan_created_or_updated", {
onerepScanId,
onerepScanReason,
onerepScanStatus,
@ -110,27 +110,29 @@ async function addOnerepScanResults(
updated_at: knex.fn.now(),
});
await transaction("onerep_scan_results").insert(
onerepScanResults.map((scanResult) => ({
onerep_scan_result_id: scanResult.id,
onerep_scan_id: scanResult.scan_id,
link: scanResult.link,
age:
typeof scanResult.age === "string"
? Number.parseInt(scanResult.age, 10)
: undefined,
data_broker: scanResult.data_broker,
data_broker_id: scanResult.data_broker_id,
emails: JSON.stringify(scanResult.emails),
phones: JSON.stringify(scanResult.phones),
addresses: JSON.stringify(scanResult.addresses),
relatives: JSON.stringify(scanResult.relatives),
first_name: scanResult.first_name,
middle_name: scanResult.middle_name,
last_name: scanResult.last_name,
status: scanResult.status,
})),
);
const scanResultsMap = onerepScanResults.map((scanResult) => ({
onerep_scan_result_id: scanResult.id,
onerep_scan_id: scanResult.scan_id,
link: scanResult.link,
age:
typeof scanResult.age === "string"
? Number.parseInt(scanResult.age, 10)
: undefined,
data_broker: scanResult.data_broker,
data_broker_id: scanResult.data_broker_id,
emails: JSON.stringify(scanResult.emails),
phones: JSON.stringify(scanResult.phones),
addresses: JSON.stringify(scanResult.addresses),
relatives: JSON.stringify(scanResult.relatives),
first_name: scanResult.first_name,
middle_name: scanResult.middle_name,
last_name: scanResult.last_name,
status: scanResult.status,
}));
logger.info("scan_result", scanResultsMap);
await transaction("onerep_scan_results").insert(scanResultsMap);
});
}

26
src/knex-tables.d.ts vendored
View file

@ -255,6 +255,22 @@ declare module "knex/types/tables" {
"id" | "created_at" | "updated_at"
>;
interface EmailNotificationRow {
id: number;
subscriber_id: number;
breach_id: number;
appeared: boolean;
notified: boolean;
email: string;
notification_type: string;
created_at: Date;
updated_at: Date;
}
type EmailNotificationAutoInsertedColumns = Extract<
keyof EmailNotificationRow,
"id" | "created_at" | "updated_at"
>;
interface Tables {
feature_flags: Knex.CompositeTableType<
FeatureFlagRow,
@ -343,5 +359,15 @@ declare module "knex/types/tables" {
Partial<Omit<OnerepProfileRow, "id" | "created_at">> &
Pick<OnerepProfileRow, "updated_at">
>;
email_notifications: Knex.CompositeTableType<
EmailNotificationRow,
// On inserts, auto-generated columns cannot be set:
Omit<EmailNotificationRow, EmailNotificationAutoInsertedColumns> &
Partial<EmailNotificationRow>,
// On updates, don't allow updating the ID and created date; all other fields are optional, except updated_at:
Partial<Omit<EmailNotificationRow, "id" | "created_at">> &
Pick<EmailNotificationRow, "updated_at">
>;
}
}

View file

@ -8,95 +8,115 @@
* with the goal of deprecating the column
*/
import Knex from 'knex'
import knexConfig from '../db/knexfile.js'
import { getAllBreachesFromDb } from '../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../utils/breaches.js'
import { setBreachResolution } from '../db/tables/subscribers.js'
import { BreachDataTypes } from '../utils/breach-resolution.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../db/knexfile.js";
import { getAllBreachesFromDb } from "../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../utils/breaches.js";
import { setBreachResolution } from "../db/tables/subscribers.js";
import { BreachDataTypes } from "../utils/breach-resolution.js";
const knex = Knex(knexConfig);
const LIMIT = 50 // with millions of records, we have to load a few at a time
let offset = 0 // looping through all records with offset
let subscribersArr = []
const LIMIT = 50; // with millions of records, we have to load a few at a time
let offset = 0; // looping through all records with offset
let subscribersArr = [];
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past, convert those
// records into the new v2 format
do {
console.log(`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`)
console.log(
`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`,
);
subscribersArr = await knex
.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.limit(LIMIT)
.offset(offset)
.offset(offset);
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
for (const subscriber of subscribersArr) {
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber
console.debug({ v1 })
console.debug({ v2 })
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber;
console.debug({ v1 });
console.debug({ v2 });
let isV2Changed = false // use a boolean to track if v2 has been changed, only upsert if so
let isV2Changed = false; // use a boolean to track if v2 has been changed, only upsert if so
// fetch subscriber all breaches / email
const subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
const subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails));
for (const [email, resolvedRecencyIndices] of Object.entries(v1)) {
console.debug({ email })
console.debug({ resolvedRecencyIndices })
console.debug({ email });
console.debug({ resolvedRecencyIndices });
for (const recencyIndex of resolvedRecencyIndices) {
console.debug({ recencyIndex })
console.debug({ recencyIndex });
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(ve => ve.email === email)[0] || {}
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
console.debug({ subBreach })
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(ve) => ve.email === email,
)[0] || {};
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
console.debug({ subBreach });
if (!subBreach || !subBreach.DataClasses) {
console.warn(`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`)
continue
console.warn(
`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`,
);
continue;
}
// if email does not exist in v2, we need to add it to the object
// format: {email: { recencyIndex: { isResolved: true, resolutionsChecked: [DataTypes]}}}
if (!v2) v2 = {}
if (!v2) v2 = {};
if (!v2[email]) {
v2[email] = {
[recencyIndex]: {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses || [BreachDataTypes.General]
}
}
resolutionsChecked: subBreach?.DataClasses || [
BreachDataTypes.General,
],
},
};
isV2Changed = true
isV2Changed = true;
}
if (v2[email][recencyIndex]?.isResolved) {
console.log(`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`)
console.log(
`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`,
);
} else {
console.log(`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`)
console.log(
`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`,
);
v2[email][recencyIndex] = {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses
}
isV2Changed = true
resolutionsChecked: subBreach?.DataClasses,
};
isV2Changed = true;
}
}
}
// check if v2 is changed, if so, upsert the new v2
if (isV2Changed) {
await setBreachResolution(subscriber, v2)
await setBreachResolution(subscriber, v2);
}
}
offset += LIMIT
} while (subscribersArr.length === LIMIT)
offset += LIMIT;
} while (subscribersArr.length === LIMIT);
// breaking out of do..while loop
console.log('Reaching the end of the table, offset ended at', offset)
process.exit()
console.log("Reaching the end of the table, offset ended at", offset);
process.exit();
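
For reference, the two shapes this script converts between, with made-up values; the exact data-class strings are placeholders, not the real BreachDataTypes values.

// v1 `breaches_resolved`: email -> array of resolved recency indices
const v1 = { "jane@example.com": [3, 17] };

// v2 `breach_resolution`: email -> recencyIndex -> { isResolved, resolutionsChecked }
const v2 = {
  "jane@example.com": {
    3: { isResolved: true, resolutionsChecked: ["Email addresses", "Passwords"] },
    17: { isResolved: true, resolutionsChecked: ["General"] },
  },
};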

View file

@ -2,18 +2,28 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import Sentry from "@sentry/nextjs"
import Sentry from "@sentry/nextjs";
import { acceptedLanguages, negotiateLanguages } from "@fluent/langneg";
import { localStorage } from '../utils/localStorage.js'
import { localStorage } from "../utils/localStorage.js";
import * as pubsub from "@google-cloud/pubsub";
import * as grpc from "@grpc/grpc-js";
import { getSubscribersByHashes, knexSubscribers } from "../db/tables/subscribers.js";
import { getEmailAddressesByHashes, knexEmailAddresses } from "../db/tables/emailAddresses.js";
import {
getSubscribersByHashes,
knexSubscribers,
} from "../db/tables/subscribers.js";
import {
getEmailAddressesByHashes,
knexEmailAddresses,
} from "../db/tables/emailAddresses.js";
import {
getNotifiedSubscribersForBreach,
addEmailNotification,
markEmailAsNotified,
} from "../db/tables/email_notifications.js";
import { getTemplate } from "../views/emails/email2022.js";
import { breachAlertEmailPartial } from "../views/emails/emailBreachAlert.js";
import {
initEmail,
EmailTemplateType,
@ -26,7 +36,7 @@ import {
getAddressesAndLanguageForEmail,
getBreachByName,
getAllBreachesFromDb,
knexHibp
knexHibp,
} from "../utils/hibp.js";
const SENTRY_SLUG = "cron-breach-alerts";
@ -44,7 +54,9 @@ const checkInId = Sentry.captureCheckIn({
// Only process this many messages before exiting.
/* c8 ignore start */
const maxMessages = parseInt(process.env.EMAIL_BREACH_ALERT_MAX_MESSAGES || 10000);
const maxMessages = parseInt(
process.env.EMAIL_BREACH_ALERT_MAX_MESSAGES || 10000,
);
/* c8 ignore stop */
const projectId = process.env.GCP_PUBSUB_PROJECT_ID;
const subscriptionName = process.env.GCP_PUBSUB_SUBSCRIPTION_NAME;
@ -62,7 +74,7 @@ const subscriptionName = process.env.GCP_PUBSUB_SUBSCRIPTION_NAME;
export async function poll(subClient, receivedMessages) {
const formattedSubscription = subClient.subscriptionPath(
projectId,
subscriptionName
subscriptionName,
);
// Process the messages. Skip any that cannot be processed, and do not mark as acknowledged.
@ -72,7 +84,7 @@ export async function poll(subClient, receivedMessages) {
if (!(data.breachName && data.hashPrefix && data.hashSuffixes)) {
console.error(
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes."
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.",
);
continue;
}
@ -82,7 +94,13 @@ export async function poll(subClient, receivedMessages) {
const breaches = await getAllBreachesFromDb();
const breachAlert = getBreachByName(breaches, breachName);
const { IsVerified, Domain, IsFabricated, IsSpamList } = breachAlert;
const {
IsVerified,
Domain,
IsFabricated,
IsSpamList,
Id: breachId,
} = breachAlert;
// If any of the following conditions are not satisfied:
// Do not send the breach alert email! The `logId`s are being used for
@ -107,7 +125,7 @@ export async function poll(subClient, receivedMessages) {
];
const unsatisfiedConditions = emailDeliveryConditions.filter(
(condition) => condition.condition
(condition) => condition.condition,
);
const doNotSendEmail = unsatisfiedConditions.length > 0;
@ -134,7 +152,7 @@ export async function poll(subClient, receivedMessages) {
try {
const reqHashPrefix = hashPrefix.toLowerCase();
const hashes = hashSuffixes.map(
(suffix) => reqHashPrefix + suffix.toLowerCase()
(suffix) => reqHashPrefix + suffix.toLowerCase(),
);
const subscribers = await getSubscribersByHashes(hashes);
@ -152,10 +170,16 @@ export async function poll(subClient, receivedMessages) {
for (const recipient of recipients) {
console.info("notify", { recipient });
const notifiedSubs = await getNotifiedSubscribersForBreach(breachId);
// Get subscriber ID from:
// - `subscriber_id`: if `email_addresses` record
// - `id`: if `subscribers` record
const subscriberId = recipient.subscriber_id ?? recipient.id;
if (notifiedSubs.includes(subscriberId)) {
console.info("Subscriber already notified, skipping: ", subscriberId);
continue;
}
const { recipientEmail, breachedEmail, signupLanguage } =
getAddressesAndLanguageForEmail(recipient);
@ -169,11 +193,11 @@ export async function poll(subClient, receivedMessages) {
const supportedLocales = negotiateLanguages(
requestedLanguage,
availableLanguages,
{ defaultLocale: "en" }
{ defaultLocale: "en" },
);
await localStorage.run(new Map(), async () => {
localStorage.getStore().set('locale', supportedLocales);
localStorage.getStore().set("locale", supportedLocales);
await (async () => {
if (!notifiedRecipients.includes(breachedEmail)) {
const data = {
@ -187,11 +211,38 @@ export async function poll(subClient, receivedMessages) {
utmCampaign: utmCampaignId,
};
// Try to append a new row to the email notifications table.
// If the append fails, there may already be an entry; stop the script.
try {
await addEmailNotification({
breachId,
subscriberId,
notified: false,
email: data.recipientEmail,
notificationType: "incident",
});
} catch (e) {
console.error("Failed to add email notification to table: ", e);
throw new Error(e);
}
const emailTemplate = getTemplate(data, breachAlertEmailPartial);
const subject = getMessage("breach-alert-subject");
await sendEmail(data.recipientEmail, subject, emailTemplate);
// mark email as notified in database
// if this call ever fails, stop the script with an error
try {
await markEmailAsNotified(
subscriberId,
breachId,
data.recipientEmail,
);
} catch (e) {
console.error("Failed to mark email as notified: ", e);
throw new Error(e);
}
notifiedRecipients.push(breachedEmail);
}
})();
@ -204,7 +255,7 @@ export async function poll(subClient, receivedMessages) {
subscription: formattedSubscription,
ackIds: [message.ackId],
});
/* c8 ignore start */
/* c8 ignore start */
} catch (error) {
console.error(`Notifying subscribers of breach failed: ${error}`);
}
@ -220,15 +271,15 @@ async function pullMessages() {
options = {
servicePath: "localhost",
port: "8085",
sslCreds: grpc.credentials.createInsecure()
}
sslCreds: grpc.credentials.createInsecure(),
};
}
const subClient = new pubsub.v1.SubscriberClient(options);
const formattedSubscription = subClient.subscriptionPath(
projectId,
subscriptionName
subscriptionName,
);
// If there are no messages, this will wait until the default timeout for the pull API.
@ -253,7 +304,9 @@ if (process.env.NODE_ENV !== "test") {
init()
.then(async (_res) => {
if (!(projectId && subscriptionName)) {
throw new Error("env vars not set: GCP_PUBSUB_PROJECT_ID and GCP_PUBSUB_SUBSCRIPTION_NAME")
throw new Error(
"env vars not set: GCP_PUBSUB_PROJECT_ID and GCP_PUBSUB_SUBSCRIPTION_NAME",
);
}
Sentry.captureCheckIn({
checkInId,
@ -262,7 +315,7 @@ if (process.env.NODE_ENV !== "test") {
});
})
.catch((err) => console.error(err))
.finally(async() => {
.finally(async () => {
// Tear down knex connection pools
await knexSubscribers.destroy();
await knexEmailAddresses.destroy();

View file

@ -7,7 +7,7 @@ import { test, expect, jest } from "@jest/globals";
jest.mock("@sentry/nextjs", () => {
return {
init: jest.fn(),
captureCheckIn: jest.fn()
captureCheckIn: jest.fn(),
};
});
@ -16,8 +16,8 @@ jest.mock("../utils/email.js", () => {
initEmail: jest.fn(),
EmailTemplateType: jest.fn(),
getEmailCtaHref: jest.fn(),
sendEmail: jest.fn()
}
sendEmail: jest.fn(),
};
});
jest.mock("../utils/hibp.js", () => {
@ -26,107 +26,138 @@ jest.mock("../utils/hibp.js", () => {
return {
recipientEmail: "1",
breachedEmail: "2",
signupLanguage: "3"
}
signupLanguage: "3",
};
}),
getBreachByName: jest.fn(),
getAllBreachesFromDb: jest.fn(),
}
};
});
jest.mock("../db/tables/subscribers.js", () => {
return {
getSubscribersByHashes: jest.fn(() => [""])
}
getSubscribersByHashes: jest.fn(() => [""]),
};
});
jest.mock("../db/tables/emailAddresses.js", () => {
return {
getEmailAddressesByHashes: jest.fn(() => [""])
}
getEmailAddressesByHashes: jest.fn(() => [""]),
};
});
jest.mock("../db/tables/email_notifications.js", () => {
return {
getNotifiedSubscribersForBreach: jest.fn(() => [""]),
addEmailNotification: jest.fn(),
markEmailAsNotified: jest.fn(),
};
});
jest.mock("../utils/fluent.js", () => {
return {
initFluentBundles: jest.fn(),
getMessage: jest.fn(),
getStringLookup: jest.fn()
}
getStringLookup: jest.fn(),
};
});
jest.mock("../views/emails/email2022.js", () => {
return {
getTemplate: jest.fn()
}
return {
getTemplate: jest.fn(),
};
});
jest.mock("../views/emails/emailBreachAlert.js", () => {
return {
breachAlertEmailPartial: jest.fn()
}
breachAlertEmailPartial: jest.fn(),
};
});
const subClient = {
subscriptionPath: jest.fn(),
acknowledge: jest.fn()
}
acknowledge: jest.fn(),
};
function buildReceivedMessages(testBreachAlert) {
return [
{
ackId: 'testAckId',
ackId: "testAckId",
message: {
attributes: {},
data: Buffer.from(JSON.stringify(testBreachAlert)),
messageId: '1',
messageId: "1",
publishTime: {},
orderingKey: ''
orderingKey: "",
},
deliveryAttempt: 0
}
]
deliveryAttempt: 0,
},
];
}
beforeEach(() => {
jest.clearAllMocks();
jest.spyOn(console, "error").mockImplementation(() => {});
});
test("rejects invalid messages", async () => {
const { poll } = await import("./emailBreachAlerts.js");
const consoleError = jest.spyOn(console, 'error').mockImplementation(() => {});
const consoleError = jest
.spyOn(console, "error")
.mockImplementation(() => {});
await poll(subClient, buildReceivedMessages({
// missing breachName
"hashPrefix": "test-prefix1",
"hashSuffixes": ["test-suffix1"]
}));
await poll(
subClient,
buildReceivedMessages({
// missing breachName
hashPrefix: "test-prefix1",
hashSuffixes: ["test-suffix1"],
}),
);
expect(subClient.acknowledge).toBeCalledTimes(0);
expect(consoleError).toBeCalledWith("HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.")
expect(consoleError).toBeCalledWith(
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.",
);
await poll(subClient, buildReceivedMessages({
"breachName": "test1",
// missing hashPrefix
"hashSuffixes": ["test-suffix1"]
}));
await poll(
subClient,
buildReceivedMessages({
breachName: "test1",
// missing hashPrefix
hashSuffixes: ["test-suffix1"],
}),
);
expect(subClient.acknowledge).toBeCalledTimes(0);
expect(consoleError).toBeCalledWith("HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.")
expect(consoleError).toBeCalledWith(
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.",
);
await poll(subClient, buildReceivedMessages({
"breachName": "test1",
"hashPrefix": "test-prefix1",
// missing hashSuffixes
}));
await poll(
subClient,
buildReceivedMessages({
breachName: "test1",
hashPrefix: "test-prefix1",
// missing hashSuffixes
}),
);
expect(subClient.acknowledge).toBeCalledTimes(0);
expect(consoleError).toBeCalledWith("HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.")
expect(consoleError).toBeCalledWith(
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.",
);
await poll(subClient, buildReceivedMessages({
"breachName": "test1",
"hashPrefix": "test-prefix1",
"hashSuffixes": "" // hashSuffixes not an array
}));
await poll(
subClient,
buildReceivedMessages({
breachName: "test1",
hashPrefix: "test-prefix1",
hashSuffixes: "", // hashSuffixes not an array
}),
);
expect(subClient.acknowledge).toBeCalledTimes(0);
expect(consoleError).toBeCalledWith("HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.")
expect(consoleError).toBeCalledWith(
"HIBP breach notification: requires breachName, hashPrefix, and hashSuffixes.",
);
});
test("processes valid messages", async () => {
@ -141,9 +172,9 @@ test("processes valid messages", async () => {
});
const receivedMessages = buildReceivedMessages({
"breachName": "test1",
"hashPrefix": "test-prefix1",
"hashSuffixes": ["test-suffix1"]
breachName: "test1",
hashPrefix: "test-prefix1",
hashSuffixes: ["test-suffix1"],
});
const { poll } = await import("./emailBreachAlerts.js");
@ -202,3 +233,146 @@ test("processes valid messages", async () => {
// Verified, not fabricated, not spam list breaches are emailed.
expect(sendEmail).toHaveBeenCalledTimes(1);
});
test("skipping email when subscriber id exists in email_notifications table", async () => {
const { sendEmail } = await import("../utils/email.js");
const mockedUtilsHibp = jest.requireMock("../utils/hibp.js");
mockedUtilsHibp.getBreachByName.mockReturnValue({
IsVerified: true,
Domain: "test1",
IsFabricated: false,
IsSpamList: false,
Id: 1,
});
jest.mock("../db/tables/subscribers.js", () => {
return {
getSubscribersByHashes: jest.fn(() => [{ id: 1 }]),
};
});
jest.mock("../db/tables/emailAddresses.js", () => {
return {
getEmailAddressesByHashes: jest.fn(() => []),
};
});
jest.mock("../db/tables/email_notifications.js", () => {
return {
getNotifiedSubscribersForBreach: jest.fn(() => [1]),
addEmailNotification: jest.fn(),
};
});
const receivedMessages = buildReceivedMessages({
breachName: "test1",
hashPrefix: "test-prefix1",
hashSuffixes: ["test-suffix1"],
});
const { poll } = await import("./emailBreachAlerts.js");
await poll(subClient, receivedMessages);
// Verified, not fabricated, not spam list breaches are acknowledged.
expect(subClient.acknowledge).toHaveBeenCalledTimes(1);
// Verified, not fabricated, not spam list breaches are emailed.
expect(sendEmail).toHaveBeenCalledTimes(0);
});
test("throws an error when addEmailNotification fails", async () => {
const { sendEmail } = await import("../utils/email.js");
const mockedUtilsHibp = jest.requireMock("../utils/hibp.js");
mockedUtilsHibp.getBreachByName.mockReturnValue({
IsVerified: true,
Domain: "test1",
IsFabricated: false,
IsSpamList: false,
Id: 1,
});
jest.mock("../db/tables/subscribers.js", () => {
return {
getSubscribersByHashes: jest.fn(() => [{ id: 1 }]),
};
});
jest.mock("../db/tables/emailAddresses.js", () => {
return {
getEmailAddressesByHashes: jest.fn(() => [""]),
};
});
jest.mock("../db/tables/email_notifications.js", () => {
return {
getNotifiedSubscribersForBreach: jest.fn(() => [2]),
addEmailNotification: jest.fn().mockImplementationOnce(() => {
throw new Error("add failed");
}),
};
});
const receivedMessages = buildReceivedMessages({
breachName: "test1",
hashPrefix: "test-prefix1",
hashSuffixes: ["test-suffix1"],
});
const { poll } = await import("./emailBreachAlerts.js");
try {
await poll(subClient, receivedMessages);
} catch (e) {
expect(console.error).toBeCalled();
expect(e.message).toBe("add failed");
}
expect(sendEmail).toHaveBeenCalledTimes(0);
});
test("throws an error when markEmailAsNotified fails", async () => {
const { sendEmail } = await import("../utils/email.js");
const mockedUtilsHibp = jest.requireMock("../utils/hibp.js");
mockedUtilsHibp.getBreachByName.mockReturnValue({
IsVerified: true,
Domain: "test1",
IsFabricated: false,
IsSpamList: false,
Id: 1,
});
jest.mock("../db/tables/subscribers.js", () => {
return {
getSubscribersByHashes: jest.fn(() => [{ id: 1 }]),
};
});
jest.mock("../db/tables/emailAddresses.js", () => {
return {
getEmailAddressesByHashes: jest.fn(() => [""]),
};
});
jest.mock("../db/tables/email_notifications.js", () => {
return {
getNotifiedSubscribersForBreach: jest.fn(() => [2]),
addEmailNotification: jest.fn(),
markEmailAsNotified: jest.fn().mockImplementationOnce(() => {
throw new Error("mark failed");
}),
};
});
const receivedMessages = buildReceivedMessages({
breachName: "test1",
hashPrefix: "test-prefix1",
hashSuffixes: ["test-suffix1"],
});
const { poll } = await import("./emailBreachAlerts.js");
try {
await poll(subClient, receivedMessages);
} catch (e) {
expect(console.error).toBeCalled();
expect(e.message).toBe("mark failed");
}
expect(sendEmail).toHaveBeenCalledTimes(1);
});
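// Taken together, the two failure tests above suggest the order of operations inside
// poll(): addEmailNotification appears to run before the notification email is sent
// (a failure there leaves sendEmail uncalled), while markEmailAsNotified appears to run
// after sending (a failure there still leaves exactly one email sent).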

View file

@ -1,11 +1,11 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import { createWriteStream, existsSync } from 'fs';
import { Readable } from 'stream';
import { finished } from 'stream/promises';
import "dotenv/config"
import { createWriteStream, existsSync } from "fs";
import { Readable } from "stream";
import { finished } from "stream/promises";
import "dotenv/config";
const dataPath = "./locationAutocompleteData.json";
@ -13,12 +13,12 @@ if (!existsSync(dataPath)) {
const stream = createWriteStream(dataPath);
try {
const fetchUrl = `https://s3.amazonaws.com/${process.env.S3_BUCKET}/autocomplete/locationAutocompleteData.json`;
console.debug({ fetchUrl })
console.debug({ fetchUrl });
const { body } = await fetch(fetchUrl);
await finished(Readable.fromWeb(body).pipe(stream));
} catch (e) {
console.error(e)
console.error(e);
}
} else {
console.log('file already exists: ', dataPath)
}
console.log("file already exists: ", dataPath);
}

View file

@ -8,15 +8,15 @@
* with the goal of deprecating the column
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../../utils/breaches.js'
import { BreachDataTypes } from '../../utils/breach-resolution.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../../utils/breaches.js";
import { BreachDataTypes } from "../../utils/breach-resolution.js";
const knex = Knex(knexConfig);
const LIMIT = 1000 // with millions of records, we have to load a few at a time
let subscribersArr = []
const LIMIT = 1000; // with millions of records, we have to load a few at a time
let subscribersArr = [];
/**
* Batch update
@ -24,95 +24,104 @@ let subscribersArr = []
* @param {*} updateCollection
*/
const batchUpdate = async (updateCollection) => {
const trx = await knex.transaction()
const trx = await knex.transaction();
try {
await Promise.all(updateCollection.map(tuple => {
const { user, updatedBreachesResolution } = tuple
return knex('subscribers')
.where('id', user.id)
.update({
breach_resolution: updatedBreachesResolution
})
.transacting(trx)
}))
await trx.commit()
await Promise.all(
updateCollection.map((tuple) => {
const { user, updatedBreachesResolution } = tuple;
return knex("subscribers")
.where("id", user.id)
.update({
breach_resolution: updatedBreachesResolution,
})
.transacting(trx);
}),
);
await trx.commit();
} catch (error) {
await trx.rollback()
console.error('batch update failed!!')
console.log({ updateCollection })
console.error(error)
await trx.rollback();
console.error("batch update failed!!");
console.log({ updateCollection });
console.error(error);
}
}
};
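// For reference, batchUpdate expects an array of { user, updatedBreachesResolution }
// tuples, matching what updateCollection.push(...) assembles further down. A call might
// look roughly like this (hypothetical values):
//   await batchUpdate([
//     {
//       user: { id: 42 },
//       updatedBreachesResolution: { "jane@example.com": { 3: { isResolved: true } } },
//     },
//   ]);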
const selectAndLockResolutions = async () => {
const trx = await knex.transaction()
let subscribersArr = []
const trx = await knex.transaction();
let subscribersArr = [];
try {
subscribersArr = await knex.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.whereNull('db_migration_1')
subscribersArr = await knex
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.whereNull("db_migration_1")
.limit(LIMIT)
.orderBy('updated_at', 'desc')
.orderBy("updated_at", "desc")
.transacting(trx)
.forUpdate()
.forUpdate();
// update the lock
await Promise.all(subscribersArr.map(sub => {
const { id } = sub
return knex('subscribers')
.where('id', id)
.update({
db_migration_1: true
})
.transacting(trx)
}))
await Promise.all(
subscribersArr.map((sub) => {
const { id } = sub;
return knex("subscribers")
.where("id", id)
.update({
db_migration_1: true,
})
.transacting(trx);
}),
);
await trx.commit()
await trx.commit();
} catch (error) {
await trx.rollback()
console.log('select & mark rows failed!! first row:')
console.log({ first: subscribersArr[0] })
console.error(error)
await trx.rollback();
console.log("select & mark rows failed!! first row:");
console.log({ first: subscribersArr[0] });
console.error(error);
}
return subscribersArr
}
return subscribersArr;
};
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past, convert those
// records into the new v2 format
let failedToSelect = true
let failedToSelect = true;
while (failedToSelect) {
try {
subscribersArr = await selectAndLockResolutions()
failedToSelect = false
subscribersArr = await selectAndLockResolutions();
failedToSelect = false;
} catch (e) {
failedToSelect = true
console.error(e)
failedToSelect = true;
console.error(e);
}
}
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
const updateCollection = []
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
const updateCollection = [];
for (const subscriber of subscribersArr) {
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber
let isV2Changed = false // use a boolean to track if v2 has been changed, only upsert if so
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber;
let isV2Changed = false; // use a boolean to track if v2 has been changed, only upsert if so
// fetch subscriber all breaches / email
let subscriberBreachesEmail
let subscriberBreachesEmail;
try {
subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
} catch (e) {
console.error('Cannot fetch subscriber breaches at the moment: ', e)
continue
console.error("Cannot fetch subscriber breaches at the moment: ", e);
continue;
}
// console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
@ -122,53 +131,67 @@ for (const subscriber of subscribersArr) {
for (const recencyIndex of resolvedRecencyIndices) {
// console.debug({ recencyIndex })
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(e => e.email === email)[0] || {}
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(e) => e.email === email,
)[0] || {};
// console.debug({ ve })
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
// console.debug({ subBreach })
if (!subBreach || !subBreach.DataClasses) {
console.warn(`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`)
continue
console.warn(
`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`,
);
continue;
}
// if email does not exist in v2, we need to add it to the object
// format: {email: { recencyIndex: { isResolved: true, resolutionsChecked: [DataTypes]}}}
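// For example (hypothetical email, recency index, and data classes), a converted entry
// could look like:
//   {
//     "jane@example.com": {
//       "3": { isResolved: true, resolutionsChecked: ["email-addresses", "passwords"] },
//     },
//   }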
if (!v2) v2 = {}
if (!v2) v2 = {};
if (!v2[email]) {
v2[email] = {
[recencyIndex]: {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses || [BreachDataTypes.General]
}
}
resolutionsChecked: subBreach?.DataClasses || [
BreachDataTypes.General,
],
},
};
isV2Changed = true
isV2Changed = true;
}
if (v2[email][recencyIndex]?.isResolved) {
console.log(`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`)
console.log(
`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`,
);
} else {
console.log(`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`)
console.log(
`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`,
);
v2[email][recencyIndex] = {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses
}
isV2Changed = true
resolutionsChecked: subBreach?.DataClasses,
};
isV2Changed = true;
}
}
}
// check if v2 is changed, if so, upsert the new v2
if (isV2Changed) {
console.log('upsert for subscriber: ', subscriber.primary_email)
updateCollection.push({ user: subscriber, updatedBreachesResolution: v2 })
console.log("upsert for subscriber: ", subscriber.primary_email);
updateCollection.push({ user: subscriber, updatedBreachesResolution: v2 });
}
}
await batchUpdate(updateCollection)
await batchUpdate(updateCollection);
// breaking out of do..while loop
console.log('Script finished')
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Script finished");
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();

View file

@ -10,49 +10,52 @@
* `useBreachId: true/false`
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../../utils/breaches.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../../utils/breaches.js";
const knex = Knex(knexConfig);
const LIMIT = 1000 // with millions of records, we have to load a few at a time
let subscribersArr = []
const LIMIT = 1000; // with millions of records, we have to load a few at a time
let subscribersArr = [];
const selectAndLockResolutions = async () => {
const trx = await knex.transaction()
let subscribers = []
const trx = await knex.transaction();
let subscribers = [];
try {
subscribers = await knex.select('id', 'primary_email', 'breach_resolution')
.from('subscribers')
.whereNotNull('breach_resolution')
.whereNull('db_migration_2')
subscribers = await knex
.select("id", "primary_email", "breach_resolution")
.from("subscribers")
.whereNotNull("breach_resolution")
.whereNull("db_migration_2")
.limit(LIMIT)
.orderBy('updated_at', 'desc')
.orderBy("updated_at", "desc")
.transacting(trx)
.forUpdate()
.forUpdate();
// update the lock
await Promise.all(subscribers.map(sub => {
const { id } = sub
return knex('subscribers')
.where('id', id)
.update({
db_migration_2: true
})
.transacting(trx)
}))
await Promise.all(
subscribers.map((sub) => {
const { id } = sub;
return knex("subscribers")
.where("id", id)
.update({
db_migration_2: true,
})
.transacting(trx);
}),
);
await trx.commit()
await trx.commit();
} catch (error) {
await trx.rollback()
console.error('select & mark rows failed!! first row:')
console.log({ first: subscribers[0] })
console.error(error)
await trx.rollback();
console.error("select & mark rows failed!! first row:");
console.log({ first: subscribers[0] });
console.error(error);
}
return subscribers
}
return subscribers;
};
/**
* Batch update
@ -60,102 +63,119 @@ const selectAndLockResolutions = async () => {
* @param {*} updateCollection
*/
const batchUpdate = async (updateCollection) => {
const trx = await knex.transaction()
const trx = await knex.transaction();
try {
await Promise.all(updateCollection.map(tuple => {
const { user, updatedBreachesResolution } = tuple
return knex('subscribers')
.where('id', user.id)
.update({
breach_resolution: updatedBreachesResolution
})
.transacting(trx)
}))
await trx.commit()
await Promise.all(
updateCollection.map((tuple) => {
const { user, updatedBreachesResolution } = tuple;
return knex("subscribers")
.where("id", user.id)
.update({
breach_resolution: updatedBreachesResolution,
})
.transacting(trx);
}),
);
await trx.commit();
} catch (error) {
await trx.rollback()
console.error('batch update failed!!')
console.log({ updateCollection })
console.error(error)
await trx.rollback();
console.error("batch update failed!!");
console.log({ updateCollection });
console.error(error);
}
}
};
// Script begins here
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past,
// replace recency index with breach id
let failedToSelect = true
let failedToSelect = true;
while (failedToSelect) {
try {
subscribersArr = await selectAndLockResolutions()
failedToSelect = false
subscribersArr = await selectAndLockResolutions();
failedToSelect = false;
} catch (e) {
console.error(e)
console.error(e);
}
}
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
const updateCollection = []
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
const updateCollection = [];
for (const subscriber of subscribersArr) {
const { breach_resolution: v2 } = subscriber
const { breach_resolution: v2 } = subscriber;
// console.debug({ v2 })
// if useBreachId is set, skip because this breach_resolution has already been worked on
if (v2.useBreachId) {
console.log('Skipping since `useBreachId` is set already, this breach resolution is already converted')
continue
console.log(
"Skipping since `useBreachId` is set already, this breach resolution is already converted",
);
continue;
}
const newResolutions = {}
const newResolutions = {};
// fetch subscriber all breaches / email
let subscriberBreachesEmail
let subscriberBreachesEmail;
try {
subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
} catch (e) {
console.error('Cannot fetch subscriber breaches at the moment: ', e)
continue
console.error("Cannot fetch subscriber breaches at the moment: ", e);
continue;
}
// console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
for (const email in v2) {
// console.debug({ email })
const resolutions = v2[email]
const resolutions = v2[email];
// console.debug({ resolutions })
newResolutions[email] = {}
newResolutions[email] = {};
for (const recencyIndex in resolutions) {
// console.debug({ recencyIndex })
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(ve => ve.email === email)[0] || {}
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
const breachName = subBreach?.Name
console.debug({ breachName })
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(ve) => ve.email === email,
)[0] || {};
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
const breachName = subBreach?.Name;
console.debug({ breachName });
// find breach id for the breach
const breachId = allBreaches.find(b => b.Name === breachName)?.Id
newResolutions[email][breachId] = v2[email][recencyIndex]
const breachId = allBreaches.find((b) => b.Name === breachName)?.Id;
newResolutions[email][breachId] = v2[email][recencyIndex];
}
}
// check if v2 is changed, if so, upsert the new v2
newResolutions.useBreachId = true
updateCollection.push({ user: subscriber, updatedBreachesResolution: newResolutions })
newResolutions.useBreachId = true;
updateCollection.push({
user: subscriber,
updatedBreachesResolution: newResolutions,
});
}
await batchUpdate(updateCollection)
await batchUpdate(updateCollection);
console.log('Reaching the end of the table')
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Reaching the end of the table");
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();
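In short, this script re-keys each breach_resolution entry from the email's recency index to the stable breach Id, and tags the object with useBreachId so it is not converted twice. A minimal sketch of the transformation, with hypothetical values:

const before = { "jane@example.com": { 3: { isResolved: true } } }; // keyed by recencyIndex
const after = {
  "jane@example.com": { 17: { isResolved: true } }, // keyed by breach Id
  useBreachId: true,
};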

View file

@ -7,85 +7,102 @@
* The purpose of the script is to clean up some of the failed records during db migration on 3/28/23
*/
import Knex from 'knex'
import knexConfig from '../db/knexfile.js'
import { getAllBreachesFromDb } from '../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../utils/breaches.js'
import { setBreachResolution } from '../db/tables/subscribers.js'
import mozlog from '../utils/log.js'
const log = mozlog('script.migrationCleanup')
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../db/knexfile.js";
import { getAllBreachesFromDb } from "../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../utils/breaches.js";
import { setBreachResolution } from "../db/tables/subscribers.js";
import mozlog from "../utils/log.js";
const log = mozlog("script.migrationCleanup");
const knex = Knex(knexConfig);
const LIMIT = 3000
let subscribersArr = []
const LIMIT = 3000;
let subscribersArr = [];
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) log.info('breach_count', 'breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
log.info(
"breach_count",
"breaches loaded successfully! ",
allBreaches.length,
);
const count = await knex
.from('subscribers')
.whereRaw('NOT ((breach_resolution)::jsonb \\? \'useBreachId\')')
.count('*')
.from("subscribers")
.whereRaw("NOT ((breach_resolution)::jsonb \\? 'useBreachId')")
.count("*");
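// Note: "\\?" in whereRaw escapes knex's "?" binding placeholder, so Postgres receives the
// jsonb "?" key-existence operator; this counts rows whose breach_resolution does not yet
// contain a top-level "useBreachId" key.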
log.info('total_to_be_executed', count[0])
log.info("total_to_be_executed", count[0]);
// find all subscribers who resolved any breaches in the past,
// replace recency index with breach id
for (let i = 0; i < 10; i++) {
subscribersArr = await knex
.select('id', 'primary_email', 'breach_resolution')
.from('subscribers')
.orderBy('updated_at', 'desc')
.whereRaw('NOT ((breach_resolution)::jsonb \\? \'useBreachId\')')
.limit(LIMIT)
.select("id", "primary_email", "breach_resolution")
.from("subscribers")
.orderBy("updated_at", "desc")
.whereRaw("NOT ((breach_resolution)::jsonb \\? 'useBreachId')")
.limit(LIMIT);
log.info('job', `Loaded # of subscribers: ${subscribersArr.length}`)
log.info("job", `Loaded # of subscribers: ${subscribersArr.length}`);
for (const subscriber of subscribersArr) {
const { breach_resolution: v2 } = subscriber
const { breach_resolution: v2 } = subscriber;
// console.debug({ v2 })
// if useBreachId is set, skip because this breach_resolution has already been worked on
if (v2.useBreachId) {
log.warn('job', 'Skipping since `useBreachId` is set already, this breach resolution is already converted')
continue
log.warn(
"job",
"Skipping since `useBreachId` is set already, this breach resolution is already converted",
);
continue;
}
const newResolutions = {}
const newResolutions = {};
// fetch subscriber all breaches / email
const subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
const subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
// console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
for (const email in v2) {
// console.debug({ email })
const resolutions = v2[email]
// console.debug({ email })
const resolutions = v2[email];
// console.debug({ resolutions })
newResolutions[email] = {}
newResolutions[email] = {};
for (const recencyIndex in resolutions) {
console.debug({ recencyIndex })
console.debug({ recencyIndex });
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(ve => ve.email === email)[0] || {}
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
const breachName = subBreach?.Name
console.debug({ breachName })
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(ve) => ve.email === email,
)[0] || {};
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
const breachName = subBreach?.Name;
console.debug({ breachName });
// find breach id for the breach
const breachId = allBreaches.find(b => b.Name === breachName)?.Id
log.info('job', { breachId })
newResolutions[email][breachId] = v2[email][recencyIndex]
const breachId = allBreaches.find((b) => b.Name === breachName)?.Id;
log.info("job", { breachId });
newResolutions[email][breachId] = v2[email][recencyIndex];
}
}
// check if v2 is changed, if so, upsert the new v2
newResolutions.useBreachId = true
await setBreachResolution(subscriber, newResolutions)
newResolutions.useBreachId = true;
await setBreachResolution(subscriber, newResolutions);
}
}
// breaking out of do..while loop
log.info('job', 'Reaching the end of the table')
process.exit()
log.info("job", "Reaching the end of the table");
process.exit();

View file

@ -8,50 +8,56 @@
* with the goal of deprecating the column
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../../utils/breaches.js'
import { BreachDataTypes } from '../../utils/breach-resolution.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../../utils/breaches.js";
import { BreachDataTypes } from "../../utils/breach-resolution.js";
const knex = Knex(knexConfig);
const LIMIT = 1000 // with millions of records, we have to load a few at a time
let offset = 0 // looping through all records with offset
let subscribersArr = []
const LIMIT = 1000; // with millions of records, we have to load a few at a time
let offset = 0; // looping through all records with offset
let subscribersArr = [];
let CAP = 5000 // cap the experiment
let CAP = 5000; // cap the experiment
if (process.argv.length > 2) {
CAP = process.argv[2]
console.log('using cap passed in: ', CAP)
CAP = process.argv[2];
console.log("using cap passed in: ", CAP);
}
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past, convert those
// records into the new v2 format
do {
console.log(`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`)
console.log(
`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`,
);
subscribersArr = await knex
.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.limit(LIMIT)
.offset(offset)
.orderBy('updated_at', 'desc')
.orderBy("updated_at", "desc");
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
for (const subscriber of subscribersArr) {
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber
let isV2Changed = false // use a boolean to track if v2 has been changed, only upsert if so
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber;
let isV2Changed = false; // use a boolean to track if v2 has been changed, only upsert if so
// fetch subscriber all breaches / email
const subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
const subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
// console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
for (const [email, resolvedRecencyIndices] of Object.entries(v1)) {
@ -60,53 +66,67 @@ do {
for (const recencyIndex of resolvedRecencyIndices) {
// console.debug({ recencyIndex })
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(e => e.email === email)[0] || {}
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(e) => e.email === email,
)[0] || {};
// console.debug({ ve })
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
// console.debug({ subBreach })
if (!subBreach || !subBreach.DataClasses) {
console.warn(`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`)
continue
console.warn(
`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`,
);
continue;
}
// if email does not exist in v2, we need to add it to the object
// format: {email: { recencyIndex: { isResolved: true, resolutionsChecked: [DataTypes]}}}
if (!v2) v2 = {}
if (!v2) v2 = {};
if (!v2[email]) {
v2[email] = {
[recencyIndex]: {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses || [BreachDataTypes.General]
}
}
resolutionsChecked: subBreach?.DataClasses || [
BreachDataTypes.General,
],
},
};
isV2Changed = true
isV2Changed = true;
}
if (v2[email][recencyIndex]?.isResolved) {
console.log(`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`)
console.log(
`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`,
);
} else {
console.log(`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`)
console.log(
`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`,
);
v2[email][recencyIndex] = {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses
}
isV2Changed = true
resolutionsChecked: subBreach?.DataClasses,
};
isV2Changed = true;
}
}
}
// check if v2 is changed, if so, upsert the new v2
if (isV2Changed) {
console.log('upsert for subscriber: ', subscriber.primary_email)
console.log("upsert for subscriber: ", subscriber.primary_email);
}
}
offset += LIMIT
} while (subscribersArr.length === LIMIT && offset <= CAP)
offset += LIMIT;
} while (subscribersArr.length === LIMIT && offset <= CAP);
// breaking out of do..while loop
console.log('Reaching the end of the table, offset ended at', offset)
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Reaching the end of the table, offset ended at", offset);
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();

View file

@ -8,21 +8,21 @@
* with the goal of deprecating the column
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
import { getAllEmailsAndBreaches } from '../../utils/breaches.js'
import { BreachDataTypes } from '../../utils/breach-resolution.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
import { getAllEmailsAndBreaches } from "../../utils/breaches.js";
import { BreachDataTypes } from "../../utils/breach-resolution.js";
const knex = Knex(knexConfig);
const LIMIT = 1000 // with millions of records, we have to load a few at a time
let CAP = 5000 // cap the experiment
const LIMIT = 1000; // with millions of records, we have to load a few at a time
let CAP = 5000; // cap the experiment
if (process.argv.length > 2) {
CAP = process.argv[2]
console.log('using cap passed in: ', CAP)
CAP = process.argv[2];
console.log("using cap passed in: ", CAP);
}
let offset = 0 // looping through all records with offset
let subscribersArr = []
let offset = 0; // looping through all records with offset
let subscribersArr = [];
/**
* Batch update
@ -30,55 +30,62 @@ let subscribersArr = []
* @param {*} updateCollection
*/
const batchUpdate = async (updateCollection) => {
const trx = await knex.transaction()
const trx = await knex.transaction();
try {
await Promise.all(updateCollection.map(tuple => {
const { user, updatedBreachesResolution } = tuple
return knex('subscribers')
.where('id', user.id)
.update({
breach_resolution: updatedBreachesResolution
})
.transacting(trx)
})
)
await trx.commit()
await Promise.all(
updateCollection.map((tuple) => {
const { user, updatedBreachesResolution } = tuple;
return knex("subscribers")
.where("id", user.id)
.update({
breach_resolution: updatedBreachesResolution,
})
.transacting(trx);
}),
);
await trx.commit();
} catch (error) {
await trx.rollback()
console.error('batch update failed!!')
console.log({ updateCollection })
console.error(error)
await trx.rollback();
console.error("batch update failed!!");
console.log({ updateCollection });
console.error(error);
}
}
};
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past, convert those
// records into the new v2 format
do {
console.log(`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`)
console.log(
`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`,
);
subscribersArr = await knex
.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.limit(LIMIT)
.offset(offset)
.orderBy('updated_at', 'desc')
.orderBy("updated_at", "desc");
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
const updateCollection = []
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
const updateCollection = [];
for (const subscriber of subscribersArr) {
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber
let isV2Changed = false // use a boolean to track if v2 has been changed, only upsert if so
let { breaches_resolved: v1, breach_resolution: v2 } = subscriber;
let isV2Changed = false; // use a boolean to track if v2 has been changed, only upsert if so
// fetch subscriber all breaches / email
const subscriberBreachesEmail = await getAllEmailsAndBreaches(subscriber, allBreaches)
const subscriberBreachesEmail = await getAllEmailsAndBreaches(
subscriber,
allBreaches,
);
// console.debug(JSON.stringify(subscriberBreachesEmail.verifiedEmails))
for (const [email, resolvedRecencyIndices] of Object.entries(v1)) {
@ -87,55 +94,72 @@ do {
for (const recencyIndex of resolvedRecencyIndices) {
// console.debug({ recencyIndex })
// find subscriber's relevant recency index breach information
const ve = subscriberBreachesEmail.verifiedEmails?.filter(e => e.email === email)[0] || {}
const ve =
subscriberBreachesEmail.verifiedEmails?.filter(
(e) => e.email === email,
)[0] || {};
// console.debug({ ve })
const subBreach = ve.breaches?.filter(b => Number(b.recencyIndex) === Number(recencyIndex))[0] || null
const subBreach =
ve.breaches?.filter(
(b) => Number(b.recencyIndex) === Number(recencyIndex),
)[0] || null;
// console.debug({ subBreach })
if (!subBreach || !subBreach.DataClasses) {
console.warn(`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`)
continue
console.warn(
`SKIP: Cannot find subscribers breach and data types - recency: ${recencyIndex} email: ${email}`,
);
continue;
}
// if email does not exist in v2, we need to add it to the object
// format: {email: { recencyIndex: { isResolved: true, resolutionsChecked: [DataTypes]}}}
if (!v2) v2 = {}
if (!v2) v2 = {};
if (!v2[email]) {
v2[email] = {
[recencyIndex]: {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses || [BreachDataTypes.General]
}
}
resolutionsChecked: subBreach?.DataClasses || [
BreachDataTypes.General,
],
},
};
isV2Changed = true
isV2Changed = true;
}
if (v2[email][recencyIndex]?.isResolved) {
console.log(`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`)
console.log(
`recencyIndex ${recencyIndex} exists in v2 and is resolved, no changes`,
);
} else {
console.log(`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`)
console.log(
`recencyIndex ${recencyIndex} either does not exist or is not resolved, overwriting`,
);
v2[email][recencyIndex] = {
isResolved: true,
resolutionsChecked: subBreach?.DataClasses
}
isV2Changed = true
resolutionsChecked: subBreach?.DataClasses,
};
isV2Changed = true;
}
}
}
// check if v2 is changed, if so, upsert the new v2
if (isV2Changed) {
console.log('upsert for subscriber: ', subscriber.primary_email)
updateCollection.push({ user: subscriber, updatedBreachesResolution: v2 })
console.log("upsert for subscriber: ", subscriber.primary_email);
updateCollection.push({
user: subscriber,
updatedBreachesResolution: v2,
});
}
}
await batchUpdate(updateCollection)
offset += LIMIT
} while (subscribersArr.length === LIMIT && offset <= CAP)
await batchUpdate(updateCollection);
offset += LIMIT;
} while (subscribersArr.length === LIMIT && offset <= CAP);
// breaking out of do..while loop
console.log('Reaching the end of the table, offset ended at', offset)
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Reaching the end of the table, offset ended at", offset);
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();

View file

@ -7,41 +7,44 @@
* The purpose of the script is to benchmark pure read with limit set as 100
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
const knex = Knex(knexConfig);
const LIMIT = 100 // with millions of records, we have to load a few at a time
let offset = 0 // looping through all records with offset
let subscribersArr = []
const LIMIT = 100; // with millions of records, we have to load a few at a time
let offset = 0; // looping through all records with offset
let subscribersArr = [];
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
// find all subscribers who resolved any breaches in the past, convert those
// records into the new v2 format
do {
console.log(`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`)
console.log(
`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`,
);
subscribersArr = await knex
.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.limit(LIMIT)
.offset(offset)
.offset(offset);
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
offset += LIMIT
} while (subscribersArr.length === LIMIT)
offset += LIMIT;
} while (subscribersArr.length === LIMIT);
// breaking out of do..while loop
console.log('Reaching the end of the table, offset ended at', offset)
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Reaching the end of the table, offset ended at", offset);
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();

View file

@ -7,44 +7,47 @@
* The purpose of the script is to benchmark pure read with limit set as 1000
*/
import Knex from 'knex'
import knexConfig from '../../db/knexfile.js'
import { getAllBreachesFromDb } from '../../utils/hibp.js'
const knex = Knex(knexConfig)
import Knex from "knex";
import knexConfig from "../../db/knexfile.js";
import { getAllBreachesFromDb } from "../../utils/hibp.js";
const knex = Knex(knexConfig);
const LIMIT = 1000 // with millions of records, we have to load a few at a time
let CAP = 1500000
const LIMIT = 1000; // with millions of records, we have to load a few at a time
let CAP = 1500000;
if (process.argv.length > 2) {
CAP = process.argv[2]
console.log('using cap passed in: ', CAP)
CAP = process.argv[2];
console.log("using cap passed in: ", CAP);
}
let offset = 0 // looping through all records with offset
let subscribersArr = []
let offset = 0; // looping through all records with offset
let subscribersArr = [];
// load all breaches for ref
const allBreaches = await getAllBreachesFromDb()
if (allBreaches && allBreaches.length > 0) console.log('breaches loaded successfully! ', allBreaches.length)
const allBreaches = await getAllBreachesFromDb();
if (allBreaches && allBreaches.length > 0)
console.log("breaches loaded successfully! ", allBreaches.length);
const startTime = Date.now()
console.log(`Start time is: ${startTime}`)
const startTime = Date.now();
console.log(`Start time is: ${startTime}`);
do {
console.log(`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`)
console.log(
`Converting breaches_resolved to breach_resolution - start: ${offset} limit: ${LIMIT}`,
);
subscribersArr = await knex
.select('id', 'primary_email', 'breaches_resolved', 'breach_resolution')
.from('subscribers')
.whereNotNull('breaches_resolved')
.select("id", "primary_email", "breaches_resolved", "breach_resolution")
.from("subscribers")
.whereNotNull("breaches_resolved")
.limit(LIMIT)
.offset(offset)
.orderBy('updated_at', 'desc')
.orderBy("updated_at", "desc");
console.log(`Loaded # of subscribers: ${subscribersArr.length}`)
console.log(`Loaded # of subscribers: ${subscribersArr.length}`);
offset += LIMIT
} while (subscribersArr.length === LIMIT && offset <= CAP)
offset += LIMIT;
} while (subscribersArr.length === LIMIT && offset <= CAP);
// breaking out of do..while loop
console.log('Reaching the end of the table, offset ended at', offset)
const endTime = Date.now()
console.log(`End time is: ${endTime}`)
console.log('Diff is: ', endTime - startTime)
process.exit()
console.log("Reaching the end of the table, offset ended at", offset);
const endTime = Date.now();
console.log(`End time is: ${endTime}`);
console.log("Diff is: ", endTime - startTime);
process.exit();

View file

@ -1,10 +1,10 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import { Upload } from "@aws-sdk/lib-storage";
import { S3 } from "@aws-sdk/client-s3";
import "dotenv/config"
import "dotenv/config";
const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
@ -12,8 +12,8 @@ const region = process.env.AWS_REGION;
const Bucket = process.env.S3_BUCKET;
if (!accessKeyId || !secretAccessKey || !region || !Bucket) {
console.error("Environment vars for s3 upload are not set correctly")
process.exit()
console.error("Environment vars for s3 upload are not set correctly");
process.exit();
}
const s3 = new S3({
@ -25,19 +25,19 @@ const s3 = new S3({
});
export async function uploadToS3(fileName, fileStream) {
console.log('Attempt to upload to s3: ', fileName)
console.log("Attempt to upload to s3: ", fileName);
const uploadParams = {
Bucket,
Key: fileName,
Body: fileStream
}
Body: fileStream,
};
try {
await new Upload({
client: s3,
params: uploadParams
}).done()
console.log('Successfully uploaded data to ' + Bucket + '/' + fileName)
params: uploadParams,
}).done();
console.log("Successfully uploaded data to " + Bucket + "/" + fileName);
} catch (err) {
console.error(err, err.stack)
console.error(err, err.stack);
}
}
}
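Callers in these scripts pass either a Buffer or a readable stream as the body; a minimal usage sketch, assuming the same environment variables are set and a hypothetical local file, might be:

import { createReadStream } from "node:fs";
import { uploadToS3 } from "./s3.js";

await uploadToS3(
  "autocomplete/locationAutocompleteData.json",
  createReadStream("./locationAutocompleteData.json"),
);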

View file

@ -11,17 +11,20 @@
import { readdir } from "node:fs/promises";
import { resolve as pathResolve } from "node:path";
import { finished } from 'node:stream/promises';
import { finished } from "node:stream/promises";
import { createWriteStream } from "node:fs";
import { Readable } from 'node:stream';
import os from 'node:os';
import Sentry from "@sentry/nextjs"
import { req, formatDataClassesArray } from '../utils/hibp.js'
import { getAllBreaches, upsertBreaches, updateBreachFaviconUrl} from '../db/tables/breaches.js'
import { uploadToS3 } from './s3.js'
const SENTRY_SLUG = "cron-sync-breaches"
import { Readable } from "node:stream";
import os from "node:os";
import Sentry from "@sentry/nextjs";
import { req, formatDataClassesArray } from "../utils/hibp.js";
import {
getAllBreaches,
upsertBreaches,
updateBreachFaviconUrl,
} from "../db/tables/breaches.js";
import { uploadToS3 } from "./s3.js";
const SENTRY_SLUG = "cron-sync-breaches";
Sentry.init({
dsn: process.env.SENTRY_DSN,
@ -30,86 +33,99 @@ Sentry.init({
const checkInId = Sentry.captureCheckIn({
monitorSlug: SENTRY_SLUG,
status: "in_progress"
status: "in_progress",
});
export async function getBreachIcons(breaches) {
// make logofolder if it doesn't exist
const logoFolder = os.tmpdir();
console.log(`Logo folder: ${logoFolder}`)
console.log(`Logo folder: ${logoFolder}`);
// read existing logos
const existingLogos = await readdir(logoFolder);
(await Promise.allSettled(
breaches.map(async ({Domain: breachDomain, Name: breachName}) => {
await Promise.allSettled(
breaches.map(async ({ Domain: breachDomain, Name: breachName }) => {
if (!breachDomain || breachDomain.length == 0) {
console.log('empty domain: ', breachName)
await updateBreachFaviconUrl(breachName, null)
console.log("empty domain: ", breachName);
await updateBreachFaviconUrl(breachName, null);
return;
}
const logoFilename = breachDomain.toLowerCase() + ".ico";
const logoPath = pathResolve(logoFolder, logoFilename);
if (existingLogos.includes(logoFilename)) {
console.log('skipping ', logoFilename)
await updateBreachFaviconUrl(breachName, `https://s3.amazonaws.com/${process.env.S3_BUCKET}/${logoFilename}`)
console.log("skipping ", logoFilename);
await updateBreachFaviconUrl(
breachName,
`https://s3.amazonaws.com/${process.env.S3_BUCKET}/${logoFilename}`,
);
return;
}
console.log(`fetching: ${logoFilename}`)
console.log(`fetching: ${logoFilename}`);
const res = await fetch(
`https://icons.duckduckgo.com/ip3/${breachDomain}.ico`);
`https://icons.duckduckgo.com/ip3/${breachDomain}.ico`,
);
if (res.status !== 200) {
// update logo path with null
console.log(`Logo does not exist for: ${breachName} ${breachDomain}`)
await updateBreachFaviconUrl(breachName, null)
console.log(`Logo does not exist for: ${breachName} ${breachDomain}`);
await updateBreachFaviconUrl(breachName, null);
return;
}
await uploadToS3(logoFilename, Buffer.from(await res.arrayBuffer()))
const fileStream = createWriteStream(logoPath, { flags: 'wx' });
const bodyReadable = Readable.fromWeb(res.body)
await uploadToS3(logoFilename, Buffer.from(await res.arrayBuffer()));
const fileStream = createWriteStream(logoPath, { flags: "wx" });
const bodyReadable = Readable.fromWeb(res.body);
await finished(bodyReadable.pipe(fileStream));
await updateBreachFaviconUrl(breachName, `https://s3.amazonaws.com/${process.env.S3_BUCKET}/${logoFilename}`)
})
));
await updateBreachFaviconUrl(
breachName,
`https://s3.amazonaws.com/${process.env.S3_BUCKET}/${logoFilename}`,
);
}),
);
}
// Get breaches and upserts to DB
const breachesResponse = await req('/breaches')
const breaches = []
const seen = new Set()
const breachesResponse = await req("/breaches");
const breaches = [];
const seen = new Set();
for (const breach of breachesResponse) {
breach.DataClasses = formatDataClassesArray(breach.DataClasses)
breach.LogoPath = /[^/]*$/.exec(breach.LogoPath)[0]
breaches.push(breach)
seen.add(breach.Name + breach.BreachDate)
breach.DataClasses = formatDataClassesArray(breach.DataClasses);
breach.LogoPath = /[^/]*$/.exec(breach.LogoPath)[0];
breaches.push(breach);
seen.add(breach.Name + breach.BreachDate);
// sanity check: corrupt data structure
if (!isValidBreach(breach)) throw new Error('Breach data structure is not valid', JSON.stringify(breach))
if (!isValidBreach(breach))
throw new Error(
"Breach data structure is not valid",
JSON.stringify(breach),
);
}
console.log('Breaches found: ', breaches.length)
console.log('Unique breaches based on Name + BreachDate', seen.size)
console.log("Breaches found: ", breaches.length);
console.log("Unique breaches based on Name + BreachDate", seen.size);
// sanity check: no duplicate breaches with Name + BreachDate
if (seen.size !== breaches.length) {
throw new Error('Breaches contain duplicates. Stopping script...')
throw new Error("Breaches contain duplicates. Stopping script...");
} else {
await upsertBreaches(breaches)
await upsertBreaches(breaches);
// get
const result = await getAllBreaches()
console.log("Number of breaches in the database after upsert:", result.length)
const result = await getAllBreaches();
console.log(
"Number of breaches in the database after upsert:",
result.length,
);
}
await getBreachIcons(breaches)
await getBreachIcons(breaches);
Sentry.captureCheckIn({
checkInId,
monitorSlug: SENTRY_SLUG,
status: "ok"
})
setTimeout(process.exit, 1000)
status: "ok",
});
setTimeout(process.exit, 1000);
/**
* Null check for some required field
@ -118,9 +134,11 @@ setTimeout(process.exit, 1000)
* @returns Boolean is it a valid breach
*/
function isValidBreach(breach) {
return breach.Name !== undefined &&
return (
breach.Name !== undefined &&
breach.BreachDate !== undefined &&
breach.Title !== undefined &&
breach.Domain !== undefined &&
breach.DataClasses !== undefined
);
}
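As a quick illustration, a record shaped like the HIBP breaches handled above (made-up values) would satisfy isValidBreach, while leaving out any of the checked fields would not:

const exampleBreach = {
  Name: "ExampleBreach",
  Title: "Example Breach",
  Domain: "example.com",
  BreachDate: "2023-01-01",
  DataClasses: ["email-addresses", "passwords"],
};
// isValidBreach(exampleBreach) evaluates to true; omitting e.g. DataClasses makes it false.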

View file

@ -23,7 +23,7 @@ import {
writeFileSync,
} from "fs";
import { uploadToS3 } from "./s3.js";
import Sentry from "@sentry/nextjs"
import Sentry from "@sentry/nextjs";
import os from "os";
import path from "path";
import fs from "fs";
@ -42,10 +42,9 @@ Sentry.init({
tracesSampleRate: 1.0,
});
const checkInId = Sentry.captureCheckIn({
monitorSlug: SENTRY_SLUG,
status: "in_progress"
status: "in_progress",
});
// Only include populated places that are a city, town, village, or another
@ -65,14 +64,11 @@ const allowedFeatureCodes = [
function logProgress(currentCount, totalCount) {
const progress = Math.round(((currentCount + 1) / totalCount) * 100);
process.stdout.write(
`-> ${currentCount + 1} / ${totalCount} (${progress}%) \r`
`-> ${currentCount + 1} / ${totalCount} (${progress}%) \r`,
);
}
function writeFromRemoteFile({
url,
writeStream,
}) {
function writeFromRemoteFile({ url, writeStream }) {
return new Promise((resolve, reject) => {
https.get(url, (res) => {
res.on("end", () => {
@ -92,7 +88,7 @@ async function fetchRemoteArchive({
localExtractionPath,
}) {
console.info(
`Downloading remote file: ${remoteArchiveUrl} -> ${localDownloadPath}`
`Downloading remote file: ${remoteArchiveUrl} -> ${localDownloadPath}`,
);
await writeFromRemoteFile({
@ -104,7 +100,7 @@ async function fetchRemoteArchive({
const zip = new AdmZip(localDownloadPath);
await new Promise((resolve, reject) => {
zip.extractAllToAsync(localExtractionPath, true, false, (error) =>
error ? reject(error) : resolve()
error ? reject(error) : resolve(),
);
});
}
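// A call to this helper presumably looks something like the following (hypothetical URL
// and paths; only the parameter names are taken from the code above):
//   await fetchRemoteArchive({
//     remoteArchiveUrl: "https://example.com/geodata/US.zip",
//     localDownloadPath: "/tmp/US.zip",
//     localExtractionPath: "/tmp/US",
//   });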
@ -154,7 +150,7 @@ try {
console.info("Reading file: Alternate location names");
const alternateNamesData = readFileSync(
`${localDestinationPath.alternateNames}/${DATA_COUNTRY_CODE}.txt`,
"utf8"
"utf8",
);
console.info("Parsing data: Alternate location names");
@ -172,7 +168,7 @@ try {
isHistoric,
_from,
_to,
] = alternateNamesLine.split("\t") // lines are tab delimited
] = alternateNamesLine.split("\t"); // lines are tab delimited
const isAbbreviation = isolanguage === "abbr";
const isRelevantAlternateName =
@ -189,14 +185,12 @@ try {
return null;
})
.filter(
(alternateName) => alternateName
);
.filter((alternateName) => alternateName);
console.info("Reading file: Hierarchy");
const hierachyData = readFileSync(
`${localDestinationPath.hierarchy}/hierarchy.txt`,
"utf8"
"utf8",
);
console.info("Parsing data: Location hierarchy");
const hierachyDataRows = hierachyData.split("\n");
@ -210,7 +204,7 @@ try {
console.info("Reading file: All locations");
const locationData = readFileSync(
`${localDestinationPath.locations}/${DATA_COUNTRY_CODE}.txt`,
"utf8"
"utf8",
);
console.info("Parsing data: All locations");
@ -240,7 +234,7 @@ try {
_dem,
_timezone,
_modificationDate,
] = location.split("\t") // lines are tab delimited
] = location.split("\t"); // lines are tab delimited
const isPopulatedPlaceOfInterest =
featureClass === allowedFeatureClass &&
@ -250,10 +244,10 @@ try {
if (isPopulatedPlaceOfInterest && hasPopulation) {
const alternateNames = parsedAlternateNames.filter(
({ alternateOf, name: alternateName }) =>
alternateOf === geonameId && alternateName !== name
alternateOf === geonameId && alternateName !== name,
);
const preferredName = alternateNames.find(
({ isPreferredName }) => isPreferredName === "1"
({ isPreferredName }) => isPreferredName === "1",
);
const alternateNamesFinal = alternateNames.map((alternateName) => {
// Include the original name as an alternative name if we'll use an
@ -276,13 +270,14 @@ try {
population,
...(alternateNames &&
alternateNames.length > 0 && {
alternateNames: alternateNamesFinal,
}),
alternateNames: alternateNamesFinal,
}),
});
}
return relevantLocations;
}, []
},
[],
);
// Filter out locations that have another populated place as a parent.
@ -304,15 +299,15 @@ try {
location.featureClass === allowedFeatureClass
);
});
}
},
);
return !hasPopulatedParentLocation;
}
},
);
console.info(
`Number of relevant locations found: ${locationDataPopulatedTopLevel.length}`
`Number of relevant locations found: ${locationDataPopulatedTopLevel.length}`,
);
console.info(`Writing location data to file: ${LOCATIONS_DATA_FILE}`);
@ -330,7 +325,7 @@ try {
writeFileSync(LOCATIONS_DATA_FILE, JSON.stringify(locationDataFinal));
let readStream = fs.createReadStream(LOCATIONS_DATA_FILE);
await uploadToS3(`autocomplete/${LOCATIONS_DATA_FILE}`, readStream)
await uploadToS3(`autocomplete/${LOCATIONS_DATA_FILE}`, readStream);
if (CLEANUP_TMP_DATA_AFTER_FINISHED) {
console.info("Cleaning up data directory");
@ -342,8 +337,9 @@ try {
const endTime = Date.now();
console.info(
`Created location data file successfully: Executed in ${(endTime - startTime) / 1000
}s`
`Created location data file successfully: Executed in ${
(endTime - startTime) / 1000
}s`,
);
} catch (error) {
console.error("Creating location file failed with:", error);
@ -352,6 +348,6 @@ try {
Sentry.captureCheckIn({
checkInId,
monitorSlug: SENTRY_SLUG,
status: "ok"
})
setTimeout(process.exit, 1000)
status: "ok",
});
setTimeout(process.exit, 1000);