Merge branch 'main' into e2e-tests-enhancement-work
This commit is contained in:
Коммит
7976a9105c
|
@ -378,7 +378,7 @@ jobs:
|
|||
description: "What command should the job run?"
|
||||
default: "pytest"
|
||||
type: enum
|
||||
enum: ["pytest", "black", "mypy", "build_email_tracker_lists", "build_glean", "check_glean"]
|
||||
enum: ["pytest", "black", "mypy", "build_email_tracker_lists", "build_glean", "check_glean", "ruff"]
|
||||
test_results_filename:
|
||||
description: "What is the name of the jUnit XML test output? (Optional)"
|
||||
default: ""
|
||||
|
@ -494,6 +494,7 @@ jobs:
|
|||
echo "export TEST_DB_NAME=$(printf '%q' "${TEST_DB_NAME}")" >> "$TMP_ENV"
|
||||
echo "export TEST_DB_URL=$(printf '%q' "${TEST_DB_URL}")" >> "$TMP_ENV"
|
||||
echo "export DATABASE_ENGINE=$(printf '%q' "${DATABASE_ENGINE}")" >> "$TMP_ENV"
|
||||
echo "export TEST_RESULTS_DIR=job-results" >> "$TMP_ENV"
|
||||
|
||||
cat "$TMP_ENV" | tee --append "$BASH_ENV"
|
||||
cat /home/circleci/project/.circleci/python_job.bash >> "$BASH_ENV"
|
||||
|
@ -504,7 +505,7 @@ jobs:
|
|||
steps:
|
||||
- run:
|
||||
name: Create job-results directory
|
||||
command: mkdir job-results
|
||||
command: mkdir -p "$TEST_RESULTS_DIR"
|
||||
- run:
|
||||
name: Set test defaults
|
||||
command: cp .env-dist .env
|
||||
|
@ -608,6 +609,13 @@ workflows:
|
|||
command: black
|
||||
filters: *default_filters
|
||||
|
||||
- python_job:
|
||||
name: ruff linting check
|
||||
command: ruff
|
||||
test_results_filename: "ruff.xml"
|
||||
allow_fail: true
|
||||
filters: *default_filters
|
||||
|
||||
- python_job:
|
||||
name: mypy type check
|
||||
command: mypy
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
: ${PHONES_ENABLED:=0}
|
||||
: ${PYTEST_FAIL_FAST:=0}
|
||||
: ${PYTEST_MIGRATIONS_MODE:=0}
|
||||
: ${TEST_RESULTS_DIR:=tmp}
|
||||
: ${TEST_RESULTS_FILENAME:=}
|
||||
: ${DATABASE_URL:=sqlite:///db.sqlite3}
|
||||
: ${TEST_DB_NAME:=test.sqlite3}
|
||||
|
@ -28,7 +29,7 @@ function run_mypy {
|
|||
if [ $MYPY_STRICT -ne 0 ]; then MYPY_ARGS+=("--strict"); fi
|
||||
if [ -n "$TEST_RESULTS_FILENAME" ]
|
||||
then
|
||||
MYPY_ARGS+=("--junit-xml" "job-results/${TEST_RESULTS_FILENAME}")
|
||||
MYPY_ARGS+=("--junit-xml" "${TEST_RESULTS_DIR}/${TEST_RESULTS_FILENAME}")
|
||||
fi
|
||||
MYPY_ARGS+=(".")
|
||||
|
||||
|
@ -56,7 +57,7 @@ function run_pytest {
|
|||
if [ $CREATE_DB -ne 0 ]; then PYTEST_ARGS+=("--create-db"); fi
|
||||
if [ -n "$TEST_RESULTS_FILENAME" ] && [ $SKIP_RESULTS != 1 ]
|
||||
then
|
||||
PYTEST_ARGS+=("--junit-xml=job-results/$TEST_RESULTS_FILENAME")
|
||||
PYTEST_ARGS+=("--junit-xml=${TEST_RESULTS_DIR}/$TEST_RESULTS_FILENAME")
|
||||
fi
|
||||
PYTEST_ARGS+=(".")
|
||||
|
||||
|
@ -84,6 +85,7 @@ function run_build_glean {
|
|||
set -x
|
||||
glean_parser translate --format python_server --output ${OUTPUT_FOLDER} ${INPUT_YAML}
|
||||
black ${OUTPUT_FOLDER}
|
||||
ruff check --fix --ignore E501 ${OUTPUT_FOLDER}
|
||||
{ set +x; } 2>/dev/null
|
||||
case "$OSTYPE" in
|
||||
darwin* | bsd*)
|
||||
|
@ -169,6 +171,20 @@ function run_check_glean {
|
|||
fi
|
||||
}
|
||||
|
||||
# Run ruff to lint python code
|
||||
function run_ruff {
|
||||
if [ -n "$TEST_RESULTS_FILENAME" ]
|
||||
then
|
||||
# Run with output to a test results file instead of stdout
|
||||
set -x
|
||||
ruff check --exit-zero --output-format junit --output-file "${TEST_RESULTS_DIR}/${TEST_RESULTS_FILENAME}" .
|
||||
{ set +x; } 2>/dev/null
|
||||
fi
|
||||
|
||||
set -x
|
||||
ruff check .
|
||||
}
|
||||
|
||||
|
||||
# Run a command by name
|
||||
# $1 - The command to run - black, mypy, pytest, or build_email_tracker_lists
|
||||
|
@ -176,7 +192,7 @@ function run_check_glean {
|
|||
function run_command {
|
||||
local COMMAND=${1:-}
|
||||
case $COMMAND in
|
||||
black | mypy | pytest | build_email_tracker_lists | build_glean | check_glean)
|
||||
black | mypy | pytest | build_email_tracker_lists | build_glean | check_glean | ruff)
|
||||
:;;
|
||||
"")
|
||||
echo "No command passed - '$COMMAND'"
|
||||
|
|
|
@ -6,5 +6,5 @@ module.exports = {
|
|||
.map((file) => file.split(process.cwd())[1])
|
||||
.join(" --file ")}`,
|
||||
"*.md": "prettier --write",
|
||||
"*.py": ["black", "mypy"],
|
||||
"*.py": ["black", "mypy", "ruff check --fix"],
|
||||
}
|
||||
|
|
|
@ -3,3 +3,7 @@ from django.apps import AppConfig
|
|||
|
||||
class ApiConfig(AppConfig):
|
||||
name = "api"
|
||||
|
||||
def ready(self) -> None:
|
||||
# Register drf_spectacular schema extensions
|
||||
import api.schema # noqa: F401
|
||||
|
|
|
@ -1,13 +1,12 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
import logging
|
||||
import shlex
|
||||
|
||||
import requests
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
|
||||
import requests
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
from rest_framework.exceptions import (
|
||||
|
@ -18,7 +17,6 @@ from rest_framework.exceptions import (
|
|||
PermissionDenied,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
INTROSPECT_TOKEN_URL = (
|
||||
"%s/introspect" % settings.SOCIALACCOUNT_PROVIDERS["fxa"]["OAUTH_ENDPOINT"]
|
||||
|
@ -95,10 +93,10 @@ def get_fxa_uid_from_oauth_token(token: str, use_cache=True) -> str:
|
|||
|
||||
# cache valid access_token and fxa_resp_data until access_token expiration
|
||||
# TODO: revisit this since the token can expire before its time
|
||||
if type(fxa_resp_data.get("json", {}).get("exp")) is int:
|
||||
if isinstance(fxa_resp_data.get("json", {}).get("exp"), int):
|
||||
# Note: FXA iat and exp are timestamps in *milliseconds*
|
||||
fxa_token_exp_time = int(fxa_resp_data["json"]["exp"] / 1000)
|
||||
now_time = int(datetime.now(timezone.utc).timestamp())
|
||||
now_time = int(datetime.now(UTC).timestamp())
|
||||
fxa_token_exp_cache_timeout = fxa_token_exp_time - now_time
|
||||
if fxa_token_exp_cache_timeout > cache_timeout:
|
||||
# cache until access_token expires (matched Relay user)
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
from rest_framework import permissions
|
||||
|
||||
from emails.models import Profile
|
||||
|
||||
from waffle import flag_is_active
|
||||
|
||||
from emails.models import Profile
|
||||
|
||||
READ_METHODS = ["GET", "HEAD"]
|
||||
|
||||
|
|
Различия файлов скрыты, потому что одна или несколько строк слишком длинны
|
@ -0,0 +1,39 @@
|
|||
"""Schema Extensions for drf-spectacular"""
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
from drf_spectacular.openapi import AutoSchema
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
class FxaTokenAuthenticationScheme(OpenApiAuthenticationExtension):
|
||||
target_class = "api.authentication.FxaTokenAuthentication"
|
||||
name = "Mozilla account Token Auth"
|
||||
|
||||
def get_security_definition(self, auto_schema: AutoSchema) -> dict[str, str]:
|
||||
return {"type": "http", "scheme": "bearer"}
|
||||
|
||||
|
||||
IGNORED_PATHS = {
|
||||
"/api/v1/inbound_call",
|
||||
"/api/v1/inbound_sms",
|
||||
"/api/v1/realphone/resend_welcome_sms",
|
||||
"/api/v1/report_webcompat_issue",
|
||||
"/api/v1/runtime_data",
|
||||
"/api/v1/sms_status",
|
||||
"/api/v1/vCard/{lookup_key}",
|
||||
"/api/v1/voice_status",
|
||||
}
|
||||
|
||||
ENDPOINT = tuple[str, str, str, Callable[..., Response]]
|
||||
|
||||
|
||||
def preprocess_ignore_deprecated_paths(endpoints: list[ENDPOINT]) -> list[ENDPOINT]:
|
||||
"Remove the deprecated path variants without the trailing slash."
|
||||
|
||||
return [
|
||||
(path, path_regex, method, callback)
|
||||
for path, path_regex, method, callback in endpoints
|
||||
if path not in IGNORED_PATHS
|
||||
]
|
|
@ -1,7 +1,7 @@
|
|||
from django.contrib.auth.models import User
|
||||
from django.db.models import prefetch_related_objects
|
||||
|
||||
from rest_framework import serializers, exceptions
|
||||
from rest_framework import exceptions, serializers
|
||||
from waffle import get_waffle_flag_model
|
||||
|
||||
from emails.models import DomainAddress, Profile, RelayAddress
|
||||
|
@ -130,16 +130,16 @@ class StrictReadOnlyFieldsMixin:
|
|||
return attrs
|
||||
|
||||
# Getting the declared read only fields and read only fields from Meta
|
||||
read_only_fields = set(
|
||||
read_only_fields = {
|
||||
field_name for field_name, field in self.fields.items() if field.read_only
|
||||
).union(set(getattr(self.Meta, "read_only_fields", set())))
|
||||
}.union(set(getattr(self.Meta, "read_only_fields", set())))
|
||||
|
||||
# Getting implicit read only fields that are in the Profile model, but were not
|
||||
# defined in the serializer. By default, they won't update if put in the body
|
||||
# of a request, but they still give a 200 response (which we don't want).
|
||||
implicit_read_only_fields = set(
|
||||
implicit_read_only_fields = {
|
||||
field for field in vars(self.Meta.model) if field not in self.fields
|
||||
)
|
||||
}
|
||||
|
||||
received_read_only_fields = set(self.initial_data).intersection(
|
||||
read_only_fields.union(implicit_read_only_fields)
|
||||
|
|
|
@ -1,28 +1,22 @@
|
|||
from datetime import datetime
|
||||
|
||||
from model_bakery import baker
|
||||
import responses
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.test import RequestFactory, TestCase
|
||||
|
||||
import responses
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from rest_framework.exceptions import (
|
||||
APIException,
|
||||
AuthenticationFailed,
|
||||
NotFound,
|
||||
)
|
||||
from model_bakery import baker
|
||||
from rest_framework.exceptions import APIException, AuthenticationFailed, NotFound
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from ..authentication import (
|
||||
INTROSPECT_TOKEN_URL,
|
||||
FxaTokenAuthentication,
|
||||
get_cache_key,
|
||||
get_fxa_uid_from_oauth_token,
|
||||
introspect_token,
|
||||
INTROSPECT_TOKEN_URL,
|
||||
)
|
||||
|
||||
|
||||
MOCK_BASE = "api.authentication"
|
||||
|
||||
|
||||
|
|
|
@ -1,28 +1,24 @@
|
|||
from allauth.socialaccount.models import SocialAccount
|
||||
import pytest
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
pytestmark = pytest.mark.skipif(
|
||||
not settings.PHONES_ENABLED, reason="PHONES_ENABLED is False"
|
||||
)
|
||||
pytestmark = pytest.mark.skipif(not settings.IQ_ENABLED, reason="IQ_ENABLED is False")
|
||||
|
||||
import pytest
|
||||
import responses
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from twilio.rest import Client
|
||||
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from rest_framework.test import RequestsClient
|
||||
from twilio.rest import Client
|
||||
|
||||
if settings.PHONES_ENABLED:
|
||||
from api.views.phones import compute_iq_mac
|
||||
from phones.models import InboundContact, iq_fmt
|
||||
|
||||
from phones.tests.models_tests import make_phone_test_user
|
||||
from api.tests.phones_views_tests import _make_real_phone, _make_relay_number
|
||||
from phones.tests.models_tests import make_phone_test_user
|
||||
|
||||
pytestmark = pytest.mark.skipif(
|
||||
not settings.PHONES_ENABLED, reason="PHONES_ENABLED is False"
|
||||
)
|
||||
pytestmark = pytest.mark.skipif(not settings.IQ_ENABLED, reason="IQ_ENABLED is False")
|
||||
|
||||
API_ROOT = "http://127.0.0.1:8000"
|
||||
INBOUND_SMS_PATH = f"{API_ROOT}/api/v1/inbound_sms_iq/"
|
||||
|
|
|
@ -1,27 +1,26 @@
|
|||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from typing import Iterator, Literal
|
||||
from unittest.mock import Mock, patch, call
|
||||
import re
|
||||
|
||||
from twilio.request_validator import RequestValidator
|
||||
from twilio.rest import Client
|
||||
from collections.abc import Iterator
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from typing import Literal
|
||||
from unittest.mock import Mock, call, patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.test.utils import override_settings
|
||||
|
||||
import pytest
|
||||
from model_bakery import baker
|
||||
from rest_framework.test import APIClient
|
||||
from twilio.base.exceptions import TwilioRestException
|
||||
from twilio.request_validator import RequestValidator
|
||||
from twilio.rest import Client
|
||||
from waffle.testutils import override_flag
|
||||
import pytest
|
||||
|
||||
from emails.models import Profile
|
||||
|
||||
|
||||
if settings.PHONES_ENABLED:
|
||||
from api.views.phones import _match_by_prefix, MatchByPrefix
|
||||
from api.views.phones import MatchByPrefix, _match_by_prefix
|
||||
from phones.models import InboundContact, RealPhone, RelayNumber
|
||||
from phones.tests.models_tests import make_phone_test_user
|
||||
|
||||
|
@ -97,55 +96,55 @@ def user_with_sms_activity(outbound_phone_user, mocked_twilio_client):
|
|||
InboundContact.objects.create(
|
||||
relay_number=relay_number,
|
||||
inbound_number="+13015550001",
|
||||
last_inbound_date=datetime(2023, 3, 1, 12, 5, tzinfo=timezone.utc),
|
||||
last_inbound_date=datetime(2023, 3, 1, 12, 5, tzinfo=UTC),
|
||||
last_inbound_type="text",
|
||||
last_text_date=datetime(2023, 3, 1, 12, 5, tzinfo=timezone.utc),
|
||||
last_text_date=datetime(2023, 3, 1, 12, 5, tzinfo=UTC),
|
||||
)
|
||||
# Second SMS contact
|
||||
InboundContact.objects.create(
|
||||
relay_number=relay_number,
|
||||
inbound_number="+13015550002",
|
||||
last_inbound_date=datetime(2023, 3, 2, 13, 5, tzinfo=timezone.utc),
|
||||
last_inbound_date=datetime(2023, 3, 2, 13, 5, tzinfo=UTC),
|
||||
last_inbound_type="text",
|
||||
last_text_date=datetime(2023, 3, 2, 13, 5, tzinfo=timezone.utc),
|
||||
last_text_date=datetime(2023, 3, 2, 13, 5, tzinfo=UTC),
|
||||
)
|
||||
# Voice contact
|
||||
InboundContact.objects.create(
|
||||
relay_number=relay_number,
|
||||
inbound_number="+13015550003",
|
||||
last_inbound_date=datetime(2023, 3, 3, 8, 30, tzinfo=timezone.utc),
|
||||
last_inbound_date=datetime(2023, 3, 3, 8, 30, tzinfo=UTC),
|
||||
last_inbound_type="call",
|
||||
last_call_date=datetime(2023, 3, 3, 8, 30, tzinfo=timezone.utc),
|
||||
last_call_date=datetime(2023, 3, 3, 8, 30, tzinfo=UTC),
|
||||
)
|
||||
twilio_messages = [
|
||||
MockTwilioMessage(
|
||||
from_="+13015550001",
|
||||
to=relay_number.number,
|
||||
date_sent=datetime(2023, 3, 1, 12, 0, tzinfo=timezone.utc),
|
||||
date_sent=datetime(2023, 3, 1, 12, 0, tzinfo=UTC),
|
||||
body="Send Y to confirm appointment",
|
||||
),
|
||||
MockTwilioMessage(
|
||||
from_=relay_number.number,
|
||||
to="+13015550001",
|
||||
date_sent=datetime(2023, 3, 1, 12, 5, tzinfo=timezone.utc),
|
||||
date_sent=datetime(2023, 3, 1, 12, 5, tzinfo=UTC),
|
||||
body="Y",
|
||||
),
|
||||
MockTwilioMessage(
|
||||
from_="+13015550002",
|
||||
to=relay_number.number,
|
||||
date_sent=datetime(2023, 3, 2, 13, 0, tzinfo=timezone.utc),
|
||||
date_sent=datetime(2023, 3, 2, 13, 0, tzinfo=UTC),
|
||||
body="Donate $100 to Senator Smith?",
|
||||
),
|
||||
MockTwilioMessage(
|
||||
from_=relay_number.number,
|
||||
to="+13015550002",
|
||||
date_sent=datetime(2023, 3, 2, 13, 5, tzinfo=timezone.utc),
|
||||
date_sent=datetime(2023, 3, 2, 13, 5, tzinfo=UTC),
|
||||
body="STOP STOP STOP",
|
||||
),
|
||||
MockTwilioMessage(
|
||||
from_=relay_number.number,
|
||||
to="+13015550004",
|
||||
date_sent=datetime(2023, 3, 4, 20, 55, tzinfo=timezone.utc),
|
||||
date_sent=datetime(2023, 3, 4, 20, 55, tzinfo=UTC),
|
||||
body="U Up?",
|
||||
),
|
||||
]
|
||||
|
@ -1512,14 +1511,12 @@ def test_inbound_sms_reply_no_prefix_last_sender(
|
|||
assert relay_number.texts_forwarded == multi_reply.old_texts_forwarded + 1
|
||||
|
||||
|
||||
_match_by_prefix_candidates = set(
|
||||
(
|
||||
"+13015550000",
|
||||
"+13025550001",
|
||||
"+13035550001", # Same last 4 digits as above
|
||||
"+13045551301", # Last 4 match first 4 of oldest
|
||||
)
|
||||
)
|
||||
_match_by_prefix_candidates = {
|
||||
"+13015550000",
|
||||
"+13025550001",
|
||||
"+13035550001", # Same last 4 digits as above
|
||||
"+13045551301", # Last 4 match first 4 of oldest
|
||||
}
|
||||
|
||||
|
||||
MatchByPrefixParams = tuple[
|
||||
|
|
|
@ -1,22 +1,21 @@
|
|||
from django.urls import reverse
|
||||
|
||||
import pytest
|
||||
from model_bakery import baker
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.test import APITestCase
|
||||
from waffle.models import Flag
|
||||
import pytest
|
||||
|
||||
from emails.models import RelayAddress
|
||||
from emails.tests.models_tests import make_free_test_user, make_premium_test_user
|
||||
|
||||
from api.serializers import FlagSerializer
|
||||
from emails.models import RelayAddress
|
||||
from emails.tests.models_tests import make_free_test_user, make_premium_test_user
|
||||
|
||||
|
||||
class PremiumValidatorsTest(APITestCase):
|
||||
def test_non_premium_cant_set_block_list_emails(self):
|
||||
free_user = make_free_test_user()
|
||||
free_alias = baker.make(RelayAddress, user=free_user)
|
||||
assert free_alias.block_list_emails == False
|
||||
assert free_alias.block_list_emails is False
|
||||
|
||||
url = reverse("relayaddress-detail", args=[free_alias.id])
|
||||
data = {"block_list_emails": True}
|
||||
|
@ -25,12 +24,12 @@ class PremiumValidatorsTest(APITestCase):
|
|||
response = self.client.patch(url, data, format="json")
|
||||
|
||||
assert response.status_code == 401
|
||||
assert free_alias.block_list_emails == False
|
||||
assert free_alias.block_list_emails is False
|
||||
|
||||
def test_non_premium_can_clear_block_list_emails(self):
|
||||
free_user = make_free_test_user()
|
||||
free_alias = baker.make(RelayAddress, user=free_user)
|
||||
assert free_alias.block_list_emails == False
|
||||
assert free_alias.block_list_emails is False
|
||||
|
||||
url = reverse("relayaddress-detail", args=[free_alias.id])
|
||||
data = {"block_list_emails": False}
|
||||
|
@ -40,12 +39,12 @@ class PremiumValidatorsTest(APITestCase):
|
|||
|
||||
assert response.status_code == 200
|
||||
free_alias.refresh_from_db()
|
||||
assert free_alias.block_list_emails == False
|
||||
assert free_alias.block_list_emails is False
|
||||
|
||||
def test_premium_can_set_block_list_emails(self):
|
||||
premium_user = make_premium_test_user()
|
||||
premium_alias = baker.make(RelayAddress, user=premium_user)
|
||||
assert premium_alias.block_list_emails == False
|
||||
assert premium_alias.block_list_emails is False
|
||||
|
||||
url = reverse("relayaddress-detail", args=[premium_alias.id])
|
||||
data = {"block_list_emails": True}
|
||||
|
@ -55,7 +54,7 @@ class PremiumValidatorsTest(APITestCase):
|
|||
|
||||
assert response.status_code == 200
|
||||
premium_alias.refresh_from_db()
|
||||
assert premium_alias.block_list_emails == True
|
||||
assert premium_alias.block_list_emails is True
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
|
|
@ -1,34 +1,34 @@
|
|||
from datetime import datetime
|
||||
import logging
|
||||
from allauth.account.models import EmailAddress
|
||||
import pytest
|
||||
from model_bakery import baker
|
||||
import responses
|
||||
from datetime import datetime
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.cache import cache
|
||||
from django.test import RequestFactory, TestCase
|
||||
from django.utils import timezone
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APIClient
|
||||
from django.utils import timezone
|
||||
|
||||
import pytest
|
||||
import responses
|
||||
from allauth.account.models import EmailAddress
|
||||
from allauth.socialaccount.models import SocialAccount, SocialApp
|
||||
from model_bakery import baker
|
||||
from pytest_django.fixtures import SettingsWrapper
|
||||
from rest_framework.test import APIClient
|
||||
from waffle.testutils import override_flag
|
||||
|
||||
from api.authentication import get_cache_key, INTROSPECT_TOKEN_URL
|
||||
from api.authentication import INTROSPECT_TOKEN_URL, get_cache_key
|
||||
from api.tests.authentication_tests import (
|
||||
_setup_fxa_response,
|
||||
_setup_fxa_response_no_json,
|
||||
)
|
||||
from api.views import FXA_PROFILE_URL
|
||||
from emails.models import Profile, RelayAddress, DomainAddress
|
||||
from emails.models import DomainAddress, Profile, RelayAddress
|
||||
from emails.tests.models_tests import make_free_test_user, make_premium_test_user
|
||||
from privaterelay.tests.utils import (
|
||||
create_expected_glean_event,
|
||||
log_extra,
|
||||
get_glean_event,
|
||||
log_extra,
|
||||
)
|
||||
|
||||
|
||||
|
@ -97,14 +97,12 @@ def test_post_domainaddress_success(
|
|||
assert ret_data["full_address"].startswith("my-new-mask@premium.")
|
||||
|
||||
assert (event := get_glean_event(caplog)) is not None
|
||||
address = premium_user.domainaddress_set.get()
|
||||
expected_event = create_expected_glean_event(
|
||||
category="email_mask",
|
||||
name="created",
|
||||
user=premium_user,
|
||||
extra_items={
|
||||
"n_domain_masks": "1",
|
||||
"mask_id": address.metrics_id,
|
||||
"is_random_mask": "false",
|
||||
"created_by_api": "true",
|
||||
"has_website": "false",
|
||||
|
@ -446,7 +444,6 @@ def test_patch_domainaddress(
|
|||
user=premium_user,
|
||||
extra_items={
|
||||
"n_domain_masks": "1",
|
||||
"mask_id": existing.metrics_id,
|
||||
"is_random_mask": "false",
|
||||
},
|
||||
event_time=event["timestamp"],
|
||||
|
@ -605,7 +602,6 @@ def test_delete_domainaddress(
|
|||
prem_api_client: APIClient, premium_user: User, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
existing = DomainAddress.objects.create(user=premium_user, address="my-doomed-mask")
|
||||
existing_mask_id = existing.metrics_id
|
||||
url = reverse("domainaddress-detail", args=[existing.id])
|
||||
response = prem_api_client.delete(url)
|
||||
assert response.status_code == 204
|
||||
|
@ -618,7 +614,6 @@ def test_delete_domainaddress(
|
|||
user=premium_user,
|
||||
extra_items={
|
||||
"n_deleted_domain_masks": "1",
|
||||
"mask_id": existing_mask_id,
|
||||
"is_random_mask": "false",
|
||||
},
|
||||
event_time=event["timestamp"],
|
||||
|
@ -639,14 +634,12 @@ def test_post_relayaddress_success(
|
|||
assert ret_data["enabled"]
|
||||
|
||||
assert (event := get_glean_event(caplog)) is not None
|
||||
address = free_user.relayaddress_set.get()
|
||||
expected_event = create_expected_glean_event(
|
||||
category="email_mask",
|
||||
name="created",
|
||||
user=free_user,
|
||||
extra_items={
|
||||
"n_random_masks": "1",
|
||||
"mask_id": address.metrics_id,
|
||||
"is_random_mask": "true",
|
||||
"created_by_api": "true",
|
||||
"has_website": "false",
|
||||
|
@ -677,7 +670,6 @@ def test_post_relayaddress_with_generated_for_success(
|
|||
"n_random_masks": "1",
|
||||
"has_extension": "true",
|
||||
"date_got_extension": str(int(address.created_at.timestamp())),
|
||||
"mask_id": address.metrics_id,
|
||||
"is_random_mask": "true",
|
||||
"created_by_api": "true",
|
||||
"has_website": "true",
|
||||
|
@ -773,7 +765,6 @@ def test_patch_relayaddress(
|
|||
user=free_user,
|
||||
extra_items={
|
||||
"n_random_masks": "1",
|
||||
"mask_id": existing.metrics_id,
|
||||
"is_random_mask": "true",
|
||||
},
|
||||
event_time=event["timestamp"],
|
||||
|
@ -933,7 +924,6 @@ def test_delete_randomaddress(
|
|||
free_api_client: APIClient, free_user: User, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
existing = RelayAddress.objects.create(user=free_user)
|
||||
existing_mask_id = existing.metrics_id
|
||||
url = reverse("relayaddress-detail", args=[existing.id])
|
||||
response = free_api_client.delete(url)
|
||||
assert response.status_code == 204
|
||||
|
@ -946,7 +936,6 @@ def test_delete_randomaddress(
|
|||
user=free_user,
|
||||
extra_items={
|
||||
"n_deleted_random_masks": "1",
|
||||
"mask_id": existing_mask_id,
|
||||
"is_random_mask": "true",
|
||||
},
|
||||
event_time=event["timestamp"],
|
||||
|
|
25
api/urls.py
25
api/urls.py
|
@ -9,12 +9,13 @@ from drf_spectacular.views import (
|
|||
from rest_framework import routers
|
||||
|
||||
from privaterelay.utils import enable_if_setting
|
||||
|
||||
from .views import (
|
||||
DomainAddressViewSet,
|
||||
RelayAddressViewSet,
|
||||
ProfileViewSet,
|
||||
UserViewSet,
|
||||
FlagViewSet,
|
||||
ProfileViewSet,
|
||||
RelayAddressViewSet,
|
||||
UserViewSet,
|
||||
first_forwarded_email,
|
||||
report_webcompat_issue,
|
||||
runtime_data,
|
||||
|
@ -93,18 +94,18 @@ urlpatterns = [
|
|||
|
||||
if settings.PHONES_ENABLED:
|
||||
from .views.phones import (
|
||||
outbound_call,
|
||||
list_messages,
|
||||
outbound_sms,
|
||||
InboundContactViewSet,
|
||||
RealPhoneViewSet,
|
||||
RelayNumberViewSet,
|
||||
InboundContactViewSet,
|
||||
inbound_call,
|
||||
inbound_sms,
|
||||
vCard,
|
||||
sms_status,
|
||||
voice_status,
|
||||
list_messages,
|
||||
outbound_call,
|
||||
outbound_sms,
|
||||
resend_welcome_sms,
|
||||
sms_status,
|
||||
vCard,
|
||||
voice_status,
|
||||
)
|
||||
|
||||
if settings.PHONES_ENABLED:
|
||||
|
@ -143,11 +144,11 @@ if settings.PHONES_ENABLED:
|
|||
),
|
||||
path("v1/sms_status/", sms_status, name="sms_status"),
|
||||
path(
|
||||
"v1/vCard/<lookup_key>",
|
||||
"v1/vCard/<str:lookup_key>",
|
||||
vCard,
|
||||
name="vCard_deprecate_after_updating_clients",
|
||||
),
|
||||
path("v1/vCard/<lookup_key>/", vCard, name="vCard"),
|
||||
path("v1/vCard/<str:lookup_key>/", vCard, name="vCard"),
|
||||
path(
|
||||
"v1/realphone/resend_welcome_sms",
|
||||
resend_welcome_sms,
|
||||
|
|
|
@ -9,35 +9,24 @@ Profile stuff is strange - model is in emails, but probably should be in private
|
|||
"""
|
||||
|
||||
import logging
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls.exceptions import NoReverseMatch
|
||||
import requests
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models.query import QuerySet
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls.exceptions import NoReverseMatch
|
||||
|
||||
import django_ftl
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework.authentication import get_authorization_header
|
||||
from rest_framework.exceptions import (
|
||||
AuthenticationFailed,
|
||||
ErrorDetail,
|
||||
ParseError,
|
||||
)
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import BaseSerializer
|
||||
from rest_framework.views import exception_handler
|
||||
|
||||
import requests
|
||||
from allauth.account.adapter import get_adapter as get_account_adapter
|
||||
from allauth.socialaccount.adapter import get_adapter as get_social_adapter
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from allauth.socialaccount.helpers import complete_social_login
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from django_filters import rest_framework as filters
|
||||
from waffle import flag_is_active, get_waffle_flag_model
|
||||
from waffle.models import Switch, Sample
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import (
|
||||
decorators,
|
||||
permissions,
|
||||
|
@ -46,38 +35,39 @@ from rest_framework import (
|
|||
throttling,
|
||||
viewsets,
|
||||
)
|
||||
from emails.apps import EmailsConfig
|
||||
from emails.utils import generate_from_header, incr_if_enabled, ses_message_props
|
||||
from emails.views import wrap_html_email, _get_address
|
||||
from rest_framework.authentication import get_authorization_header
|
||||
from rest_framework.exceptions import AuthenticationFailed, ErrorDetail, ParseError
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import BaseSerializer
|
||||
from rest_framework.views import exception_handler
|
||||
from waffle import flag_is_active, get_waffle_flag_model
|
||||
from waffle.models import Sample, Switch
|
||||
|
||||
from emails.apps import EmailsConfig
|
||||
from emails.models import DomainAddress, Profile, RelayAddress
|
||||
from emails.utils import generate_from_header, incr_if_enabled, ses_message_props
|
||||
from emails.views import _get_address, wrap_html_email
|
||||
from privaterelay.ftl_bundles import main as ftl_bundle
|
||||
from privaterelay.plans import (
|
||||
get_bundle_country_language_mapping,
|
||||
get_premium_country_language_mapping,
|
||||
get_phone_country_language_mapping,
|
||||
get_premium_country_language_mapping,
|
||||
)
|
||||
from privaterelay.utils import get_countries_info_from_request_and_mapping, glean_logger
|
||||
|
||||
from emails.models import (
|
||||
DomainAddress,
|
||||
Profile,
|
||||
RelayAddress,
|
||||
)
|
||||
|
||||
from ..authentication import get_fxa_uid_from_oauth_token
|
||||
from ..exceptions import RelayAPIException
|
||||
from ..permissions import IsOwner, CanManageFlags
|
||||
from ..permissions import CanManageFlags, IsOwner
|
||||
from ..serializers import (
|
||||
DomainAddressSerializer,
|
||||
FirstForwardedEmailSerializer,
|
||||
FlagSerializer,
|
||||
ProfileSerializer,
|
||||
RelayAddressSerializer,
|
||||
UserSerializer,
|
||||
FlagSerializer,
|
||||
WebcompatIssueSerializer,
|
||||
)
|
||||
|
||||
from privaterelay.ftl_bundles import main as ftl_bundle
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
info_logger = logging.getLogger("eventsinfo")
|
||||
FXA_PROFILE_URL = (
|
||||
|
@ -140,14 +130,12 @@ class AddressViewSet(Generic[_Address], SaveToRequestUser, viewsets.ModelViewSet
|
|||
)
|
||||
|
||||
def perform_destroy(self, instance: _Address) -> None:
|
||||
mask_id = instance.metrics_id
|
||||
user = instance.user
|
||||
is_random_mask = isinstance(instance, RelayAddress)
|
||||
super().perform_destroy(instance)
|
||||
glean_logger().log_email_mask_deleted(
|
||||
request=self.request,
|
||||
user=user,
|
||||
mask_id=mask_id,
|
||||
is_random_mask=is_random_mask,
|
||||
)
|
||||
|
||||
|
@ -157,9 +145,10 @@ class RelayAddressViewSet(AddressViewSet[RelayAddress]):
|
|||
permission_classes = [permissions.IsAuthenticated, IsOwner]
|
||||
filterset_class = RelayAddressFilter
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return RelayAddress.objects.filter(user=self.request.user)
|
||||
def get_queryset(self) -> QuerySet[RelayAddress]:
|
||||
if isinstance(self.request.user, User):
|
||||
return RelayAddress.objects.filter(user=self.request.user)
|
||||
return RelayAddress.objects.none()
|
||||
|
||||
|
||||
class DomainAddressFilter(filters.FilterSet):
|
||||
|
@ -190,9 +179,10 @@ class DomainAddressViewSet(AddressViewSet[DomainAddress]):
|
|||
permission_classes = [permissions.IsAuthenticated, IsOwner]
|
||||
filterset_class = DomainAddressFilter
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return DomainAddress.objects.filter(user=self.request.user)
|
||||
def get_queryset(self) -> QuerySet[DomainAddress]:
|
||||
if isinstance(self.request.user, User):
|
||||
return DomainAddress.objects.filter(user=self.request.user)
|
||||
return DomainAddress.objects.none()
|
||||
|
||||
|
||||
class ProfileViewSet(viewsets.ModelViewSet):
|
||||
|
@ -200,9 +190,10 @@ class ProfileViewSet(viewsets.ModelViewSet):
|
|||
permission_classes = [permissions.IsAuthenticated, IsOwner]
|
||||
http_method_names = ["get", "post", "head", "put", "patch"]
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return Profile.objects.filter(user=self.request.user)
|
||||
def get_queryset(self) -> QuerySet[Profile]:
|
||||
if isinstance(self.request.user, User):
|
||||
return Profile.objects.filter(user=self.request.user)
|
||||
return Profile.objects.none()
|
||||
|
||||
|
||||
class UserViewSet(viewsets.ModelViewSet):
|
||||
|
@ -210,9 +201,10 @@ class UserViewSet(viewsets.ModelViewSet):
|
|||
permission_classes = [permissions.IsAuthenticated, IsOwner]
|
||||
http_method_names = ["get", "head"]
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return User.objects.filter(id=self.request.user.id)
|
||||
def get_queryset(self) -> QuerySet[User]:
|
||||
if isinstance(self.request.user, User):
|
||||
return User.objects.filter(id=self.request.user.id)
|
||||
return User.objects.none()
|
||||
|
||||
|
||||
@extend_schema(
|
||||
|
|
|
@ -1,69 +1,62 @@
|
|||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
import hashlib
|
||||
import logging
|
||||
import re
|
||||
import string
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Literal
|
||||
|
||||
from waffle import get_waffle_flag_model
|
||||
import django_ftl
|
||||
import phonenumbers
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models.query import QuerySet
|
||||
from django.forms import model_to_dict
|
||||
|
||||
from drf_spectacular.utils import extend_schema, OpenApiParameter
|
||||
import django_ftl
|
||||
import phonenumbers
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||
from rest_framework import (
|
||||
decorators,
|
||||
exceptions,
|
||||
permissions,
|
||||
response,
|
||||
throttling,
|
||||
viewsets,
|
||||
exceptions,
|
||||
)
|
||||
from rest_framework.generics import get_object_or_404
|
||||
from rest_framework.request import Request
|
||||
|
||||
from twilio.base.exceptions import TwilioRestException
|
||||
from waffle import flag_is_active
|
||||
from waffle import flag_is_active, get_waffle_flag_model
|
||||
|
||||
from api.views import SaveToRequestUser
|
||||
from emails.utils import incr_if_enabled
|
||||
from phones.iq_utils import send_iq_sms
|
||||
|
||||
from phones.apps import phones_config, twilio_client
|
||||
from phones.iq_utils import send_iq_sms
|
||||
from phones.models import (
|
||||
InboundContact,
|
||||
RealPhone,
|
||||
RelayNumber,
|
||||
area_code_numbers,
|
||||
get_last_text_sender,
|
||||
get_pending_unverified_realphone_records,
|
||||
get_valid_realphone_verification_record,
|
||||
get_verified_realphone_record,
|
||||
get_verified_realphone_records,
|
||||
location_numbers,
|
||||
send_welcome_message,
|
||||
suggested_numbers,
|
||||
location_numbers,
|
||||
area_code_numbers,
|
||||
)
|
||||
from privaterelay.ftl_bundles import main as ftl_bundle
|
||||
|
||||
from ..exceptions import ConflictError, ErrorContextType
|
||||
from ..permissions import HasPhoneService
|
||||
from ..renderers import (
|
||||
TemplateTwiMLRenderer,
|
||||
vCardRenderer,
|
||||
)
|
||||
from ..renderers import TemplateTwiMLRenderer, vCardRenderer
|
||||
from ..serializers.phones import (
|
||||
InboundContactSerializer,
|
||||
RealPhoneSerializer,
|
||||
RelayNumberSerializer,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
info_logger = logging.getLogger("eventsinfo")
|
||||
|
||||
|
@ -99,9 +92,10 @@ class RealPhoneViewSet(SaveToRequestUser, viewsets.ModelViewSet):
|
|||
# TODO: this doesn't seem to e working?
|
||||
throttle_classes = [RealPhoneRateThrottle]
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return RealPhone.objects.filter(user=self.request.user)
|
||||
def get_queryset(self) -> QuerySet[RealPhone]:
|
||||
if isinstance(self.request.user, User):
|
||||
return RealPhone.objects.filter(user=self.request.user)
|
||||
return RealPhone.objects.none()
|
||||
|
||||
def create(self, request):
|
||||
"""
|
||||
|
@ -260,9 +254,10 @@ class RelayNumberViewSet(SaveToRequestUser, viewsets.ModelViewSet):
|
|||
permission_classes = [permissions.IsAuthenticated, HasPhoneService]
|
||||
serializer_class = RelayNumberSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
assert isinstance(self.request.user, User)
|
||||
return RelayNumber.objects.filter(user=self.request.user)
|
||||
def get_queryset(self) -> QuerySet[RelayNumber]:
|
||||
if isinstance(self.request.user, User):
|
||||
return RelayNumber.objects.filter(user=self.request.user)
|
||||
return RelayNumber.objects.none()
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""
|
||||
|
@ -361,9 +356,11 @@ class InboundContactViewSet(viewsets.ModelViewSet):
|
|||
permission_classes = [permissions.IsAuthenticated, HasPhoneService]
|
||||
serializer_class = InboundContactSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
request_user_relay_num = get_object_or_404(RelayNumber, user=self.request.user)
|
||||
return InboundContact.objects.filter(relay_number=request_user_relay_num)
|
||||
def get_queryset(self) -> QuerySet[InboundContact]:
|
||||
if isinstance(self.request.user, User):
|
||||
relay_number = get_object_or_404(RelayNumber, user=self.request.user)
|
||||
return InboundContact.objects.filter(relay_number=relay_number)
|
||||
return InboundContact.objects.none()
|
||||
|
||||
|
||||
def _validate_number(request, number_field="number"):
|
||||
|
@ -430,7 +427,7 @@ def _get_number_details(e164_number):
|
|||
@decorators.api_view()
|
||||
@decorators.permission_classes([permissions.AllowAny])
|
||||
@decorators.renderer_classes([vCardRenderer])
|
||||
def vCard(request, lookup_key):
|
||||
def vCard(request: Request, lookup_key: str) -> response.Response:
|
||||
"""
|
||||
Get a Relay vCard. `lookup_key` should be passed in url path.
|
||||
|
||||
|
@ -1288,7 +1285,7 @@ def _check_and_update_contact(inbound_contact, contact_type, relay_number):
|
|||
relay_number.save()
|
||||
raise exceptions.ValidationError(f"Number is not accepting {contact_type}.")
|
||||
|
||||
inbound_contact.last_inbound_date = datetime.now(timezone.utc)
|
||||
inbound_contact.last_inbound_date = datetime.now(UTC)
|
||||
singular_contact_type = contact_type[:-1] # strip trailing "s"
|
||||
inbound_contact.last_inbound_type = singular_contact_type
|
||||
attr = f"num_{contact_type}"
|
||||
|
|
|
@ -0,0 +1,298 @@
|
|||
# Use Code Linters to Enforce Layout and Discover Issues
|
||||
|
||||
- Status: Accepted
|
||||
- Deciders: Luke Crouch, Se Yeon Kim, John Whitlock
|
||||
- Date: 2024-04-10
|
||||
|
||||
Technical Story: [MPP-79][]
|
||||
|
||||
[MPP-79]: https://mozilla-hub.atlassian.net/browse/MPP-79
|
||||
|
||||
## Context and Problem Statement
|
||||
|
||||
A linter is a static analysis tool that aids development. Some of the areas
|
||||
addressed by different linting tools include:
|
||||
|
||||
- **Enforce Layout**: A tool can check and even reformat code to fit a project
|
||||
style. This helps make code and code changes easier to read and comprehend.
|
||||
It helps new contributors to match the existing project style.
|
||||
- **Check Types**: Dynamic languages are flexible about variable types.
|
||||
Valid code with incorrect types can result in bugs and runtime errors. A
|
||||
linter can check type hints for consistent usage. Code editors can use type
|
||||
hints for documentation and assisted code writing.
|
||||
- **Identify Mistakes**: A tool can detect common mistakes. Examples are
|
||||
identifying unused variables, and using loose equality in JavaScript.
|
||||
- **Standardize Constructs**: There are many ways to express the same logic in
|
||||
code. A linter can suggest or rewrite code to a standard form. One example is
|
||||
the order and placement of imports. Another is omitting optional defaults to
|
||||
a function call.
|
||||
- **Spelling and Grammar Checking**: These tools have different priorities from
|
||||
similar tools for word processors. They expect technical and project-specific
|
||||
terms. They look for text in prose and code comments.
|
||||
|
||||
Most developers appreciate that linters make multi-developer projects easier.
|
||||
Tools automatically check and enforce project standards. Code reviewers focus
|
||||
on the logic of changes, rather than the form of the code. A linter can teach
|
||||
developers about unexpected issues and improve their understanding of the
|
||||
project's languages.
|
||||
|
||||
Also, developers dislike changes to their working process. The benefits may be
|
||||
unclear. They may disagree that the benefits are worth the extra effort. These
|
||||
objections are easiest to overcome at the start of a project. As a project
|
||||
becomes bigger, it becomes harder to satisfy a new tool. Adding a new tool to
|
||||
a mature project is a slow, deliberate process.
|
||||
|
||||
## Linters Used By Relay
|
||||
|
||||
Relay has added several linters to check code quality:
|
||||
|
||||
| | CSS | JavaScript | Python | Markdown | Shell Scripts |
|
||||
| :--------------------- | :-------- | :--------------- | :----- | :------- | :------------ |
|
||||
| Enforce Layout | prettier | prettier, ESLint | black | prettier | _none_ |
|
||||
| Check Types | stylelint | TypeScript | mypy | _n/a_ | _none_ |
|
||||
| Identify Mistakes | stylelint | ESLint | _none_ | _none_ | _none_ |
|
||||
| Standardize Constructs | stylelint | ESLint | _none_ | _none_ | _none_ |
|
||||
| Spelling and Grammar | _none_ | _none_ | _none_ | _none_ | _none_ |
|
||||
|
||||
Relay includes [husky][] and [lint-staged][] to run linters as a pre-commit
|
||||
hook. The linters are configured in [.lintstagedrc.js][]. They run against the
|
||||
files changed in each commit. When a tool supports fixing issues, it can update
|
||||
the files before committing. Otherwise, detected issues will halt the commit,
|
||||
giving the developer a chance to fix them.
|
||||
|
||||
[husky]: https://typicode.github.io/husky/
|
||||
[lint-staged]: https://github.com/lint-staged/lint-staged
|
||||
[.lintstagedrc.js]: https://github.com/mozilla/fx-private-relay/blob/main/.lintstagedrc.js
|
||||
|
||||
### stylelint
|
||||
|
||||
[stylelint][] identifies mistakes and standardizes constructs in CSS. Relay
|
||||
uses a plugin to also check [SCSS/Sass][sass], an extended syntax for CSS.
|
||||
Relay added `stylelint` in July 2021.
|
||||
|
||||
The configuration file is at [frontend/.stylelintrc.cjs][]. Relay uses the
|
||||
[stylelint-config-recommended-scss][] ruleset, and extends it with custom
|
||||
rules.
|
||||
|
||||
[sass]: https://sass-lang.com/documentation/syntax/
|
||||
[stylelint-config-recommended-scss]: https://github.com/stylelint-scss/stylelint-config-recommended-scss
|
||||
[frontend/.stylelintrc.cjs]: https://github.com/mozilla/fx-private-relay/blob/main/frontend/.stylelintrc.cjs
|
||||
[stylelint]: https://stylelint.io/
|
||||
|
||||
### prettier
|
||||
|
||||
[prettier][] is a code formatter for several languages. Relay uses it to format
|
||||
JavaScript, TypeScript, CSS, SCSS, and Markdown. Relay added `prettier` (again?)
|
||||
in December 2021.
|
||||
|
||||
The (empty) configuration file is at [frontend/.prettierrc.json][]. Relay and
|
||||
other projects use the `prettier` defaults. Many tools are compatible with
|
||||
`prettier` by default or by configuration.
|
||||
|
||||
[prettier]: https://prettier.io/
|
||||
[frontend/.prettierrc.json]: https://github.com/mozilla/fx-private-relay/blob/main/frontend/.prettierrc.json
|
||||
|
||||
### TypeScript
|
||||
|
||||
[TypeScript][] extends JavaScript with type information. It converts to
|
||||
JavaScript in a build step, mostly by removing the type hints. Relay added
|
||||
TypeScript in March 2022 with the refactor to React / next.js.
|
||||
|
||||
The configuration at [frontend/tsconfig.json][] has a few customizations.
|
||||
Many of the changes help TypeScript work with `next.js`. Relay has enabled
|
||||
strict mode, which turns on a default set of rules. The types are checked
|
||||
during the frontend build and with `next lint`.
|
||||
|
||||
[TypeScript]: https://www.typescriptlang.org/
|
||||
[frontend/tsconfig.json]: https://github.com/mozilla/fx-private-relay/blame/main/frontend/tsconfig.json
|
||||
|
||||
### next lint / ESLint
|
||||
|
||||
Next.js [integrates with ESLint][nextjs-eslint]. It identifies mistakes and
|
||||
standardizes constructs in JavaScript. It can fix some issues automatically.
|
||||
Relay added ESLint in October 2020 or earlier. Relay continued using it with
|
||||
the next.js refactor in March 2022.
|
||||
|
||||
Relay uses Next.js's base ESLint configuration. This includes the tuning needed
|
||||
to make [ESLint][] formatting compatible with `prettier`. A stricter rule set
|
||||
that checks against [Google's Web Vitals][web-vitals] is also available. Relay
|
||||
uses Django rather than `next.js`. Relay expanded [frontend/.eslintrc.js][] to
|
||||
support this non-standard configuration.
|
||||
|
||||
[ESLint]: https://eslint.org/docs/latest/
|
||||
[nextjs-eslint]: https://nextjs.org/docs/pages/building-your-application/configuring/eslint
|
||||
[web-vitals]: https://web.dev/articles/vitals
|
||||
[frontend/.eslintrc.js]: https://github.com/mozilla/fx-private-relay/blob/main/frontend/.eslintrc.js
|
||||
|
||||
### mypy
|
||||
|
||||
[mypy][] is a static type checker for Python. Relay added mypy support in April 2022. Relay started with a recommended configuration for an
|
||||
[existing codebase][mypy-existing].
|
||||
|
||||
`mypy` uses [pyproject.toml][] for configuration. Relay ignores issues with
|
||||
third party libraries that do not ship type hints. We disable strict rules
|
||||
that need code changes to pass. We ratchet up `mypy` strictness over time.
|
||||
|
||||
[mypy]: https://mypy.readthedocs.io/en/stable/
|
||||
[mypy-existing]: https://mypy.readthedocs.io/en/stable/existing_code.html
|
||||
|
||||
### black
|
||||
|
||||
[black][] is a formatting tool to enforce layout of Python code. It rewrites
|
||||
files in place. Relay adopted it in May 2022.
|
||||
|
||||
`black` uses [pyproject.toml][] for configuration. Relay uses the default
|
||||
configuration. In the future, Relay can tune the supported Python versions.
|
||||
|
||||
[black]: https://black.readthedocs.io/en/stable/index.html
|
||||
[pyproject.toml]: https://github.com/mozilla/fx-private-relay/blob/main/pyproject.toml
|
||||
|
||||
## Adding A New Linter
|
||||
|
||||
Adding a new linter is not free. Each developer needs to add it to their workflow.
|
||||
Continuous Integration (CI) needs to run the linter on each pull request.
|
||||
Developers must address linting issues before merging.
|
||||
|
||||
The decision to add a linter has two parts. First, is a linter needed? Second,
|
||||
which linter?
|
||||
|
||||
### Decision Drivers: Is a Linter Needed?
|
||||
|
||||
When deciding _if a linter is needed_, some criteria are:
|
||||
|
||||
- **Is the language used in production?** Issues in production code can impact
|
||||
users. A few seconds per pull request is worth avoiding a production bug.
|
||||
- **What is the impact of issues detected by the linter?** The highest impact
|
||||
is avoiding a user-facing bug. The next level of impact is improving the
|
||||
speed and effectiveness of code reviews. Tools can handle the mechanical
|
||||
review. Reviewers are free to spend their time and attention on the logic of
|
||||
the code changes.
|
||||
|
||||
Here is the linter chart again, focusing on areas without linters:
|
||||
|
||||
| | CSS | JavaScript | Python | Markdown | Shell Scripts |
|
||||
| :--------------------- | :-- | :--------- | :----- | :------- | :------------ |
|
||||
| Enforce Layout | yes | yes | yes | yes | no |
|
||||
| Check Types | yes | yes | yes | n/a | no |
|
||||
| Identify Mistakes | yes | yes | **no** | no | no |
|
||||
| Standardize Constructs | yes | yes | **no** | no | no |
|
||||
| Spelling and Grammar | no | no | no | no | no |
|
||||
|
||||
Relay does not suffer from a lack of Markdown linters. Our documentation is
|
||||
developer-facing, not user-facing. Reviewers find the most obvious errors. A
|
||||
document with a few typos is still useful and easy to fix. Authors and
|
||||
reviewers can use tools like the [markdownlint demo][] when needed.
|
||||
|
||||
Relay does not suffer from a lack of shell script linters. Relay uses shell
|
||||
scripts in CircleCI for build and test steps. Errors in these scripts appear as
|
||||
broken builds. Developers fix them to unblock the build. These build failures
|
||||
do not have a direct impact on our users. Authors and reviewers can use tools
|
||||
like [ShellCheck][] when needed.
|
||||
|
||||
Relay does not suffer from spelling and grammar errors. Many reviewers check
|
||||
new user-facing strings, as they proceed from design to translation. Errors in
|
||||
code comments and function names do not cause bugs. A developer understands a
|
||||
good code comment with a spelling error. A developer should delete a misleading
|
||||
comment with perfect English. Authors and reviewers can use tools like
|
||||
[PyEnchant][], [Hemingway][], and word processors when needed.
|
||||
|
||||
As highlighted, there is a gap in Python linters. There is no official linter
|
||||
to identify mistakes and standardize constructs. Python is used in production
|
||||
for the web and API server, as well as background tasks. Errors in this code
|
||||
will impact users, and non-standard code can slow down code reviews. Relay
|
||||
should add a new tool to address this gap.
|
||||
|
||||
[markdownlint demo]: https://dlaa.me/markdownlint/
|
||||
[ShellCheck]: https://www.shellcheck.net/
|
||||
[PyEnchant]: https://pyenchant.github.io/pyenchant/#
|
||||
[Hemingway]: https://hemingwayapp.com/
|
||||
|
||||
## Decision Drivers: Which Linter?
|
||||
|
||||
A required linter must have these attributes:
|
||||
|
||||
- **Checks on pull request**: The continuous integration process needs to run
|
||||
the tool. If the tool identifies an issue, the build should fail. This
|
||||
prevents merging failing code.
|
||||
- **Runs in the development environment**: Fast tools should run as a pre-commit
|
||||
step. A developer can run the tool on their machine. When the tool accepts the
|
||||
code, it should also pass in CI.
|
||||
- **Marks false positives**: It should be rare to identify good code as causing
|
||||
a problem. When there is a false positive, it should be possible to ignore it
|
||||
and get a passing check.
|
||||
|
||||
When choosing between similar tools, these attributes can help guide the
|
||||
decision:
|
||||
|
||||
- **Good defaults**: A tool with good defaults needs less configuration.
|
||||
The tool works the same across projects.
|
||||
- **Fixes issues when appropriate**: Developers love a "fix it" button. A
|
||||
tool that can fix problems correctly is better than a tool that only
|
||||
identifies them. A developer should fix issues that need human judgment.
|
||||
- **Editor integration**: A developer's primary tool is the code editor. An
|
||||
integrated tool helps fix issues as part of the writing process. A
|
||||
non-integrated tool turns code linting into an extra chore.
|
||||
- **Speed**: A fast tool gets used. A slow tool gets skipped. A tool that runs
|
||||
on each file save should take less than a second. A pre-commit hook should
|
||||
run in less than five seconds. Developers should feel they save time running
|
||||
a tool, rather than waiting to see if it fails in CI.
|
||||
|
||||
There are many tools that address identifying mistakes and standardizing
|
||||
constructs in Python. Some linters that would fill this role:
|
||||
|
||||
- [pylint][]: This tool has been in development since 2003. It has
|
||||
hundreds of built-in checks, including layout enforcement and spelling
|
||||
checkers. It works across entire codebases instead of individual files. It
|
||||
identifies 2,800 issues in our code, in 25 seconds.
|
||||
- [pycodestyle][]: This tool, formerly known as `pep8`, has been in
|
||||
development since 2006. It is focused on code formatting and some code
|
||||
constructs. It identifies 317 issues in our code in 1.5 seconds.
|
||||
- [pyflakes][]: This tool has been in development since 2009. It identifies
|
||||
mistakes in code without caring about formatting. It finds 12 issues in our
|
||||
code in 12 seconds.
|
||||
- [flake8][]: This tool has been in development since 2010. It combines
|
||||
`pycodestyle`, `pyflakes` and the `mccabe` tool into one package. It also
|
||||
has a plugin system that can extend the ruleset. It identifies 277 issues
|
||||
in our code in 600 milliseconds.
|
||||
- [isort][]: This tool has been in development since 2017. It reformats
|
||||
imports to enforce an order and style. It detects 99 issues in our code in
|
||||
750 milliseconds. It can be used as a `flake8` plugin.
|
||||
- [bandit][]: This tool scans code for security issues. It finds 1997 low
|
||||
severity and 8 medium severity issues in our code in 1.5 seconds. It can be
|
||||
run as a `flake8` plugin.
|
||||
- [ruff][]: This tool, launched in 2023 and written in Rust, is designed for
|
||||
speed and automatic fixing. It identifies 26 issues in our code, 9 fixable,
|
||||
in 35 milliseconds. It has optional rules that implement the checks of all
|
||||
the previous tools.
|
||||
|
||||
All the tools have the required attributes. They run in CI and in the
|
||||
development environment. They are configurable, and have a mechanism for
|
||||
marking false positives.
|
||||
|
||||
Three approaches that stand out:
|
||||
|
||||
- `pylint` for identifying the most issues. There are many false positives, such
|
||||
as missing documentation and understanding pytest fixtures. Significant
|
||||
configuration will cut these false positives. It is slow enough to
|
||||
cause pain if used for local development.
|
||||
- `flake8` with several plugins (`isort`, `bandit`, others). This provides a
|
||||
good mix of speed and coverage. [PyCQA][] maintains the tools and plugins,
|
||||
and they work well together.
|
||||
- `ruff` with additional checks enabled. This tool can enforce many of the
|
||||
rules of the other tools in a single package. It runs 10x - 100x faster than
|
||||
the other tools. It is compatible with `black` formatting by default.
|
||||
The largest negative is that [Astral][] is VC-backed. The tool may add
|
||||
monetization in the future.
|
||||
|
||||
Due to the speed and flexibility, `ruff` is the recommended tool for the
|
||||
next linter.
|
||||
|
||||
[Astral]: https://astral.sh/blog/announcing-astral-the-company-behind-ruff
|
||||
[PyCQA]: https://github.com/PyCQA
|
||||
[bandit]: https://github.com/PyCQA/bandit
|
||||
[flake8]: https://flake8.pycqa.org/en/latest/index.html
|
||||
[isort]: https://pycqa.github.io/isort/index.html
|
||||
[pycodestyle]: https://pycodestyle.pycqa.org/en/latest/
|
||||
[pyflakes]: https://github.com/PyCQA/pyflakes
|
||||
[pylint]: https://pylint.readthedocs.io
|
||||
[ruff]: https://docs.astral.sh/ruff/
|
|
@ -60,18 +60,34 @@ see code coverage for the frontend.
|
|||
|
||||
### `@playwright/test` and `dotenv`
|
||||
|
||||
Our Playwright tests are currently only triggered in CI once a day from the
|
||||
`main` branch. Thus, there's unfortunately no easy way to verify that it still
|
||||
runs successfully until after merging. To do so, after merging the PR, open the
|
||||
"Actions" tab in GitHub, then find the "[Relay e2e
|
||||
Tests](https://github.com/mozilla/fx-private-relay/actions/workflows/playwright.yml)"
|
||||
workflow on the left-hand side, and then use the "Run workflow" trigger button
|
||||
in the top row of the table. Alternatively, you can also try run the end-to-end
|
||||
tests locally by following the instructions in
|
||||
[../e2e-tests/README.md](../e2e-tests/README.md#how-to-run).
|
||||
Our Playwright tests run as a GitHub action [Relay e2e Tests][] once a day from
|
||||
the `main` branch. See [../e2e-tests/README.md](../e2e-tests/README.md#how-to-run)
|
||||
for instructions on running the tests locally.

`dotenv` is used to load environment variables for Playwright from `.env` files,
so it can be verified in the same way.
To test updates to `@playwright/test` and `dotenv`, add the following to the
`.env` file:

```
E2E_TEST_ACCOUNT_PASSWORD=<a_long_random_string_like_a_uuid>
E2E_TEST_ENV=stage
```

Check out the update branch and run:

```
npm install
npx playwright install
npx playwright test
```

This will confirm that `dotenv` picked up the settings from `.env`, and that
the new Playwright version runs against stage.

Once the PR is merged, you can wait for the daily test run, or manually run the
tests by going to the [Relay e2e Tests][] GitHub Actions page, selecting "Run
workflow", and picking the `main` branch and the `stage` environment.

[Relay e2e Tests]: https://github.com/mozilla/fx-private-relay/actions/workflows/playwright.yml

### `react-aria` and `react-stately`

@ -78,3 +78,9 @@ This is because playwright needs to create an image initially. On the following
To lint the files, run the following in the root directory (it is recommended to run this after any changes to the test suite):

`npx prettier --write e2e-tests/*`

### 9. Health check

Our [health check](https://github.com/mozilla/fx-private-relay/actions/workflows/relay_e2e_health.yml) runs a subset of the entire e2e test suite. This subset focuses on critical tests for free and premium users, to gauge the overall health of the Relay application. To add a test to the health check CI, add `@health_check` to the title of your test or test group. For example:

`test.describe("Subscription flows @health_check", ...)`

@ -28,7 +28,7 @@ test.describe("Firefox Relay - Landing Page - Visual Regression @health_check",
});
});

test.describe("Check header buttons and their redirects, C1812638 @health_check", () => {
test.describe("Check header buttons and their redirects, C1812638 @health_check", () => {
test.beforeEach(async ({ landingPage }) => {
await landingPage.open();
});
@ -1,15 +1,14 @@
import logging
import os

from django.apps import AppConfig, apps
from django.conf import settings
from django.utils.functional import cached_property

import boto3
from botocore.config import Config
from django.utils.functional import cached_property
from mypy_boto3_ses.client import SESClient

from django.apps import apps, AppConfig
from django.conf import settings


logger = logging.getLogger("events")

@ -41,7 +40,7 @@ class EmailsConfig(AppConfig):
logger.exception("exception during S3 connect")

def __init__(self, app_name, app_module):
super(EmailsConfig, self).__init__(app_name, app_module)
super().__init__(app_name, app_module)

# badwords file from:
# https://www.cs.cmu.edu/~biglou/resources/bad-words.txt
@ -53,7 +52,7 @@ class EmailsConfig(AppConfig):
def _load_terms(self, filename):
terms = []
terms_file_path = os.path.join(settings.BASE_DIR, "emails", filename)
with open(terms_file_path, "r") as terms_file:
with open(terms_file_path) as terms_file:
for word in terms_file:
if len(word.strip()) > 0 and word.strip()[0] == "#":
continue
@ -3,10 +3,10 @@ CommandFromDjangoSettings is a base class for commands that get parameters from
settings.
"""

import textwrap
from argparse import RawDescriptionHelpFormatter
from collections import namedtuple
from shutil import get_terminal_size
import textwrap

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError, DjangoHelpFormatter
@ -11,9 +11,9 @@ https://docs.python.org/3/library/datetime.html#datetime.datetime.isoformat
https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/
"""

from datetime import datetime, timezone
import json
import logging
from datetime import UTC, datetime

from django.core.management.base import CommandError

@ -57,7 +57,7 @@ class Command(CommandFromDjangoSettings):
def handle(self, verbosity, *args, **kwargs):
"""Handle call from command line (called by BaseCommand)"""
self.init_from_settings(verbosity)
with open(self.healthcheck_path, mode="r", encoding="utf8") as healthcheck_file:
with open(self.healthcheck_path, encoding="utf8") as healthcheck_file:
context = self.check_healthcheck(healthcheck_file, self.max_age)
if context["success"]:
if self.verbosity > 1:
@ -95,7 +95,7 @@ class Command(CommandFromDjangoSettings):
context["data"] = data
raw_timestamp = data["timestamp"]
timestamp = datetime.fromisoformat(raw_timestamp)
age = (datetime.now(tz=timezone.utc) - timestamp).total_seconds()
age = (datetime.now(tz=UTC) - timestamp).total_seconds()

context["age_s"] = round(age, 3)
if age > max_age:
@ -1,4 +1,4 @@
from django.core.management.base import BaseCommand, CommandError
from django.core.management.base import BaseCommand

from ...models import Profile

@ -1,7 +1,7 @@
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta

from django.db import transaction
from django.core.management.base import BaseCommand
from django.db import transaction

from ...models import Reply

@ -13,7 +13,7 @@ class Command(BaseCommand):
parser.add_argument("days_old", nargs=1, type=int)

def handle(self, *args, **options):
delete_date = datetime.now(timezone.utc) - timedelta(options["days_old"][0])
delete_date = datetime.now(UTC) - timedelta(options["days_old"][0])
replies_to_delete = Reply.objects.filter(created_at__lt=delete_date).only("id")
print(
f"Deleting {len(replies_to_delete)} reply records "
@ -1,4 +1,3 @@
import json
import pathlib

from django.core.management.base import BaseCommand
@ -4,16 +4,15 @@ import shlex
import sys
import time

import boto3
from botocore.exceptions import ClientError

from django.conf import settings
from django.core.management.base import BaseCommand

from emails.sns import verify_from_sns
from emails.views import _sns_inbound_logic, validate_sns_arn_and_type
from emails.utils import incr_if_enabled
import boto3
from botocore.exceptions import ClientError

from emails.sns import verify_from_sns
from emails.utils import incr_if_enabled
from emails.views import _sns_inbound_logic, validate_sns_arn_and_type

logger = logging.getLogger("events")
info_logger = logging.getLogger("eventsinfo")
@ -9,29 +9,29 @@ https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-s
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html#SQS.Queue.receive_messages
"""

from datetime import datetime, timezone
from urllib.parse import urlsplit
import gc
import json
import logging
import shlex
import time

import boto3
from botocore.exceptions import ClientError
from codetiming import Timer
from markus.utils import generate_tag
import OpenSSL
from datetime import UTC, datetime
from urllib.parse import urlsplit

from django.core.management.base import CommandError

from emails.sns import verify_from_sns
from emails.views import _sns_inbound_logic, validate_sns_arn_and_type
from emails.utils import incr_if_enabled, gauge_if_enabled
import boto3
import OpenSSL
from botocore.exceptions import ClientError
from codetiming import Timer
from markus.utils import generate_tag

from emails.management.command_from_django_settings import (
CommandFromDjangoSettings,
SettingToLocal,
)
from emails.sns import verify_from_sns
from emails.utils import gauge_if_enabled, incr_if_enabled
from emails.views import _sns_inbound_logic, validate_sns_arn_and_type

logger = logging.getLogger("eventsinfo.process_emails_from_sqs")

@ -441,7 +441,7 @@ class Command(CommandFromDjangoSettings):
def write_healthcheck(self):
"""Update the healthcheck file with operations data, if path is set."""
data = {
"timestamp": datetime.now(tz=timezone.utc).isoformat(),
"timestamp": datetime.now(tz=UTC).isoformat(),
"cycles": self.cycles,
"total_messages": self.total_messages,
"failed_messages": self.failed_messages,
@ -1,15 +1,13 @@
import logging

from mypy_boto3_ses.type_defs import ContentTypeDef

from botocore.exceptions import ClientError

from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand

from allauth.socialaccount.models import SocialAccount
import django_ftl
from allauth.socialaccount.models import SocialAccount
from botocore.exceptions import ClientError
from mypy_boto3_ses.type_defs import ContentTypeDef

from emails.apps import EmailsConfig
from emails.models import Profile
@ -1,8 +1,9 @@
|
|||
# Generated by Django 2.2.2 on 2019-06-05 12:08
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
import emails.models
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# Generated by Django 2.2.2 on 2019-06-06 02:49
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import emails.models
|
||||
|
||||
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
# Generated by Django 2.2.2 on 2019-06-11 04:08
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
# Generated by Django 2.2.2 on 2019-06-12 20:47
|
||||
import uuid
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# Generated by Django 2.2.10 on 2020-03-10 22:03
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Generated by Django 2.2.13 on 2021-04-13 19:00
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# Generated by Django 2.2.24 on 2021-08-17 03:30
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# Generated by Django 2.2.24 on 2021-08-31 20:29
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# Generated by Django 2.2.24 on 2021-10-05 18:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import emails.models
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# Generated by Django 2.2.24 on 2021-10-15 05:22
|
||||
|
||||
from django.db import migrations, models
|
||||
from django.db import migrations
|
||||
|
||||
from emails.models import hash_subdomain
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# Generated by Django 2.2.24 on 2021-10-19 15:38
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import emails.models
|
||||
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Generated by Django 2.2.24 on 2021-11-05 19:16
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# Generated by Django 2.2.24 on 2021-11-10 22:15
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import emails.models
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 3.2.14 on 2022-08-05 14:28
|
||||
# ruff: noqa: E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 3.2.14 on 2022-08-30 15:34
|
||||
# ruff: noqa: E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Generated by Django 3.2.16 on 2022-10-28 02:35
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-22 21:23
|
||||
# ruff: noqa: E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 3.2.19 on 2023-08-07 20:13
|
||||
# ruff: noqa: E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 3.2.19 on 2023-10-19 23:46
|
||||
# ruff: noqa: W291,E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# Generated by Django 4.2.8 on 2024-01-02 20:18
|
||||
# ruff: noqa: W291,E501
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
132 emails/models.py
@ -1,22 +1,26 @@
from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal

import logging
import random
import re
import string
import uuid
from collections import namedtuple
from collections.abc import Iterable
from datetime import UTC, datetime, timedelta
from hashlib import sha256
from typing import Literal, cast

from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.db.models.query import QuerySet
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
parse_accept_lang_header,
)

from allauth.socialaccount.models import SocialAccount
@ -210,11 +214,11 @@ class Profile(models.Model):
|
|||
return None
|
||||
|
||||
@property
|
||||
def relay_addresses(self):
|
||||
def relay_addresses(self) -> QuerySet[RelayAddress]:
|
||||
return RelayAddress.objects.filter(user=self.user)
|
||||
|
||||
@property
|
||||
def domain_addresses(self):
|
||||
def domain_addresses(self) -> QuerySet[DomainAddress]:
|
||||
return DomainAddress.objects.filter(user=self.user)
|
||||
|
||||
@property
|
||||
|
@ -230,9 +234,9 @@ class Profile(models.Model):
|
|||
ra_count: int = self.relay_addresses.count()
|
||||
return ra_count >= settings.MAX_NUM_FREE_ALIASES
|
||||
|
||||
def check_bounce_pause(self):
|
||||
def check_bounce_pause(self) -> BounceStatus:
|
||||
if self.last_hard_bounce:
|
||||
last_hard_bounce_allowed = datetime.now(timezone.utc) - timedelta(
|
||||
last_hard_bounce_allowed = datetime.now(UTC) - timedelta(
|
||||
days=settings.HARD_BOUNCE_ALLOWED_DAYS
|
||||
)
|
||||
if self.last_hard_bounce > last_hard_bounce_allowed:
|
||||
|
@ -240,7 +244,7 @@ class Profile(models.Model):
|
|||
self.last_hard_bounce = None
|
||||
self.save()
|
||||
if self.last_soft_bounce:
|
||||
last_soft_bounce_allowed = datetime.now(timezone.utc) - timedelta(
|
||||
last_soft_bounce_allowed = datetime.now(UTC) - timedelta(
|
||||
days=settings.SOFT_BOUNCE_ALLOWED_DAYS
|
||||
)
|
||||
if self.last_soft_bounce > last_soft_bounce_allowed:
|
||||
|
@ -250,15 +254,15 @@ class Profile(models.Model):
|
|||
return BounceStatus(False, "")
|
||||
|
||||
@property
|
||||
def bounce_status(self):
|
||||
def bounce_status(self) -> BounceStatus:
|
||||
return self.check_bounce_pause()
|
||||
|
||||
@property
|
||||
def next_email_try(self):
|
||||
def next_email_try(self) -> datetime:
|
||||
bounce_pause, bounce_type = self.check_bounce_pause()
|
||||
|
||||
if not bounce_pause:
|
||||
return datetime.now(timezone.utc)
|
||||
return datetime.now(UTC)
|
||||
|
||||
if bounce_type == "soft":
|
||||
assert self.last_soft_bounce
|
||||
|
@ -339,7 +343,7 @@ class Profile(models.Model):
|
|||
return False
|
||||
|
||||
@property
|
||||
def has_vpn(self):
|
||||
def has_vpn(self) -> bool:
|
||||
if not self.fxa:
|
||||
return False
|
||||
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
|
||||
|
@ -349,7 +353,7 @@ class Profile(models.Model):
|
|||
return False
|
||||
|
||||
@property
|
||||
def emails_forwarded(self):
|
||||
def emails_forwarded(self) -> int:
|
||||
return (
|
||||
sum(ra.num_forwarded for ra in self.relay_addresses)
|
||||
+ sum(da.num_forwarded for da in self.domain_addresses)
|
||||
|
@ -357,7 +361,7 @@ class Profile(models.Model):
|
|||
)
|
||||
|
||||
@property
|
||||
def emails_blocked(self):
|
||||
def emails_blocked(self) -> int:
|
||||
return (
|
||||
sum(ra.num_blocked for ra in self.relay_addresses)
|
||||
+ sum(da.num_blocked for da in self.domain_addresses)
|
||||
|
@ -365,21 +369,17 @@ class Profile(models.Model):
|
|||
)
|
||||
|
||||
@property
|
||||
def emails_replied(self):
|
||||
# Once Django is on version 4.0 and above, we can set the default=0
|
||||
# and return a int instead of None
|
||||
# https://docs.djangoproject.com/en/4.0/ref/models/querysets/#default
|
||||
totals = [self.relay_addresses.aggregate(models.Sum("num_replied"))]
|
||||
totals.append(self.domain_addresses.aggregate(models.Sum("num_replied")))
|
||||
total_num_replied = 0
|
||||
for num in totals:
|
||||
total_num_replied += (
|
||||
num.get("num_replied__sum") if num.get("num_replied__sum") else 0
|
||||
)
|
||||
return total_num_replied + self.num_email_replied_in_deleted_address
|
||||
def emails_replied(self) -> int:
|
||||
ra_sum = self.relay_addresses.aggregate(models.Sum("num_replied", default=0))
|
||||
da_sum = self.domain_addresses.aggregate(models.Sum("num_replied", default=0))
|
||||
return (
|
||||
int(ra_sum["num_replied__sum"])
|
||||
+ int(da_sum["num_replied__sum"])
|
||||
+ self.num_email_replied_in_deleted_address
|
||||
)
|
||||
|
||||
@property
|
||||
def level_one_trackers_blocked(self):
|
||||
def level_one_trackers_blocked(self) -> int:
|
||||
return (
|
||||
sum(ra.num_level_one_trackers_blocked or 0 for ra in self.relay_addresses)
|
||||
+ sum(
|
||||
|
@ -444,8 +444,8 @@ class Profile(models.Model):
|
|||
|
||||
# look for abuse metrics created on the same UTC date, regardless of time.
|
||||
midnight_utc_today = datetime.combine(
|
||||
datetime.now(timezone.utc).date(), datetime.min.time()
|
||||
).astimezone(timezone.utc)
|
||||
datetime.now(UTC).date(), datetime.min.time()
|
||||
).astimezone(UTC)
|
||||
midnight_utc_tomorow = midnight_utc_today + timedelta(days=1)
|
||||
abuse_metric = self.user.abusemetrics_set.filter(
|
||||
first_recorded__gte=midnight_utc_today,
|
||||
|
@ -464,7 +464,7 @@ class Profile(models.Model):
|
|||
abuse_metric.num_email_forwarded_per_day += 1
|
||||
if forwarded_email_size > 0:
|
||||
abuse_metric.forwarded_email_size_per_day += forwarded_email_size
|
||||
abuse_metric.last_recorded = datetime.now(timezone.utc)
|
||||
abuse_metric.last_recorded = datetime.now(UTC)
|
||||
abuse_metric.save()
|
||||
|
||||
# check user should be flagged for abuse
|
||||
|
@ -493,7 +493,7 @@ class Profile(models.Model):
|
|||
or hit_max_forwarded
|
||||
or hit_max_forwarded_email_size
|
||||
):
|
||||
self.last_account_flagged = datetime.now(timezone.utc)
|
||||
self.last_account_flagged = datetime.now(UTC)
|
||||
self.save()
|
||||
data = {
|
||||
"uid": self.fxa.uid if self.fxa else None,
|
||||
|
@ -514,7 +514,7 @@ class Profile(models.Model):
|
|||
account_premium_feature_resumed = self.last_account_flagged + timedelta(
|
||||
days=settings.PREMIUM_FEATURE_PAUSED_DAYS
|
||||
)
|
||||
if datetime.now(timezone.utc) > account_premium_feature_resumed:
|
||||
if datetime.now(UTC) > account_premium_feature_resumed:
|
||||
# premium feature has been resumed
|
||||
return False
|
||||
# user was flagged and the premium feature pause period is not yet over
|
||||
|
@ -584,14 +584,18 @@ def address_hash(address, subdomain=None, domain=None):
|
|||
if not domain:
|
||||
domain = get_domains_from_settings()["MOZMAIL_DOMAIN"]
|
||||
if subdomain:
|
||||
return sha256(f"{address}@{subdomain}.{domain}".encode("utf-8")).hexdigest()
|
||||
return sha256(f"{address}@{subdomain}.{domain}".encode()).hexdigest()
|
||||
if domain == settings.RELAY_FIREFOX_DOMAIN:
|
||||
return sha256(f"{address}".encode("utf-8")).hexdigest()
|
||||
return sha256(f"{address}@{domain}".encode("utf-8")).hexdigest()
|
||||
return sha256(f"{address}".encode()).hexdigest()
|
||||
return sha256(f"{address}@{domain}".encode()).hexdigest()
|
||||
|
||||
|
||||
def address_default():
|
||||
return "".join(random.choices(string.ascii_lowercase + string.digits, k=9))
|
||||
return "".join(
|
||||
random.choices( # noqa: S311 (standard pseudo-random generator used)
|
||||
string.ascii_lowercase + string.digits, k=9
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def has_bad_words(value) -> bool:
|
||||
|
@ -622,7 +626,7 @@ def get_domain_numerical(domain_address):
|
|||
|
||||
|
||||
def hash_subdomain(subdomain, domain=settings.MOZMAIL_DOMAIN):
|
||||
return sha256(f"{subdomain}.{domain}".encode("utf-8")).hexdigest()
|
||||
return sha256(f"{subdomain}.{domain}".encode()).hexdigest()
|
||||
|
||||
|
||||
class RegisteredSubdomain(models.Model):
|
||||
|
@ -771,7 +775,7 @@ class RelayAddress(models.Model):
|
|||
)
|
||||
deleted_address.save()
|
||||
profile = Profile.objects.get(user=self.user)
|
||||
profile.address_last_deleted = datetime.now(timezone.utc)
|
||||
profile.address_last_deleted = datetime.now(UTC)
|
||||
profile.num_address_deleted += 1
|
||||
profile.num_email_forwarded_in_deleted_address += self.num_forwarded
|
||||
profile.num_email_blocked_in_deleted_address += self.num_blocked
|
||||
|
@ -781,9 +785,9 @@ class RelayAddress(models.Model):
|
|||
profile.num_email_replied_in_deleted_address += self.num_replied
|
||||
profile.num_email_spam_in_deleted_address += self.num_spam
|
||||
profile.num_deleted_relay_addresses += 1
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
return super(RelayAddress, self).delete(*args, **kwargs)
|
||||
return super().delete(*args, **kwargs)
|
||||
|
||||
def save(
|
||||
self,
|
||||
|
@ -803,7 +807,7 @@ class RelayAddress(models.Model):
|
|||
break
|
||||
self.address = address_default()
|
||||
locked_profile.update_abuse_metric(address_created=True)
|
||||
locked_profile.last_engagement = datetime.now(timezone.utc)
|
||||
locked_profile.last_engagement = datetime.now(UTC)
|
||||
locked_profile.save()
|
||||
if (not self.user.profile.server_storage) and any(
|
||||
(self.description, self.generated_for, self.used_on)
|
||||
|
@ -827,12 +831,15 @@ class RelayAddress(models.Model):
|
|||
)
|
||||
|
||||
@property
|
||||
def domain_value(self):
|
||||
return get_domains_from_settings().get(self.get_domain_display())
|
||||
def domain_value(self) -> str:
|
||||
domain = cast(
|
||||
Literal["RELAY_FIREFOX_DOMAIN", "MOZMAIL_DOMAIN"], self.get_domain_display()
|
||||
)
|
||||
return get_domains_from_settings()[domain]
|
||||
|
||||
@property
|
||||
def full_address(self):
|
||||
return "%s@%s" % (self.address, self.domain_value)
|
||||
def full_address(self) -> str:
|
||||
return f"{self.address}@{self.domain_value}"
|
||||
|
||||
@property
|
||||
def metrics_id(self) -> str:
|
||||
|
@ -950,7 +957,7 @@ class DomainAddress(models.Model):
|
|||
raise DomainAddrDuplicateException(duplicate_address=self.address)
|
||||
|
||||
user_profile.update_abuse_metric(address_created=True)
|
||||
user_profile.last_engagement = datetime.now(timezone.utc)
|
||||
user_profile.last_engagement = datetime.now(UTC)
|
||||
user_profile.save(update_fields=["last_engagement"])
|
||||
incr_if_enabled("domainaddress.create")
|
||||
if self.first_emailed_at:
|
||||
|
@ -984,7 +991,7 @@ class DomainAddress(models.Model):
|
|||
@staticmethod
|
||||
def make_domain_address(
|
||||
user_profile: Profile, address: str | None = None, made_via_email: bool = False
|
||||
) -> "DomainAddress":
|
||||
) -> DomainAddress:
|
||||
check_user_can_make_domain_address(user_profile)
|
||||
|
||||
if not address:
|
||||
|
@ -996,7 +1003,7 @@ class DomainAddress(models.Model):
|
|||
# Only check for bad words if randomly generated
|
||||
assert isinstance(address, str)
|
||||
|
||||
first_emailed_at = datetime.now(timezone.utc) if made_via_email else None
|
||||
first_emailed_at = datetime.now(UTC) if made_via_email else None
|
||||
domain_address = DomainAddress.objects.create(
|
||||
user=user_profile.user, address=address, first_emailed_at=first_emailed_at
|
||||
)
|
||||
|
@ -1017,7 +1024,7 @@ class DomainAddress(models.Model):
|
|||
# self.user_profile is a property and should not be used to
|
||||
# update values on the user's profile
|
||||
profile = Profile.objects.get(user=self.user)
|
||||
profile.address_last_deleted = datetime.now(timezone.utc)
|
||||
profile.address_last_deleted = datetime.now(UTC)
|
||||
profile.num_address_deleted += 1
|
||||
profile.num_email_forwarded_in_deleted_address += self.num_forwarded
|
||||
profile.num_email_blocked_in_deleted_address += self.num_blocked
|
||||
|
@ -1027,21 +1034,20 @@ class DomainAddress(models.Model):
|
|||
profile.num_email_replied_in_deleted_address += self.num_replied
|
||||
profile.num_email_spam_in_deleted_address += self.num_spam
|
||||
profile.num_deleted_domain_addresses += 1
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
return super(DomainAddress, self).delete(*args, **kwargs)
|
||||
return super().delete(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def domain_value(self):
|
||||
return get_domains_from_settings().get(self.get_domain_display())
|
||||
|
||||
@property
|
||||
def full_address(self):
|
||||
return "%s@%s.%s" % (
|
||||
self.address,
|
||||
self.user_profile.subdomain,
|
||||
self.domain_value,
|
||||
def domain_value(self) -> str:
|
||||
domain = cast(
|
||||
Literal["RELAY_FIREFOX_DOMAIN", "MOZMAIL_DOMAIN"], self.get_domain_display()
|
||||
)
|
||||
return get_domains_from_settings()[domain]
|
||||
|
||||
@property
|
||||
def full_address(self) -> str:
|
||||
return f"{self.address}@{self.user_profile.subdomain}.{self.domain_value}"
|
||||
|
||||
@property
|
||||
def metrics_id(self) -> str:
|
||||
|
@ -1078,7 +1084,7 @@ class Reply(models.Model):
|
|||
address = self.relay_address or self.domain_address
|
||||
assert address
|
||||
address.num_replied += 1
|
||||
address.last_used_at = datetime.now(timezone.utc)
|
||||
address.last_used_at = datetime.now(UTC)
|
||||
address.save(update_fields=["num_replied", "last_used_at"])
|
||||
return address.num_replied
|
||||
|
||||
|
|
|
@ -18,18 +18,13 @@ https://github.com/python/cpython/blob/main/Lib/email/headerregistry.py
|
|||
https://github.com/python/cpython/blob/main/Lib/email/policy.py
|
||||
"""
|
||||
|
||||
from email._header_value_parser import get_unstructured, InvalidMessageID
|
||||
from email.headerregistry import (
|
||||
BaseHeader,
|
||||
MessageIDHeader as PythonMessageIDHeader,
|
||||
HeaderRegistry as PythonHeaderRegistry,
|
||||
UnstructuredHeader,
|
||||
)
|
||||
from email.policy import EmailPolicy
|
||||
|
||||
from email import errors
|
||||
|
||||
from typing import cast, TYPE_CHECKING
|
||||
from email._header_value_parser import InvalidMessageID, get_unstructured
|
||||
from email.headerregistry import BaseHeader, UnstructuredHeader
|
||||
from email.headerregistry import HeaderRegistry as PythonHeaderRegistry
|
||||
from email.headerregistry import MessageIDHeader as PythonMessageIDHeader
|
||||
from email.policy import EmailPolicy
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# _HeaderParser is a protocol from mypy's typeshed
|
||||
|
|
|
@ -1,15 +1,13 @@
|
|||
from hashlib import sha256
|
||||
import logging
|
||||
from hashlib import sha256
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
from django.db.models.signals import post_save, pre_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
from emails.models import Profile
|
||||
from emails.utils import incr_if_enabled, set_user_group
|
||||
|
||||
|
||||
info_logger = logging.getLogger("eventsinfo")
|
||||
|
||||
|
||||
|
|
|
@ -3,16 +3,15 @@
|
|||
|
||||
import base64
|
||||
import logging
|
||||
import pem
|
||||
from urllib.request import urlopen
|
||||
|
||||
from OpenSSL import crypto
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import caches
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.utils.encoding import smart_bytes
|
||||
|
||||
import pem
|
||||
from OpenSSL import crypto
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
|
||||
|
@ -98,7 +97,7 @@ def _grab_keyfile(cert_url):
|
|||
|
||||
pemfile = key_cache.get(cert_url)
|
||||
if not pemfile:
|
||||
response = urlopen(cert_url)
|
||||
response = urlopen(cert_url) # noqa: S310 (check for custom scheme)
|
||||
pemfile = response.read()
|
||||
# Extract the first certificate in the file and confirm it's a valid
|
||||
# PEM certificate
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from django import template
|
||||
from django.template.defaultfilters import stringfilter
|
||||
from django.utils.html import conditional_escape
|
||||
from django.utils.safestring import mark_safe, SafeString
|
||||
from django.utils.safestring import SafeString, mark_safe
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
@ -50,4 +50,4 @@ def convert_fsi_to_span(text: str | SafeString, autoescape=True) -> str | SafeSt
|
|||
)
|
||||
else:
|
||||
result = f'{pre_fsi}<span dir="auto">{middle}</span>{post_pdi}'
|
||||
return mark_safe(result)
|
||||
return mark_safe(result) # noqa: S308 (use of mark_safe)
|
||||
|
|
|
@ -4,10 +4,10 @@ from __future__ import annotations
|
|||
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
from model_bakery import baker
|
||||
import pytest
|
||||
from model_bakery import baker
|
||||
|
||||
from emails.cleaners import ServerStorageCleaner, MissingProfileCleaner
|
||||
from emails.cleaners import MissingProfileCleaner, ServerStorageCleaner
|
||||
from emails.models import DomainAddress, RelayAddress
|
||||
|
||||
from .models_tests import make_premium_test_user, make_storageless_test_user
|
||||
|
|
|
@ -1,14 +1,11 @@
|
|||
from datetime import datetime, timezone, timedelta
|
||||
from unittest.mock import ANY, patch
|
||||
import json
|
||||
import logging
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from django.core.management import CommandError, call_command
|
||||
|
||||
import pytest
|
||||
|
||||
from django.core.management import call_command, CommandError
|
||||
|
||||
from emails.management.commands.check_health import Command
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def test_settings(settings, tmp_path):
|
||||
|
@ -28,7 +25,7 @@ def write_healthcheck(path, age=0):
|
|||
|
||||
Returns the path to the healthcheck file
|
||||
"""
|
||||
timestamp = (datetime.now(tz=timezone.utc) - timedelta(seconds=age)).isoformat()
|
||||
timestamp = (datetime.now(tz=UTC) - timedelta(seconds=age)).isoformat()
|
||||
data = {"timestamp": timestamp, "testing": True}
|
||||
with path.open("w", encoding="utf8") as f:
|
||||
json.dump(data, f)
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Any, Generator, TYPE_CHECKING
|
||||
from unittest.mock import patch, Mock
|
||||
from uuid import uuid4
|
||||
import json
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
from markus.testing import MetricsMock
|
||||
import pytest
|
||||
import OpenSSL
|
||||
from collections.abc import Generator
|
||||
from datetime import UTC, datetime
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from unittest.mock import Mock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
import OpenSSL
|
||||
import pytest
|
||||
from botocore.exceptions import ClientError
|
||||
from markus.testing import MetricsMock
|
||||
|
||||
from emails.tests.views_tests import EMAIL_SNS_BODIES
|
||||
from privaterelay.tests.utils import log_extra
|
||||
|
||||
|
@ -375,7 +376,7 @@ def test_writes_healthcheck_file(test_settings):
|
|||
"""Running the command writes to the healthcheck file."""
|
||||
call_command("process_emails_from_sqs")
|
||||
healthcheck_path = test_settings.PROCESS_EMAIL_HEALTHCHECK_PATH
|
||||
with open(healthcheck_path, "r", encoding="utf-8") as healthcheck_file:
|
||||
with open(healthcheck_path, encoding="utf-8") as healthcheck_file:
|
||||
content = json.load(healthcheck_file)
|
||||
assert content == {
|
||||
"timestamp": content["timestamp"],
|
||||
|
@ -388,7 +389,7 @@ def test_writes_healthcheck_file(test_settings):
|
|||
"queue_count_not_visible": 3,
|
||||
}
|
||||
ts = datetime.fromisoformat(content["timestamp"])
|
||||
duration = (datetime.now(tz=timezone.utc) - ts).total_seconds()
|
||||
duration = (datetime.now(tz=UTC) - ts).total_seconds()
|
||||
assert 0.0 < duration < 0.5
|
||||
|
||||
|
||||
|
|
|
@ -1,21 +1,18 @@
|
|||
import pytest
|
||||
from typing import Tuple
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import call_command
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
import django_ftl
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from emails.models import Profile
|
||||
from emails.tests.models_tests import make_free_test_user
|
||||
from privaterelay.ftl_bundles import main as ftl_bundle
|
||||
|
||||
|
||||
COMMAND_NAME = "send_welcome_emails"
|
||||
|
||||
|
||||
|
@ -106,7 +103,7 @@ def test_invalid_email_address_skips_invalid(
|
|||
call_command(COMMAND_NAME)
|
||||
|
||||
invalid_email_user.profile.refresh_from_db()
|
||||
assert invalid_email_user.profile.sent_welcome_email == False
|
||||
assert invalid_email_user.profile.sent_welcome_email is False
|
||||
|
||||
rec1, rec2, rec3, rec4, rec5 = caplog.records
|
||||
assert "Starting" in rec1.getMessage()
|
||||
|
@ -144,7 +141,7 @@ def _assert_caplog_for_1_email_to_user(
|
|||
assert "Exiting" in rec4.getMessage()
|
||||
|
||||
|
||||
def _get_send_email_args(mock_ses_client: MagicMock) -> Tuple:
|
||||
def _get_send_email_args(mock_ses_client: MagicMock) -> tuple:
|
||||
call_args = mock_ses_client.send_email.call_args[1]
|
||||
to_addresses = call_args["Destination"]["ToAddresses"]
|
||||
source = call_args["Source"]
|
||||
|
|
|
@ -1,39 +1,38 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
from hashlib import sha256
|
||||
import random
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from hashlib import sha256
|
||||
from unittest import skip
|
||||
from unittest.mock import patch, Mock
|
||||
from unittest.mock import Mock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import override_settings, TestCase
|
||||
from django.test import TestCase, override_settings
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from waffle.testutils import override_flag
|
||||
import pytest
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from model_bakery import baker
|
||||
from waffle.testutils import override_flag
|
||||
|
||||
from ..models import (
|
||||
AbuseMetrics,
|
||||
address_hash,
|
||||
CannotMakeAddressException,
|
||||
CannotMakeSubdomainException,
|
||||
DeletedAddress,
|
||||
DomainAddress,
|
||||
DomainAddrDuplicateException,
|
||||
DomainAddress,
|
||||
DomainAddrUnavailableException,
|
||||
Profile,
|
||||
RegisteredSubdomain,
|
||||
RelayAddress,
|
||||
address_hash,
|
||||
get_domain_numerical,
|
||||
has_bad_words,
|
||||
hash_subdomain,
|
||||
is_blocklisted,
|
||||
Profile,
|
||||
RegisteredSubdomain,
|
||||
RelayAddress,
|
||||
valid_available_subdomain,
|
||||
valid_address,
|
||||
valid_address_pattern,
|
||||
valid_available_subdomain,
|
||||
)
|
||||
from ..utils import get_domains_from_settings
|
||||
|
||||
|
@ -61,7 +60,7 @@ def make_free_test_user(email: str = "") -> User:
|
|||
def make_premium_test_user() -> User:
|
||||
premium_user = baker.make(User, email="premium@email.com")
|
||||
premium_user.profile.server_storage = True
|
||||
premium_user.profile.date_subscribed = datetime.now(tz=timezone.utc)
|
||||
premium_user.profile.date_subscribed = datetime.now(tz=UTC)
|
||||
premium_user.profile.save()
|
||||
upgrade_test_user_to_premium(premium_user)
|
||||
return premium_user
|
||||
|
@ -72,7 +71,7 @@ def make_storageless_test_user() -> User:
|
|||
storageless_user_profile = storageless_user.profile
|
||||
storageless_user_profile.server_storage = False
|
||||
storageless_user_profile.subdomain = "mydomain"
|
||||
storageless_user_profile.date_subscribed = datetime.now(tz=timezone.utc)
|
||||
storageless_user_profile.date_subscribed = datetime.now(tz=UTC)
|
||||
storageless_user_profile.save()
|
||||
upgrade_test_user_to_premium(storageless_user)
|
||||
return storageless_user
|
||||
|
@ -167,29 +166,27 @@ class MiscEmailModelsTest(TestCase):
|
|||
@override_settings(RELAY_FIREFOX_DOMAIN="firefox.com")
|
||||
def test_address_hash_without_subdomain_domain_firefox(self):
|
||||
address = "aaaaaaaaa"
|
||||
expected_hash = sha256(f"{address}".encode("utf-8")).hexdigest()
|
||||
expected_hash = sha256(f"{address}".encode()).hexdigest()
|
||||
assert address_hash(address, domain="firefox.com") == expected_hash
|
||||
|
||||
@override_settings(RELAY_FIREFOX_DOMAIN="firefox.com")
|
||||
def test_address_hash_without_subdomain_domain_not_firefoxz(self):
|
||||
non_default = "test.com"
|
||||
address = "aaaaaaaaa"
|
||||
expected_hash = sha256(f"{address}@{non_default}".encode("utf-8")).hexdigest()
|
||||
expected_hash = sha256(f"{address}@{non_default}".encode()).hexdigest()
|
||||
assert address_hash(address, domain=non_default) == expected_hash
|
||||
|
||||
def test_address_hash_with_subdomain(self):
|
||||
address = "aaaaaaaaa"
|
||||
subdomain = "test"
|
||||
domain = get_domains_from_settings().get("MOZMAIL_DOMAIN")
|
||||
expected_hash = sha256(
|
||||
f"{address}@{subdomain}.{domain}".encode("utf-8")
|
||||
).hexdigest()
|
||||
expected_hash = sha256(f"{address}@{subdomain}.{domain}".encode()).hexdigest()
|
||||
assert address_hash(address, subdomain, domain) == expected_hash
|
||||
|
||||
def test_address_hash_with_additional_domain(self):
|
||||
address = "aaaaaaaaa"
|
||||
test_domain = "test.com"
|
||||
expected_hash = sha256(f"{address}@{test_domain}".encode("utf-8")).hexdigest()
|
||||
expected_hash = sha256(f"{address}@{test_domain}".encode()).hexdigest()
|
||||
assert address_hash(address, domain=test_domain) == expected_hash
|
||||
|
||||
def test_get_domain_numerical(self):
|
||||
|
@ -320,7 +317,7 @@ class RelayAddressTest(TestCase):
|
|||
def test_delete_mozmail_deleted_address_object(self):
|
||||
relay_address = baker.make(RelayAddress, domain=2, user=self.user)
|
||||
address_hash = sha256(
|
||||
f"{relay_address.address}@{relay_address.domain_value}".encode("utf-8")
|
||||
f"{relay_address.address}@{relay_address.domain_value}".encode()
|
||||
).hexdigest()
|
||||
relay_address.delete()
|
||||
deleted_count = DeletedAddress.objects.filter(address_hash=address_hash).count()
|
||||
|
@ -453,7 +450,7 @@ class RelayAddressTest(TestCase):
|
|||
assert relay_address.used_on == "https://example.com"
|
||||
|
||||
# Update a different field with update_fields to avoid full model save
|
||||
new_last_used_at = datetime(2024, 1, 11, tzinfo=timezone.utc)
|
||||
new_last_used_at = datetime(2024, 1, 11, tzinfo=UTC)
|
||||
relay_address.last_used_at = new_last_used_at
|
||||
relay_address.save(update_fields={"last_used_at"})
|
||||
|
||||
|
@ -477,7 +474,7 @@ class RelayAddressTest(TestCase):
|
|||
assert relay_address.block_list_emails
|
||||
|
||||
# Update a different field with update_fields to avoid full model save
|
||||
new_last_used_at = datetime(2024, 1, 12, tzinfo=timezone.utc)
|
||||
new_last_used_at = datetime(2024, 1, 12, tzinfo=UTC)
|
||||
relay_address.last_used_at = new_last_used_at
|
||||
relay_address.save(update_fields={"last_used_at"})
|
||||
|
||||
|
@ -546,7 +543,7 @@ class ProfileBounceTestCase(ProfileTestCase):
|
|||
This happens when the user's email server reports a hard bounce, such as
|
||||
saying the email does not exist.
|
||||
"""
|
||||
self.profile.last_hard_bounce = datetime.now(timezone.utc) - timedelta(
|
||||
self.profile.last_hard_bounce = datetime.now(UTC) - timedelta(
|
||||
days=settings.HARD_BOUNCE_ALLOWED_DAYS - 1
|
||||
)
|
||||
self.profile.save()
|
||||
|
@ -559,7 +556,7 @@ class ProfileBounceTestCase(ProfileTestCase):
|
|||
This happens when the user's email server reports a soft bounce, such as
|
||||
saying the user's mailbox is full.
|
||||
"""
|
||||
self.profile.last_soft_bounce = datetime.now(timezone.utc) - timedelta(
|
||||
self.profile.last_soft_bounce = datetime.now(UTC) - timedelta(
|
||||
days=settings.SOFT_BOUNCE_ALLOWED_DAYS - 1
|
||||
)
|
||||
self.profile.save()
|
||||
|
@ -595,7 +592,7 @@ class ProfileCheckBouncePause(ProfileBounceTestCase):
|
|||
assert bounce_type == "hard"
|
||||
|
||||
def test_hard_bounce_over_resets_timer(self) -> None:
|
||||
self.profile.last_hard_bounce = datetime.now(timezone.utc) - timedelta(
|
||||
self.profile.last_hard_bounce = datetime.now(UTC) - timedelta(
|
||||
days=settings.HARD_BOUNCE_ALLOWED_DAYS + 1
|
||||
)
|
||||
self.profile.save()
|
||||
|
@ -608,7 +605,7 @@ class ProfileCheckBouncePause(ProfileBounceTestCase):
|
|||
assert self.profile.last_hard_bounce is None
|
||||
|
||||
def test_soft_bounce_over_resets_timer(self) -> None:
|
||||
self.profile.last_soft_bounce = datetime.now(timezone.utc) - timedelta(
|
||||
self.profile.last_soft_bounce = datetime.now(UTC) - timedelta(
|
||||
days=settings.SOFT_BOUNCE_ALLOWED_DAYS + 1
|
||||
)
|
||||
self.profile.save()
|
||||
|
@ -625,7 +622,7 @@ class ProfileNextEmailTryDateTest(ProfileBounceTestCase):
|
|||
"""Tests for Profile.next_email_try"""
|
||||
|
||||
def test_no_bounces_returns_today(self) -> None:
|
||||
assert self.profile.next_email_try.date() == datetime.now(timezone.utc).date()
|
||||
assert self.profile.next_email_try.date() == datetime.now(UTC).date()
|
||||
|
||||
def test_hard_bounce_returns_proper_datemath(self) -> None:
|
||||
last_hard_bounce = self.set_hard_bounce()
|
||||
|
@ -719,7 +716,7 @@ class ProfileDatePhoneRegisteredTest(ProfileTestCase):
|
|||
|
||||
def test_real_phone_no_relay_number_returns_verified_date(self) -> None:
|
||||
self.upgrade_to_phone()
|
||||
datetime_now = datetime.now(timezone.utc)
|
||||
datetime_now = datetime.now(UTC)
|
||||
RealPhone.objects.create(
|
||||
user=self.profile.user,
|
||||
number="+12223334444",
|
||||
|
@ -732,7 +729,7 @@ class ProfileDatePhoneRegisteredTest(ProfileTestCase):
|
|||
self,
|
||||
) -> None:
|
||||
self.upgrade_to_phone()
|
||||
datetime_now = datetime.now(timezone.utc)
|
||||
datetime_now = datetime.now(UTC)
|
||||
phone_user = self.profile.user
|
||||
RealPhone.objects.create(
|
||||
user=phone_user,
|
||||
|
@ -747,7 +744,7 @@ class ProfileDatePhoneRegisteredTest(ProfileTestCase):
|
|||
self,
|
||||
) -> None:
|
||||
self.upgrade_to_phone()
|
||||
datetime_now = datetime.now(timezone.utc)
|
||||
datetime_now = datetime.now(UTC)
|
||||
phone_user = self.profile.user
|
||||
real_phone = RealPhone.objects.create(
|
||||
user=phone_user,
|
||||
|
@ -881,7 +878,7 @@ class ProfileSaveTest(ProfileTestCase):
|
|||
assert self.profile.subdomain == "mIxEdcAsE"
|
||||
|
||||
# Update a different field with update_fields to avoid a full model save
|
||||
new_date_subscribed = datetime(2023, 3, 3, tzinfo=timezone.utc)
|
||||
new_date_subscribed = datetime(2023, 3, 3, tzinfo=UTC)
|
||||
self.profile.date_subscribed = new_date_subscribed
|
||||
self.profile.save(update_fields={"date_subscribed"})
|
||||
|
||||
|
@ -1192,9 +1189,9 @@ class ProfileUpdateAbuseMetricTest(ProfileTestCase):
|
|||
mocked_datetime = patcher.start()
|
||||
self.addCleanup(patcher.stop)
|
||||
|
||||
self.expected_now = datetime.now(timezone.utc)
|
||||
self.expected_now = datetime.now(UTC)
|
||||
mocked_datetime.combine.return_value = datetime.combine(
|
||||
datetime.now(timezone.utc).date(), datetime.min.time()
|
||||
datetime.now(UTC).date(), datetime.min.time()
|
||||
)
|
||||
mocked_datetime.now.return_value = self.expected_now
|
||||
mocked_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
|
||||
|
@ -1305,24 +1302,16 @@ class ProfilePlanTermTest(ProfileTestCase):
|
|||
|
||||
def test_phone_user_1_month(self) -> None:
|
||||
self.upgrade_to_phone()
|
||||
self.profile.date_phone_subscription_start = datetime(
|
||||
2024, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_start = datetime(2024, 1, 1, tzinfo=UTC)
|
||||
|
||||
self.profile.date_phone_subscription_end = datetime(
|
||||
2024, 2, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_end = datetime(2024, 2, 1, tzinfo=UTC)
|
||||
assert self.profile.plan_term == "1_month"
|
||||
|
||||
def test_phone_user_1_year(self) -> None:
|
||||
self.upgrade_to_phone()
|
||||
self.profile.date_phone_subscription_start = datetime(
|
||||
2024, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_start = datetime(2024, 1, 1, tzinfo=UTC)
|
||||
|
||||
self.profile.date_phone_subscription_end = datetime(
|
||||
2025, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_end = datetime(2025, 1, 1, tzinfo=UTC)
|
||||
assert self.profile.plan_term == "1_year"
|
||||
|
||||
def test_vpn_bundle_user(self) -> None:
|
||||
|
@ -1344,24 +1333,16 @@ class ProfileMetricsPremiumStatus(ProfileTestCase):
|
|||
|
||||
def test_phone_user_1_month(self) -> None:
|
||||
self.upgrade_to_phone()
|
||||
self.profile.date_phone_subscription_start = datetime(
|
||||
2024, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_start = datetime(2024, 1, 1, tzinfo=UTC)
|
||||
|
||||
self.profile.date_phone_subscription_end = datetime(
|
||||
2024, 2, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_end = datetime(2024, 2, 1, tzinfo=UTC)
|
||||
assert self.profile.metrics_premium_status == "phone_1_month"
|
||||
|
||||
def test_phone_user_1_year(self) -> None:
|
||||
self.upgrade_to_phone()
|
||||
self.profile.date_phone_subscription_start = datetime(
|
||||
2024, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_start = datetime(2024, 1, 1, tzinfo=UTC)
|
||||
|
||||
self.profile.date_phone_subscription_end = datetime(
|
||||
2025, 1, 1, tzinfo=timezone.utc
|
||||
)
|
||||
self.profile.date_phone_subscription_end = datetime(2025, 1, 1, tzinfo=UTC)
|
||||
assert self.profile.metrics_premium_status == "phone_1_year"
|
||||
|
||||
def test_vpn_bundle_user(self) -> None:
|
||||
|
@ -1612,7 +1593,7 @@ class DomainAddressTest(TestCase):
|
|||
assert domain_address.used_on == "https://example.com"
|
||||
|
||||
# Update a different field with update_fields to avoid full model save
|
||||
new_last_used_at = datetime(2024, 1, 11, tzinfo=timezone.utc)
|
||||
new_last_used_at = datetime(2024, 1, 11, tzinfo=UTC)
|
||||
domain_address.last_used_at = new_last_used_at
|
||||
domain_address.save(update_fields={"last_used_at"})
|
||||
|
||||
|
@ -1638,7 +1619,7 @@ class DomainAddressTest(TestCase):
|
|||
assert domain_address.block_list_emails
|
||||
|
||||
# Update a different field with update_fields to avoid full model save
|
||||
new_last_used_at = datetime(2024, 1, 12, tzinfo=timezone.utc)
|
||||
new_last_used_at = datetime(2024, 1, 12, tzinfo=UTC)
|
||||
assert domain_address.last_used_at != new_last_used_at
|
||||
domain_address.last_used_at = new_last_used_at
|
||||
domain_address.save(update_fields={"last_used_at"})
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
"""Tests for emails.policy"""
|
||||
|
||||
from email import message_from_string, errors
|
||||
from typing_extensions import TypedDict
|
||||
from email import errors, message_from_string
|
||||
|
||||
import pytest
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
from emails.policy import relay_policy
|
||||
|
||||
|
|
|
@ -9,7 +9,10 @@ from ..sns import _grab_keyfile
|
|||
class GrabKeyfileTest(TestCase):
|
||||
@patch("emails.sns.urlopen")
|
||||
def test_grab_keyfile_checks_cert_url_origin(self, mock_urlopen):
|
||||
cert_url = "https://sns.us-east-1.amazonaws.com/SimpleNotificationService-7ff5318490ec183fbaddaa2a969abfda.pem"
|
||||
cert_url = (
|
||||
"https://sns.us-east-1.amazonaws.com/"
|
||||
"SimpleNotificationService-7ff5318490ec183fbaddaa2a969abfda.pem"
|
||||
)
|
||||
assert mock_urlopen.called_once_with(cert_url)
|
||||
|
||||
with self.assertRaises(SuspiciousOperation):
|
||||
|
|
|
@ -1,19 +1,22 @@
|
|||
import json
|
||||
from base64 import b64encode
|
||||
from typing import Literal
|
||||
from urllib.parse import quote_plus
|
||||
from django.test import TestCase, override_settings
|
||||
from unittest.mock import patch
|
||||
import json
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from django.test import TestCase, override_settings
|
||||
|
||||
import pytest
|
||||
|
||||
from emails.utils import (
|
||||
InvalidFromHeader,
|
||||
generate_from_header,
|
||||
get_domains_from_settings,
|
||||
get_email_domain_from_settings,
|
||||
parse_email_header,
|
||||
remove_trackers,
|
||||
InvalidFromHeader,
|
||||
)
|
||||
|
||||
from .models_tests import make_free_test_user, make_premium_test_user # noqa: F401
|
||||
|
||||
|
||||
|
|
|
@ -1,36 +1,28 @@
|
|||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from email import message_from_string
|
||||
from email.message import EmailMessage
|
||||
from typing import Any, cast
|
||||
from unittest._log import _LoggingWatcher
|
||||
from unittest.mock import patch, Mock
|
||||
from uuid import uuid4
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from copy import deepcopy
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from email import message_from_string
|
||||
from email.message import EmailMessage
|
||||
from typing import Any, cast
|
||||
from unittest._log import _LoggingWatcher
|
||||
from unittest.mock import Mock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.http import HttpResponse
|
||||
from django.test import override_settings, Client, SimpleTestCase, TestCase
|
||||
from django.test import Client, SimpleTestCase, TestCase, override_settings
|
||||
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from botocore.exceptions import ClientError
|
||||
from markus.main import MetricsRecord
|
||||
from markus.testing import MetricsMock
|
||||
from model_bakery import baker
|
||||
import pytest
|
||||
|
||||
from privaterelay.ftl_bundles import main
|
||||
from privaterelay.tests.utils import (
|
||||
create_expected_glean_event,
|
||||
get_glean_event,
|
||||
log_extra,
|
||||
)
|
||||
from privaterelay.glean.server_events import GLEAN_EVENT_MOZLOG_TYPE as GLEAN_LOG
|
||||
|
||||
from emails.models import (
|
||||
DeletedAddress,
|
||||
|
@ -40,15 +32,16 @@ from emails.models import (
|
|||
Reply,
|
||||
address_hash,
|
||||
)
|
||||
from emails.policy import relay_policy
|
||||
from emails.types import AWS_SNSMessageJSON, OutgoingHeaders
|
||||
from emails.utils import (
|
||||
InvalidFromHeader,
|
||||
b64_lookup_key,
|
||||
decrypt_reply_metadata,
|
||||
derive_reply_keys,
|
||||
encrypt_reply_metadata,
|
||||
get_domains_from_settings,
|
||||
get_message_id_bytes,
|
||||
InvalidFromHeader,
|
||||
)
|
||||
from emails.views import (
|
||||
EmailDroppedReason,
|
||||
|
@ -66,7 +59,13 @@ from emails.views import (
|
|||
validate_sns_arn_and_type,
|
||||
wrapped_email_test,
|
||||
)
|
||||
from emails.policy import relay_policy
|
||||
from privaterelay.ftl_bundles import main
|
||||
from privaterelay.glean.server_events import GLEAN_EVENT_MOZLOG_TYPE as GLEAN_LOG
|
||||
from privaterelay.tests.utils import (
|
||||
create_expected_glean_event,
|
||||
get_glean_event,
|
||||
log_extra,
|
||||
)
|
||||
|
||||
from .models_tests import (
|
||||
make_free_test_user,
|
||||
|
@ -95,7 +94,7 @@ def load_fixtures(file_suffix: str) -> dict[str, AWS_SNSMessageJSON | str]:
|
|||
file_name = os.path.basename(fixture_file)
|
||||
key = file_name[: -len(file_suffix)]
|
||||
assert key not in fixtures
|
||||
with open(fixture_file, "r") as f:
|
||||
with open(fixture_file) as f:
|
||||
if ext == ".json":
|
||||
fixtures[key] = json.load(f)
|
||||
else:
|
||||
|
@ -387,7 +386,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
super().setUp()
|
||||
self.user = baker.make(User, email="user@example.com")
|
||||
self.profile = self.user.profile
|
||||
self.profile.last_engagement = datetime.now(timezone.utc)
|
||||
self.profile.last_engagement = datetime.now(UTC)
|
||||
self.profile.save()
|
||||
self.sa: SocialAccount = baker.make(
|
||||
SocialAccount, user=self.user, provider="fxa"
|
||||
|
@ -398,7 +397,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.premium_user = make_premium_test_user()
|
||||
self.premium_profile = Profile.objects.get(user=self.premium_user)
|
||||
self.premium_profile.subdomain = "subdomain"
|
||||
self.premium_profile.last_engagement = datetime.now(timezone.utc)
|
||||
self.premium_profile.last_engagement = datetime.now(UTC)
|
||||
self.premium_profile.save()
|
||||
|
||||
def test_single_recipient_sns_notification(self) -> None:
|
||||
|
@ -414,7 +413,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.ra.refresh_from_db()
|
||||
assert self.ra.num_forwarded == 1
|
||||
assert self.ra.last_used_at is not None
|
||||
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0
|
||||
self.ra.user.profile.refresh_from_db()
|
||||
assert self.ra.user.profile.last_engagement is not None
|
||||
assert (
|
||||
|
@ -438,7 +437,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
self.ra.refresh_from_db()
assert self.ra.num_forwarded == 1
assert self.ra.last_used_at is not None
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0

def test_list_email_sns_notification(self) -> None:
"""By default, list emails should still forward."""
@ -448,7 +447,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
self.ra.refresh_from_db()
assert self.ra.num_forwarded == 1
assert self.ra.last_used_at is not None
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0

def test_block_list_email_sns_notification(self) -> None:
"""When an alias is blocking list emails, list emails should not forward."""
@ -498,7 +497,6 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
assert (event := get_glean_event(caplog)) is not None
assert event["category"] == "email"
assert event["name"] == "forwarded"
assert event["extra"]["mask_id"] == self.ra.metrics_id

@override_settings(STATSD_ENABLED=True)
def test_spamVerdict_FAIL_auto_block_doesnt_relay(self) -> None:
@ -526,13 +524,12 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
da = DomainAddress.objects.get(user=self.premium_user, address="wildcard")
|
||||
assert da.num_forwarded == 1
|
||||
assert da.last_used_at
|
||||
assert (datetime.now(tz=timezone.utc) - da.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - da.last_used_at).seconds < 2.0
|
||||
|
||||
mask_event = get_glean_event(caplog, "email_mask", "created")
|
||||
assert mask_event is not None
|
||||
shared_extra_items = {
|
||||
"n_domain_masks": "1",
|
||||
"mask_id": da.metrics_id,
|
||||
"is_random_mask": "false",
|
||||
}
|
||||
expected_mask_event = create_expected_glean_event(
|
||||
|
@ -564,7 +561,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.ra.refresh_from_db()
|
||||
assert self.ra.num_forwarded == 1
|
||||
assert self.ra.last_used_at is not None
|
||||
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0
|
||||
|
||||
def test_unsuccessful_email_relay_message_not_removed_from_s3(self) -> None:
|
||||
self.mock_send_raw_email.side_effect = SEND_RAW_EMAIL_FAILED
|
||||
|
@ -609,7 +606,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.ra.refresh_from_db()
|
||||
assert self.ra.num_forwarded == 1
|
||||
assert self.ra.last_used_at
|
||||
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0
|
||||
|
||||
def test_russian_spam(self) -> None:
|
||||
"""
|
||||
|
@ -631,7 +628,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.ra.refresh_from_db()
|
||||
assert self.ra.num_forwarded == 1
|
||||
assert self.ra.last_used_at
|
||||
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0
|
||||
|
||||
@patch("emails.views.info_logger")
|
||||
def test_plain_text(self, mock_logger: Mock) -> None:
|
||||
|
@ -647,7 +644,7 @@ class SNSNotificationIncomingTest(SNSNotificationTestBase):
|
|||
self.ra.refresh_from_db()
|
||||
assert self.ra.num_forwarded == 1
|
||||
assert self.ra.last_used_at
|
||||
assert (datetime.now(tz=timezone.utc) - self.ra.last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - self.ra.last_used_at).seconds < 2.0
|
||||
mock_logger.warning.assert_not_called()
|
||||
|
||||
@patch("emails.views.info_logger")
|
||||
|
@ -747,8 +744,8 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
|
|||
# Create a premium user matching the s3_stored_replies sender
|
||||
self.user = baker.make(User, email="source@sender.com")
|
||||
self.user.profile.server_storage = True
|
||||
self.user.profile.date_subscribed = datetime.now(tz=timezone.utc)
|
||||
self.user.profile.last_engagement = datetime.now(tz=timezone.utc)
|
||||
self.user.profile.date_subscribed = datetime.now(tz=UTC)
|
||||
self.user.profile.last_engagement = datetime.now(tz=UTC)
|
||||
self.user.profile.save()
|
||||
self.pre_reply_last_engagement = self.user.profile.last_engagement
|
||||
upgrade_test_user_to_premium(self.user)
|
||||
|
@ -792,7 +789,6 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
|
|||
assert (event := get_glean_event(caplog)) is not None
|
||||
assert event["category"] == "email"
|
||||
assert event["name"] == "forwarded"
|
||||
assert event["extra"]["mask_id"] == self.relay_address.metrics_id
|
||||
assert event["extra"]["is_reply"] == "true"
|
||||
|
||||
self.mock_remove_message_from_s3.assert_called_once()
|
||||
|
@ -806,7 +802,7 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
|
|||
assert self.relay_address.num_replied == 1
|
||||
last_used_at = self.relay_address.last_used_at
|
||||
assert last_used_at
|
||||
assert (datetime.now(tz=timezone.utc) - last_used_at).seconds < 2.0
|
||||
assert (datetime.now(tz=UTC) - last_used_at).seconds < 2.0
|
||||
assert (last_en := self.relay_address.user.profile.last_engagement) is not None
|
||||
assert last_en > self.pre_reply_last_engagement
|
||||
|
||||
|
@ -846,9 +842,10 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
operation_name="S3.something",
error_response={"Error": {"Code": "NoSuchKey", "Message": "the message"}},
)
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
ERROR_LOG, "ERROR"
) as events_caplog:
with (
self.assertLogs(INFO_LOG) as info_caplog,
self.assertLogs(ERROR_LOG, "ERROR") as events_caplog,
):
response = _sns_notification(EMAIL_SNS_BODIES["s3_stored_replies"])
self.mock_send_raw_email.assert_not_called()
assert response.status_code == 404
@ -866,9 +863,10 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
operation_name="S3.something",
error_response={"Error": {"Code": "IsNapping", "Message": "snooze"}},
)
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
ERROR_LOG, "ERROR"
) as error_caplog:
with (
self.assertLogs(INFO_LOG) as info_caplog,
self.assertLogs(ERROR_LOG, "ERROR") as error_caplog,
):
response = _sns_notification(EMAIL_SNS_BODIES["s3_stored_replies"])
self.mock_send_raw_email.assert_not_called()
assert response.status_code == 503
@ -888,9 +886,10 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
EMAIL_SNS_BODIES["s3_stored_replies"], text="text content"
)
self.mock_send_raw_email.side_effect = SEND_RAW_EMAIL_FAILED
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
ERROR_LOG, "ERROR"
) as error_caplog:
with (
self.assertLogs(INFO_LOG) as info_caplog,
self.assertLogs(ERROR_LOG, "ERROR") as error_caplog,
):
response = _sns_notification(EMAIL_SNS_BODIES["s3_stored_replies"])
assert response.status_code == 400
assert response.content == b"SES client error"
@ -903,55 +902,136 @@ class SNSNotificationRepliesTest(SNSNotificationTestBase):
|
|||
assert getattr(error_log, "Message") == "the message"
|
||||
|
||||
|
||||
@override_settings(STATSD_ENABLED=True)
|
||||
class BounceHandlingTest(TestCase):
|
||||
def setUp(self):
|
||||
self.user = baker.make(User, email="relayuser@test.com")
|
||||
self.sa: SocialAccount = baker.make(
|
||||
SocialAccount, user=self.user, provider="fxa", uid=str(uuid4())
|
||||
)
|
||||
|
||||
def test_sns_message_with_hard_bounce(self) -> None:
|
||||
pre_request_datetime = datetime.now(timezone.utc)
|
||||
pre_request_datetime = datetime.now(UTC)
|
||||
|
||||
_sns_notification(BOUNCE_SNS_BODIES["hard"])
|
||||
with self.assertLogs(INFO_LOG) as logs, MetricsMock() as mm:
|
||||
_sns_notification(BOUNCE_SNS_BODIES["hard"])
|
||||
|
||||
self.user.refresh_from_db()
|
||||
assert self.user.profile.last_hard_bounce is not None
|
||||
assert self.user.profile.last_hard_bounce >= pre_request_datetime
|
||||
|
||||
def test_sns_message_with_soft_bounce(self) -> None:
|
||||
pre_request_datetime = datetime.now(timezone.utc)
|
||||
assert len(logs.records) == 1
|
||||
log_data = log_extra(logs.records[0])
|
||||
assert (diagnostic := log_data["bounce_diagnostic"])
|
||||
assert log_data == {
|
||||
"bounce_action": "failed",
|
||||
"bounce_diagnostic": diagnostic,
|
||||
"bounce_status": "5.1.1",
|
||||
"bounce_subtype": "OnAccountSuppressionList",
|
||||
"bounce_type": "Permanent",
|
||||
"domain": "test.com",
|
||||
"relay_action": "hard_bounce",
|
||||
"user_match": "found",
|
||||
"fxa_id": self.sa.uid,
|
||||
}
|
||||
|
||||
_sns_notification(BOUNCE_SNS_BODIES["soft"])
|
||||
mm.assert_incr_once(
|
||||
"fx.private.relay.email_bounce",
|
||||
tags=[
|
||||
"bounce_type:permanent",
|
||||
"bounce_subtype:onaccountsuppressionlist",
|
||||
"user_match:found",
|
||||
"relay_action:hard_bounce",
|
||||
],
|
||||
)
|
||||
|
||||
def test_sns_message_with_soft_bounce(self) -> None:
|
||||
pre_request_datetime = datetime.now(UTC)
|
||||
|
||||
with self.assertLogs(INFO_LOG) as logs, MetricsMock() as mm:
|
||||
_sns_notification(BOUNCE_SNS_BODIES["soft"])
|
||||
|
||||
self.user.refresh_from_db()
|
||||
assert self.user.profile.last_soft_bounce is not None
|
||||
assert self.user.profile.last_soft_bounce >= pre_request_datetime
|
||||
|
||||
assert len(logs.records) == 1
|
||||
log_data = log_extra(logs.records[0])
|
||||
assert (diagnostic := log_data["bounce_diagnostic"])
|
||||
assert log_data == {
|
||||
"bounce_action": "failed",
|
||||
"bounce_diagnostic": diagnostic,
|
||||
"bounce_status": "5.1.1",
|
||||
"bounce_subtype": "SRETeamEatenByDinosaurs",
|
||||
"bounce_type": "Transient",
|
||||
"domain": "test.com",
|
||||
"relay_action": "soft_bounce",
|
||||
"user_match": "found",
|
||||
"fxa_id": self.sa.uid,
|
||||
}
|
||||
|
||||
mm.assert_incr_once(
|
||||
"fx.private.relay.email_bounce",
|
||||
tags=[
|
||||
"bounce_type:transient",
|
||||
"bounce_subtype:sreteameatenbydinosaurs",
|
||||
"user_match:found",
|
||||
"relay_action:soft_bounce",
|
||||
],
|
||||
)
|
||||
|
||||
def test_sns_message_with_spam_bounce_sets_auto_block_spam(self):
|
||||
_sns_notification(BOUNCE_SNS_BODIES["spam"])
|
||||
with self.assertLogs(INFO_LOG) as logs, MetricsMock() as mm:
|
||||
_sns_notification(BOUNCE_SNS_BODIES["spam"])
|
||||
self.user.refresh_from_db()
|
||||
assert self.user.profile.auto_block_spam
|
||||
|
||||
assert len(logs.records) == 1
|
||||
log_data = log_extra(logs.records[0])
|
||||
assert (diagnostic := log_data["bounce_diagnostic"])
|
||||
assert log_data == {
|
||||
"bounce_action": "failed",
|
||||
"bounce_diagnostic": diagnostic,
|
||||
"bounce_status": "5.1.1",
|
||||
"bounce_subtype": "StopRelayingSpamForThisUser",
|
||||
"bounce_type": "Transient",
|
||||
"domain": "test.com",
|
||||
"relay_action": "auto_block_spam",
|
||||
"user_match": "found",
|
||||
"fxa_id": self.sa.uid,
|
||||
}
|
||||
|
||||
mm.assert_incr_once(
|
||||
"fx.private.relay.email_bounce",
|
||||
tags=[
|
||||
"bounce_type:transient",
|
||||
"bounce_subtype:stoprelayingspamforthisuser",
|
||||
"user_match:found",
|
||||
"relay_action:auto_block_spam",
|
||||
],
|
||||
)
|
||||
|
||||
def test_sns_message_with_hard_bounce_and_optout(self) -> None:
|
||||
self.sa.extra_data["metricsEnabled"] = False
|
||||
self.sa.save()
|
||||
|
||||
with self.assertLogs(INFO_LOG) as logs:
|
||||
_sns_notification(BOUNCE_SNS_BODIES["hard"])
|
||||
|
||||
log_data = log_extra(logs.records[0])
|
||||
assert log_data["user_match"] == "found"
|
||||
assert not log_data["fxa_id"]
|
||||
|
||||
|
||||
@override_settings(STATSD_ENABLED=True)
|
||||
class ComplaintHandlingTest(TestCase):
|
||||
"""Test Complaint notifications and events."""
|
||||
|
||||
def setUp(self):
|
||||
self.user = baker.make(User, email="relayuser@test.com")
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def use_caplog(self, caplog):
|
||||
self.caplog = caplog
|
||||
|
||||
@override_settings(STATSD_ENABLED=True)
|
||||
def test_notification_type_complaint(self):
|
||||
"""
|
||||
A notificationType of complaint increments a counter, logs details, and
|
||||
returns 200.
|
||||
|
||||
Example derived from:
|
||||
https://docs.aws.amazon.com/ses/latest/dg/notification-contents.html#complaint-object
|
||||
"""
|
||||
assert self.user.profile.auto_block_spam is False
|
||||
|
||||
self.sa: SocialAccount = baker.make(
|
||||
SocialAccount, user=self.user, provider="fxa", uid=str(uuid4())
|
||||
)
|
||||
complaint = {
|
||||
"notificationType": "Complaint",
|
||||
"complaint": {
|
||||
|
@ -965,9 +1045,20 @@ class ComplaintHandlingTest(TestCase):
|
|||
),
|
||||
},
|
||||
}
|
||||
json_body = {"Message": json.dumps(complaint)}
|
||||
with MetricsMock() as mm:
|
||||
response = _sns_notification(json_body)
|
||||
self.complaint_body = {"Message": json.dumps(complaint)}
|
||||
|
||||
def test_notification_type_complaint(self):
|
||||
"""
|
||||
A notificationType of complaint increments a counter, logs details, and
|
||||
returns 200.
|
||||
|
||||
Example derived from:
|
||||
https://docs.aws.amazon.com/ses/latest/dg/notification-contents.html#complaint-object
|
||||
"""
|
||||
assert self.user.profile.auto_block_spam is False
|
||||
|
||||
with self.assertLogs(INFO_LOG) as logs, MetricsMock() as mm:
|
||||
response = _sns_notification(self.complaint_body)
|
||||
assert response.status_code == 200
|
||||
|
||||
self.user.profile.refresh_from_db()
|
||||
|
@ -982,20 +1073,30 @@ class ComplaintHandlingTest(TestCase):
|
|||
"relay_action:auto_block_spam",
|
||||
],
|
||||
)
|
||||
assert len(self.caplog.records) == 2
|
||||
record1, record2 = self.caplog.records
|
||||
assert record1.msg == "complaint_notification"
|
||||
assert record1.complaint_subtype is None
|
||||
assert record1.complaint_user_agent == "ExampleCorp Feedback Loop (V0.01)"
|
||||
assert record1.complaint_feedback == "abuse"
|
||||
assert record1.user_match == "found"
|
||||
assert record1.relay_action == "auto_block_spam"
|
||||
assert record1.domain == "test.com"
|
||||
assert len(logs.records) == 1
|
||||
record = logs.records[0]
|
||||
assert record.msg == "complaint_notification"
|
||||
log_data = log_extra(record)
|
||||
assert log_data == {
|
||||
"complaint_feedback": "abuse",
|
||||
"complaint_subtype": None,
|
||||
"complaint_user_agent": "ExampleCorp Feedback Loop (V0.01)",
|
||||
"domain": "test.com",
|
||||
"relay_action": "auto_block_spam",
|
||||
"user_match": "found",
|
||||
"fxa_id": self.sa.uid,
|
||||
}
|
||||
|
||||
assert record2.msg == "complaint_received"
|
||||
assert record2.recipient_domains == ["test.com"]
|
||||
assert record2.subtype is None
|
||||
assert record2.feedback == "abuse"
|
||||
def test_complaint_log_with_optout(self) -> None:
|
||||
self.sa.extra_data["metricsEnabled"] = False
|
||||
self.sa.save()
|
||||
|
||||
with self.assertLogs(INFO_LOG) as logs:
|
||||
_sns_notification(self.complaint_body)
|
||||
|
||||
log_data = log_extra(logs.records[0])
|
||||
assert log_data["user_match"] == "found"
|
||||
assert not log_data["fxa_id"]
|
||||
|
||||
|
||||
class SNSNotificationRemoveEmailsInS3Test(TestCase):
|
||||
|
@ -1261,7 +1362,6 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
) -> dict[str, Any]:
|
||||
extra_items = {
|
||||
"n_random_masks": "1",
|
||||
"mask_id": self.address.metrics_id,
|
||||
"is_random_mask": "true",
|
||||
"is_reply": "true" if is_reply else "false",
|
||||
}
|
||||
|
@ -1301,7 +1401,7 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
mm.assert_incr_once("fx.private.relay.email_auto_suppressed_for_spam")
|
||||
|
||||
def test_user_bounce_soft_paused_email_in_s3_deleted(self) -> None:
|
||||
self.profile.last_soft_bounce = datetime.now(timezone.utc)
|
||||
self.profile.last_soft_bounce = datetime.now(UTC)
|
||||
self.profile.save()
|
||||
|
||||
with self.assertLogs(INFO_LOG) as caplog, MetricsMock() as mm:
|
||||
|
@ -1313,7 +1413,7 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
mm.assert_incr_once("fx.private.relay.email_suppressed_for_soft_bounce")
|
||||
|
||||
def test_user_bounce_hard_paused_email_in_s3_deleted(self) -> None:
|
||||
self.profile.last_hard_bounce = datetime.now(timezone.utc)
|
||||
self.profile.last_hard_bounce = datetime.now(UTC)
|
||||
self.profile.save()
|
||||
|
||||
with self.assertLogs(INFO_LOG) as caplog, MetricsMock() as mm:
|
||||
|
@ -1343,8 +1443,8 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
|
||||
def test_flagged_user_email_in_s3_deleted(self) -> None:
|
||||
profile = self.address.user.profile
|
||||
profile.last_account_flagged = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_account_flagged = datetime.now(UTC)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
pre_flagged_last_engagement = profile.last_engagement
|
||||
|
||||
|
@ -1361,7 +1461,7 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
self.address.enabled = False
|
||||
self.address.save()
|
||||
profile = self.address.user.profile
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
pre_blocked_email_last_engagement = profile.last_engagement
|
||||
|
||||
|
@ -1386,7 +1486,7 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
self.address.block_list_emails = True
|
||||
self.address.save()
|
||||
profile = self.address.user.profile
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
pre_blocked_email_last_engagement = profile.last_engagement
|
||||
mocked_email_is_from_list.return_value = True
|
||||
|
@ -1412,9 +1512,10 @@ class SNSNotificationValidUserEmailsInS3Test(TestCase):
|
|||
{"Error": {"Code": "SomeErrorCode", "Message": "Details"}}, ""
|
||||
)
|
||||
|
||||
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
|
||||
ERROR_LOG, "ERROR"
|
||||
) as error_caplog:
|
||||
with (
|
||||
self.assertLogs(INFO_LOG) as info_caplog,
|
||||
self.assertLogs(ERROR_LOG, "ERROR") as error_caplog,
|
||||
):
|
||||
response = _sns_notification(EMAIL_SNS_BODIES["s3_stored"])
|
||||
self.mock_remove_message_from_s3.assert_not_called()
|
||||
assert response.status_code == 503
|
||||
|
@ -1519,9 +1620,10 @@ class SnsMessageTest(TestCase):
|
|||
operation_name="S3.something",
|
||||
error_response={"Error": {"Code": "NoSuchKey", "Message": "the message"}},
|
||||
)
|
||||
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
|
||||
ERROR_LOG, "ERROR"
|
||||
) as error_caplog:
|
||||
with (
|
||||
self.assertLogs(INFO_LOG) as info_caplog,
|
||||
self.assertLogs(ERROR_LOG, "ERROR") as error_caplog,
|
||||
):
|
||||
response = _sns_message(self.message_json)
|
||||
self.mock_ses_client.send_raw_email.assert_not_called()
|
||||
assert response.status_code == 404
|
||||
|
@ -1535,9 +1637,10 @@ class SnsMessageTest(TestCase):
|
|||
|
||||
def test_ses_send_raw_email_has_client_error_early_exits(self) -> None:
|
||||
self.mock_ses_client.send_raw_email.side_effect = SEND_RAW_EMAIL_FAILED
|
||||
with self.assertLogs(INFO_LOG) as info_caplog, self.assertLogs(
|
||||
ERROR_LOG, "ERROR"
|
||||
) as error_caplog:
|
||||
with (
|
||||
self.assertLogs(INFO_LOG) as info_caplog,
|
||||
self.assertLogs(ERROR_LOG, "ERROR") as error_caplog,
|
||||
):
|
||||
response = _sns_message(self.message_json)
|
||||
self.mock_ses_client.send_raw_email.assert_called_once()
|
||||
assert response.status_code == 503
|
||||
|
@ -1624,17 +1727,21 @@ class GetAddressTest(TestCase):
|
|||
assert _get_address("domain@subdomain.Test.Com") == self.domain_address
|
||||
|
||||
def test_subdomain_for_wrong_domain_raises(self) -> None:
|
||||
with pytest.raises(
|
||||
ObjectDoesNotExist
|
||||
) as exc_info, MetricsMock() as mm, self.assertNoLogs(GLEAN_LOG, "INFO"):
|
||||
with (
|
||||
pytest.raises(ObjectDoesNotExist) as exc_info,
|
||||
MetricsMock() as mm,
|
||||
self.assertNoLogs(GLEAN_LOG, "INFO"),
|
||||
):
|
||||
_get_address("unknown@subdomain.example.com")
|
||||
assert str(exc_info.value) == "Address does not exist"
|
||||
mm.assert_incr_once("fx.private.relay.email_for_not_supported_domain")
|
||||
|
||||
def test_unknown_subdomain_raises(self) -> None:
|
||||
with pytest.raises(
|
||||
Profile.DoesNotExist
|
||||
), MetricsMock() as mm, self.assertNoLogs(GLEAN_LOG, "INFO"):
|
||||
with (
|
||||
pytest.raises(Profile.DoesNotExist),
|
||||
MetricsMock() as mm,
|
||||
self.assertNoLogs(GLEAN_LOG, "INFO"),
|
||||
):
|
||||
_get_address("domain@unknown.test.com")
|
||||
mm.assert_incr_once("fx.private.relay.email_for_dne_subdomain")
|
||||
|
||||
|
@ -1661,7 +1768,6 @@ class GetAddressTest(TestCase):
|
|||
extra_items={
|
||||
"n_random_masks": "1",
|
||||
"n_domain_masks": "2",
|
||||
"mask_id": address.metrics_id,
|
||||
"is_random_mask": "false",
|
||||
"has_website": "false",
|
||||
"created_by_api": "false",
|
||||
|
@ -1694,7 +1800,6 @@ class GetAddressTest(TestCase):
|
|||
extra_items={
|
||||
"n_random_masks": "1",
|
||||
"n_domain_masks": "2",
|
||||
"mask_id": address.metrics_id,
|
||||
"is_random_mask": "false",
|
||||
"has_website": "false",
|
||||
"created_by_api": "false",
|
||||
|
|
|
@ -3,7 +3,6 @@ from django.urls import path

from . import views


urlpatterns = [
path("sns-inbound", views.sns_inbound),
]
@ -1,34 +1,35 @@
from __future__ import annotations

import base64
import contextlib
import json
import logging
import pathlib
import re
from collections.abc import Callable
from email.errors import InvalidHeaderDefect
from email.headerregistry import Address, AddressHeader
from email.message import EmailMessage
from email.utils import formataddr, parseaddr
from functools import cache
from typing import cast, Any, Callable, TypeVar
import json
import pathlib
import re
from django.template.loader import render_to_string
from django.utils.text import Truncator
import requests

from botocore.exceptions import ClientError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDFExpand
from mypy_boto3_ses.type_defs import ContentTypeDef, SendRawEmailResponseTypeDef
import jwcrypto.jwe
import jwcrypto.jwk
import markus
import logging
from typing import Any, Literal, TypeVar, cast
from urllib.parse import quote_plus, urlparse

from django.conf import settings
from django.contrib.auth.models import Group, User
from django.template.defaultfilters import linebreaksbr, urlize
from django.template.loader import render_to_string
from django.utils.text import Truncator

import jwcrypto.jwe
import jwcrypto.jwk
import markus
import requests
from allauth.socialaccount.models import SocialAccount
from botocore.exceptions import ClientError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDFExpand
from mypy_boto3_ses.type_defs import ContentTypeDef, SendRawEmailResponseTypeDef

from privaterelay.plans import get_bundle_country_language_mapping
from privaterelay.utils import get_countries_info_from_lang_and_mapping
@ -52,7 +53,9 @@ def ses_message_props(data: str) -> ContentTypeDef:
return {"Charset": "UTF-8", "Data": data}


def get_domains_from_settings():
def get_domains_from_settings() -> (
dict[Literal["RELAY_FIREFOX_DOMAIN", "MOZMAIL_DOMAIN"], str]
):
# HACK: detect if code is running in django tests
if "testserver" in settings.ALLOWED_HOSTS:
return {"RELAY_FIREFOX_DOMAIN": "default.com", "MOZMAIL_DOMAIN": "test.com"}
@ -72,7 +75,7 @@ def get_trackers(level):
trackers = []
file_name = f"{tracker_list_name}.json"
try:
with open(TRACKER_FOLDER_PATH / file_name, "r") as f:
with open(TRACKER_FOLDER_PATH / file_name) as f:
trackers = json.load(f)
except FileNotFoundError:
trackers = download_trackers(shavar_prod_lists_url, category)
|
emails/views.py
|
@ -1,41 +1,45 @@
import html
import json
import logging
import re
import shlex
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from datetime import UTC, datetime
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode

from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active

from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt

from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from markus.utils import generate_tag
from sentry_sdk import capture_message
from waffle import sample_is_active

from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import (
flag_is_active_in_task,
get_subplat_upgrade_link_by_language,
glean_logger,
)

from .models import (
CannotMakeAddressException,
@ -48,14 +52,16 @@ from .models import (
get_domain_numerical,
)
from .policy import relay_policy
from .sns import SUPPORTED_SNS_TYPES, verify_from_sns
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
OutgoingHeaders,
)
from .utils import (
InvalidFromHeader,
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
@ -69,17 +75,12 @@ from .utils import (
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
parse_email_header,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES

from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task

logger = logging.getLogger("events")
info_logger = logging.getLogger("eventsinfo")
@ -675,7 +676,7 @@ def _handle_received(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
address.num_blocked += 1
|
||||
address.save(update_fields=["num_blocked"])
|
||||
_record_receipt_verdicts(receipt, "disabled_alias")
|
||||
user_profile.last_engagement = datetime.now(timezone.utc)
|
||||
user_profile.last_engagement = datetime.now(UTC)
|
||||
user_profile.save()
|
||||
glean_logger().log_email_blocked(mask=address, reason="block_all")
|
||||
return HttpResponse("Address is temporarily disabled.")
|
||||
|
@ -693,7 +694,7 @@ def _handle_received(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
incr_if_enabled("list_email_for_address_blocking_lists", 1)
|
||||
address.num_blocked += 1
|
||||
address.save(update_fields=["num_blocked"])
|
||||
user_profile.last_engagement = datetime.now(timezone.utc)
|
||||
user_profile.last_engagement = datetime.now(UTC)
|
||||
user_profile.save()
|
||||
glean_logger().log_email_blocked(mask=address, reason="block_promotional")
|
||||
return HttpResponse("Address is not accepting list emails.")
|
||||
|
@ -792,10 +793,10 @@ def _handle_received(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
user_profile.update_abuse_metric(
|
||||
email_forwarded=True, forwarded_email_size=len(incoming_email_bytes)
|
||||
)
|
||||
user_profile.last_engagement = datetime.now(timezone.utc)
|
||||
user_profile.last_engagement = datetime.now(UTC)
|
||||
user_profile.save()
|
||||
address.num_forwarded += 1
|
||||
address.last_used_at = datetime.now(timezone.utc)
|
||||
address.last_used_at = datetime.now(UTC)
|
||||
if level_one_trackers_removed:
|
||||
address.num_level_one_trackers_blocked = (
|
||||
address.num_level_one_trackers_blocked or 0
|
||||
|
@ -1007,7 +1008,7 @@ def _replace_headers(
|
|||
"""
|
||||
# Look for headers to drop
|
||||
to_drop: list[str] = []
|
||||
replacements: set[str] = set(_k.lower() for _k in headers.keys())
|
||||
replacements: set[str] = {_k.lower() for _k in headers.keys()}
|
||||
issues: EmailHeaderIssues = defaultdict(list)
|
||||
|
||||
# Detect non-compliant headers in incoming emails
|
||||
|
@ -1084,7 +1085,7 @@ def _convert_html_content(
|
|||
now: datetime | None = None,
|
||||
) -> tuple[str, int]:
|
||||
# frontend expects a timestamp in milliseconds
|
||||
now = now or datetime.now(timezone.utc)
|
||||
now = now or datetime.now(UTC)
|
||||
datetime_now_ms = int(now.timestamp() * 1000)
|
||||
|
||||
# scramble alias so that clients don't recognize it
|
||||
|
@ -1319,7 +1320,7 @@ def _handle_reply(
|
|||
reply_record.increment_num_replied()
|
||||
profile = address.user.profile
|
||||
profile.update_abuse_metric(replied=True)
|
||||
profile.last_engagement = datetime.now(timezone.utc)
|
||||
profile.last_engagement = datetime.now(UTC)
|
||||
profile.save()
|
||||
glean_logger().log_email_forwarded(mask=address, is_reply=True)
|
||||
return HttpResponse("Sent email to final recipient.", status=200)
|
||||
|
@ -1363,7 +1364,7 @@ def _get_domain_address(local_portion: str, domain_portion: str) -> DomainAddres
|
|||
mask=domain_address,
|
||||
created_by_api=False,
|
||||
)
|
||||
domain_address.last_used_at = datetime.now(timezone.utc)
|
||||
domain_address.last_used_at = datetime.now(UTC)
|
||||
domain_address.save()
|
||||
return domain_address
|
||||
except Profile.DoesNotExist as e:
|
||||
|
@ -1439,16 +1440,14 @@ def _handle_bounce(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
* bounce_diagnostic: 'diagnosticCode' from bounced recipient data, or None
|
||||
* bounce_extra: Extra data from bounce_recipient data, if any
|
||||
* domain: User's real email address domain, if an address was given
|
||||
|
||||
Emits a legacy log "bounced recipient domain: {domain}", with data from
|
||||
bounced recipient data, without the email address.
|
||||
* fxa_id - The Mozilla account (previously known as Firefox Account) ID of the user
|
||||
"""
|
||||
bounce = message_json.get("bounce", {})
|
||||
bounce_type = bounce.get("bounceType", "none")
|
||||
bounce_subtype = bounce.get("bounceSubType", "none")
|
||||
bounced_recipients = bounce.get("bouncedRecipients", [])
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
bounce_data = []
|
||||
for recipient in bounced_recipients:
|
||||
recipient_address = recipient.pop("emailAddress", None)
|
||||
|
@ -1476,6 +1475,10 @@ def _handle_bounce(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
user = User.objects.get(email=recipient_address)
|
||||
profile = user.profile
|
||||
data["user_match"] = "found"
|
||||
if (fxa := profile.fxa) and profile.metrics_enabled:
|
||||
data["fxa_id"] = fxa.uid
|
||||
else:
|
||||
data["fxa_id"] = ""
|
||||
except User.DoesNotExist:
|
||||
# TODO: handle bounce for a user who no longer exists
|
||||
# add to SES account-wide suppression list?
|
||||
|
@ -1518,19 +1521,6 @@ def _handle_bounce(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
)
|
||||
info_logger.info("bounce_notification", extra=data)
|
||||
|
||||
# Legacy log, can be removed Q4 2023
|
||||
recipient_domain = data.get("domain")
|
||||
if recipient_domain:
|
||||
legacy_extra = {
|
||||
"action": data.get("bounce_action"),
|
||||
"status": data.get("bounce_status"),
|
||||
"diagnosticCode": data.get("bounce_diagnostic"),
|
||||
}
|
||||
legacy_extra.update(data.get("bounce_extra", {}))
|
||||
info_logger.info(
|
||||
f"bounced recipient domain: {recipient_domain}", extra=legacy_extra
|
||||
)
|
||||
|
||||
if any(data["user_match"] == "missing" for data in bounce_data):
|
||||
return HttpResponse("Address does not exist", status=404)
|
||||
return HttpResponse("OK", status=200)
|
||||
|
@ -1557,11 +1547,7 @@ def _handle_complaint(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
* complaint_user_agent - identifies the client used to file the complaint
|
||||
* complaint_extra - Extra data from complainedRecipients data, if any
|
||||
* domain - User's domain, if an address was given
|
||||
|
||||
Emits a legacy log "complaint_received", with data:
|
||||
* recipient_domains: list of extracted user domains
|
||||
* subtype: 'onaccounsuppressionlist', or 'none'
|
||||
* feedback: feedback from ISP or 'none'
|
||||
* fxa_id - The Mozilla account (previously known as Firefox Account) ID of the user
|
||||
"""
|
||||
complaint = deepcopy(message_json.get("complaint", {}))
|
||||
complained_recipients = complaint.pop("complainedRecipients", [])
|
||||
|
@ -1594,6 +1580,10 @@ def _handle_complaint(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
user = User.objects.get(email=recipient_address)
|
||||
profile = user.profile
|
||||
data["user_match"] = "found"
|
||||
if (fxa := profile.fxa) and profile.metrics_enabled:
|
||||
data["fxa_id"] = fxa.uid
|
||||
else:
|
||||
data["fxa_id"] = ""
|
||||
except User.DoesNotExist:
|
||||
data["user_match"] = "missing"
|
||||
continue
|
||||
|
@ -1620,17 +1610,6 @@ def _handle_complaint(message_json: AWS_SNSMessageJSON) -> HttpResponse:
|
|||
)
|
||||
info_logger.info("complaint_notification", extra=data)
|
||||
|
||||
# Legacy log, can be removed Q4 2023
|
||||
domains = [data["domain"] for data in complaint_data if "domain" in data]
|
||||
info_logger.info(
|
||||
"complaint_received",
|
||||
extra={
|
||||
"recipient_domains": sorted(domains),
|
||||
"subtype": subtype,
|
||||
"feedback": feedback,
|
||||
},
|
||||
)
|
||||
|
||||
if any(data["user_match"] == "missing" for data in complaint_data):
|
||||
return HttpResponse("Address does not exist", status=404)
|
||||
return HttpResponse("OK", status=200)
|
||||
|
|
|
@ -21,16 +21,16 @@
|
|||
"@fluent/langneg": "^0.7.0",
|
||||
"@fluent/react": "^0.15.2",
|
||||
"@mozilla-protocol/core": "^17.0.1",
|
||||
"@stripe/stripe-js": "^3.1.0",
|
||||
"@stripe/stripe-js": "^3.3.0",
|
||||
"chokidar-cli": "^3.0.0",
|
||||
"cldr-localenames-modern": "^44.1.0",
|
||||
"msw": "^2.2.13",
|
||||
"cldr-localenames-modern": "^45.0.0",
|
||||
"msw": "^2.2.14",
|
||||
"react": "18.2.0",
|
||||
"react-aria": "^3.32.1",
|
||||
"react-confetti": "^6.1.0",
|
||||
"react-dom": "18.2.0",
|
||||
"react-ga": "^3.3.1",
|
||||
"react-intersection-observer": "^9.8.1",
|
||||
"react-intersection-observer": "^9.8.2",
|
||||
"react-qr-code": "^2.0.12",
|
||||
"react-singleton-hook": "^4.0.1",
|
||||
"react-stately": "^3.30.1",
|
||||
|
@ -38,21 +38,21 @@
|
|||
"swr": "^2.2.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@next/eslint-plugin-next": "^14.1.4",
|
||||
"@testing-library/dom": "^9.3.4",
|
||||
"@next/eslint-plugin-next": "^14.2.2",
|
||||
"@testing-library/dom": "^10.0.0",
|
||||
"@testing-library/jest-dom": "^6.4.2",
|
||||
"@testing-library/react": "^14.2.2",
|
||||
"@testing-library/react": "^15.0.2",
|
||||
"@testing-library/user-event": "^14.5.2",
|
||||
"@types/jest-axe": "^3.5.9",
|
||||
"@types/react": "^18.2.73",
|
||||
"@types/react": "^18.2.79",
|
||||
"@typescript-eslint/eslint-plugin": "^7.4.0",
|
||||
"@typescript-eslint/parser": "^7.4.0",
|
||||
"babel-jest": "^29.7.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-next": "^14.1.4",
|
||||
"eslint-config-next": "^14.2.2",
|
||||
"eslint-plugin-jest-dom": "^5.2.0",
|
||||
"eslint-plugin-testing-library": "^6.2.0",
|
||||
"fast-check": "^3.17.1",
|
||||
"fast-check": "^3.17.2",
|
||||
"husky": "^9.0.11",
|
||||
"jest": "^29.7.0",
|
||||
"jest-axe": "^8.0.0",
|
||||
|
@ -60,14 +60,14 @@
|
|||
"jest-junit": "^16.0.0",
|
||||
"license-checker": "^25.0.1",
|
||||
"lint-staged": "^15.2.2",
|
||||
"next": "^14.1.4",
|
||||
"next": "^14.2.2",
|
||||
"prettier": "3.2.5",
|
||||
"react-test-renderer": "^18.2.0",
|
||||
"sass": "^1.72.0",
|
||||
"sass": "^1.75.0",
|
||||
"stylelint": "^16.3.1",
|
||||
"stylelint-config-recommended-scss": "^14.0.0",
|
||||
"stylelint-scss": "^6.2.1",
|
||||
"typescript": "^5.4.3"
|
||||
"typescript": "^5.4.5"
|
||||
},
|
||||
"volta": {
|
||||
"node": "18.18.2",
|
||||
|
|
|
@ -10,8 +10,8 @@ Changes:
* Simplified interfaces of Csv and Choices to our usage
"""

from collections.abc import Sequence
from typing import Any, Callable, Generic, Optional, TypeVar, Union, overload
from collections.abc import Callable, Sequence
from typing import Any, Generic, TypeVar, overload

# Unreleased as of 3.6 - accepts a bool
# def strtobool(value: Union[str, bool]) -> bool: ...
@ -25,7 +25,7 @@ def config(option: str) -> str: ...
@overload
def config(option: str, default: str) -> str: ...
@overload
def config(option: str, default: _DefaultType) -> Union[str, _DefaultType]: ...
def config(option: str, default: _DefaultType) -> str | _DefaultType: ...
@overload
def config(
option: str, default: _DefaultType, cast: Callable[[_DefaultType], _CastReturnType]
@ -5,7 +5,8 @@
from typing import Literal, overload

from django.http import HttpRequest
from waffle.models import Flag, Switch, Sample

from waffle.models import Flag, Sample, Switch

VERSION: tuple[int, ...]
@ -3,7 +3,9 @@
|
|||
# Can be removed once type hints ship in the release after v3.0.0
|
||||
|
||||
from typing import TypeVar
|
||||
|
||||
from django.db import models
|
||||
|
||||
from waffle.models import BaseModel
|
||||
|
||||
_BASE_T = TypeVar("_BASE_T", bound=BaseModel)
|
||||
|
|
|
@ -9,6 +9,7 @@ from django.contrib.auth.models import AbstractBaseUser
|
|||
from django.db import models
|
||||
from django.http import HttpRequest
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from django_stubs_ext.db.models import TypedModelMeta
|
||||
from waffle import managers
|
||||
|
||||
|
|
|
@ -2,9 +2,10 @@
|
|||
# https://github.com/django-waffle/django-waffle/blob/v3.0.0/waffle/testutils.py
|
||||
# Can be removed once type hints ship in the release after v3.0.0
|
||||
|
||||
from django.test.utils import TestContextDecorator
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from django.test.utils import TestContextDecorator
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_FlagActive = bool | None
|
||||
_SampleActive = bool | float | None
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
# Can be removed once type hints ship in the release after v3.0.0
|
||||
|
||||
from typing import Any
|
||||
|
||||
from django.core.cache.backends.base import BaseCache
|
||||
|
||||
def get_setting(name: str, default: Any | None = None) -> Any: ...
|
||||
@ -12,7 +12,7 @@
"frontend"
],
"devDependencies": {
"@playwright/test": "1.42.1",
"@playwright/test": "1.43.1",
"dotenv": "^16.4.5"
}
},
@ -25,16 +25,16 @@
|
|||
"@fluent/langneg": "^0.7.0",
|
||||
"@fluent/react": "^0.15.2",
|
||||
"@mozilla-protocol/core": "^17.0.1",
|
||||
"@stripe/stripe-js": "^3.1.0",
|
||||
"@stripe/stripe-js": "^3.3.0",
|
||||
"chokidar-cli": "^3.0.0",
|
||||
"cldr-localenames-modern": "^44.1.0",
|
||||
"msw": "^2.2.13",
|
||||
"cldr-localenames-modern": "^45.0.0",
|
||||
"msw": "^2.2.14",
|
||||
"react": "18.2.0",
|
||||
"react-aria": "^3.32.1",
|
||||
"react-confetti": "^6.1.0",
|
||||
"react-dom": "18.2.0",
|
||||
"react-ga": "^3.3.1",
|
||||
"react-intersection-observer": "^9.8.1",
|
||||
"react-intersection-observer": "^9.8.2",
|
||||
"react-qr-code": "^2.0.12",
|
||||
"react-singleton-hook": "^4.0.1",
|
||||
"react-stately": "^3.30.1",
|
||||
|
@ -42,21 +42,21 @@
|
|||
"swr": "^2.2.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@next/eslint-plugin-next": "^14.1.4",
|
||||
"@testing-library/dom": "^9.3.4",
|
||||
"@next/eslint-plugin-next": "^14.2.2",
|
||||
"@testing-library/dom": "^10.0.0",
|
||||
"@testing-library/jest-dom": "^6.4.2",
|
||||
"@testing-library/react": "^14.2.2",
|
||||
"@testing-library/react": "^15.0.2",
|
||||
"@testing-library/user-event": "^14.5.2",
|
||||
"@types/jest-axe": "^3.5.9",
|
||||
"@types/react": "^18.2.73",
|
||||
"@types/react": "^18.2.79",
|
||||
"@typescript-eslint/eslint-plugin": "^7.4.0",
|
||||
"@typescript-eslint/parser": "^7.4.0",
|
||||
"babel-jest": "^29.7.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-next": "^14.1.4",
|
||||
"eslint-config-next": "^14.2.2",
|
||||
"eslint-plugin-jest-dom": "^5.2.0",
|
||||
"eslint-plugin-testing-library": "^6.2.0",
|
||||
"fast-check": "^3.17.1",
|
||||
"fast-check": "^3.17.2",
|
||||
"husky": "^9.0.11",
|
||||
"jest": "^29.7.0",
|
||||
"jest-axe": "^8.0.0",
|
||||
|
@ -64,14 +64,14 @@
|
|||
"jest-junit": "^16.0.0",
|
||||
"license-checker": "^25.0.1",
|
||||
"lint-staged": "^15.2.2",
|
||||
"next": "^14.1.4",
|
||||
"next": "^14.2.2",
|
||||
"prettier": "3.2.5",
|
||||
"react-test-renderer": "^18.2.0",
|
||||
"sass": "^1.72.0",
|
||||
"sass": "^1.75.0",
|
||||
"stylelint": "^16.3.1",
|
||||
"stylelint-config-recommended-scss": "^14.0.0",
|
||||
"stylelint-scss": "^6.2.1",
|
||||
"typescript": "^5.4.3"
|
||||
"typescript": "^5.4.5"
|
||||
}
|
||||
},
|
||||
"frontend/node_modules/@fluent/bundle": {
|
||||
|
@ -2013,15 +2013,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/env": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.4.tgz",
|
||||
"integrity": "sha512-e7X7bbn3Z6DWnDi75UWn+REgAbLEqxI8Tq2pkFOFAMpWAWApz/YCUhtWMWn410h8Q2fYiYL7Yg5OlxMOCfFjJQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.2.tgz",
|
||||
"integrity": "sha512-sk72qRfM1Q90XZWYRoJKu/UWlTgihrASiYw/scb15u+tyzcze3bOuJ/UV6TBOQEeUaxOkRqGeuGUdiiuxc5oqw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@next/eslint-plugin-next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.4.tgz",
|
||||
"integrity": "sha512-n4zYNLSyCo0Ln5b7qxqQeQ34OZKXwgbdcx6kmkQbywr+0k6M3Vinft0T72R6CDAcDrne2IAgSud4uWCzFgc5HA==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.2.tgz",
|
||||
"integrity": "sha512-q+Ec2648JtBpKiu/FSJm8HAsFXlNvioHeBCbTP12T1SGcHYwhqHULSfQgFkPgHDu3kzNp2Kem4J54bK4rPQ5SQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"glob": "10.3.10"
|
||||
|
@ -2074,9 +2074,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-darwin-arm64": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.4.tgz",
|
||||
"integrity": "sha512-ubmUkbmW65nIAOmoxT1IROZdmmJMmdYvXIe8211send9ZYJu+SqxSnJM4TrPj9wmL6g9Atvj0S/2cFmMSS99jg==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.2.tgz",
|
||||
"integrity": "sha512-3iPgMhzbalizGwHNFUcGnDhFPSgVBHQ8aqSTAMxB5BvJG0oYrDf1WOJZlbXBgunOEj/8KMVbejEur/FpvFsgFQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
|
@ -2090,9 +2090,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-darwin-x64": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.4.tgz",
|
||||
"integrity": "sha512-b0Xo1ELj3u7IkZWAKcJPJEhBop117U78l70nfoQGo4xUSvv0PJSTaV4U9xQBLvZlnjsYkc8RwQN1HoH/oQmLlQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.2.tgz",
|
||||
"integrity": "sha512-x7Afi/jt0ZBRUZHTi49yyej4o8znfIMHO4RvThuoc0P+uli8Jd99y5GKjxoYunPKsXL09xBXEM1+OQy2xEL0Ag==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
|
@ -2106,9 +2106,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-linux-arm64-gnu": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.4.tgz",
|
||||
"integrity": "sha512-457G0hcLrdYA/u1O2XkRMsDKId5VKe3uKPvrKVOyuARa6nXrdhJOOYU9hkKKyQTMru1B8qEP78IAhf/1XnVqKA==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.2.tgz",
|
||||
"integrity": "sha512-zbfPtkk7L41ODMJwSp5VbmPozPmMMQrzAc0HAUomVeVIIwlDGs/UCqLJvLNDt4jpWgc21SjjyIn762lNGrMaUA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
|
@ -2122,9 +2122,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-linux-arm64-musl": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.4.tgz",
|
||||
"integrity": "sha512-l/kMG+z6MB+fKA9KdtyprkTQ1ihlJcBh66cf0HvqGP+rXBbOXX0dpJatjZbHeunvEHoBBS69GYQG5ry78JMy3g==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.2.tgz",
|
||||
"integrity": "sha512-wPbS3pI/JU16rm3XdLvvTmlsmm1nd+sBa2ohXgBZcShX4TgOjD4R+RqHKlI1cjo/jDZKXt6OxmcU0Iys0OC/yg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
|
@ -2138,9 +2138,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-linux-x64-gnu": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.4.tgz",
|
||||
"integrity": "sha512-BapIFZ3ZRnvQ1uWbmqEGJuPT9cgLwvKtxhK/L2t4QYO7l+/DxXuIGjvp1x8rvfa/x1FFSsipERZK70pewbtJtw==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.2.tgz",
|
||||
"integrity": "sha512-NqWOHqqq8iC9tuHvZxjQ2tX+jWy2X9y8NX2mcB4sj2bIccuCxbIZrU/ThFPZZPauygajZuVQ6zediejQHwZHwQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
|
@ -2154,9 +2154,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-linux-x64-musl": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.4.tgz",
|
||||
"integrity": "sha512-mqVxTwk4XuBl49qn2A5UmzFImoL1iLm0KQQwtdRJRKl21ylQwwGCxJtIYo2rbfkZHoSKlh/YgztY0qH3wG1xIg==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.2.tgz",
|
||||
"integrity": "sha512-lGepHhwb9sGhCcU7999+iK1ZZT+6rrIoVg40MP7DZski9GIZP80wORSbt5kJzh9v2x2ev2lxC6VgwMQT0PcgTA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
|
@ -2170,9 +2170,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-win32-arm64-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-xzxF4ErcumXjO2Pvg/wVGrtr9QQJLk3IyQX1ddAC/fi6/5jZCZ9xpuL9Tzc4KPWMFq8GGWFVDMshZOdHGdkvag==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-TZSh/48SfcLEQ4rD25VVn2kdIgUWmMflRX3OiyPwGNXn3NiyPqhqei/BaqCYXViIQ+6QsG9R0C8LftMqy8JPMA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
|
@ -2186,9 +2186,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-win32-ia32-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-WZiz8OdbkpRw6/IU/lredZWKKZopUMhcI2F+XiMAcPja0uZYdMTZQRoQ0WZcvinn9xZAidimE7tN9W5v9Yyfyw==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-M0tBVNMEBJN2ZNQWlcekMn6pvLria7Sa2Fai5znm7CCJz4pP3lrvlSxhKdkCerk0D9E0bqx5yAo3o2Q7RrD4gA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
|
@ -2202,9 +2202,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@next/swc-win32-x64-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-4Rto21sPfw555sZ/XNLqfxDUNeLhNYGO2dlPqsnuCg8N8a2a9u1ltqBOPQ4vj1Gf7eJC0W2hHG2eYUHuiXgY2w==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-a/20E/wtTJZ3Ykv3f/8F0l7TtgQa2LWHU2oNB9bsu0VjqGuGGHmm/q6waoUNQYTVPYrrlxxaHjJcDV6aiSTt/w==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
|
@ -2298,12 +2298,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz",
|
||||
"integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.43.1.tgz",
|
||||
"integrity": "sha512-HgtQzFgNEEo4TE22K/X7sYTYNqEMMTZmFS8kTq6m8hXj+m1D8TgwgIbumHddJa9h4yl4GkKb8/bgAl2+g7eDgA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"playwright": "1.42.1"
|
||||
"playwright": "1.43.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
|
@ -3733,38 +3733,44 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@stripe/stripe-js": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-3.1.0.tgz",
|
||||
"integrity": "sha512-7+ciE35i8NZ6l4FiO1qFkBoZ64ul6B2ZhBVyygB+e/2EZa2WLUyjoxrP53SagnUW7+/q25nDyDLzQq5F0ebOEw==",
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-3.3.0.tgz",
|
||||
"integrity": "sha512-dUgAsko9KoYC1U2TIawHzbkQJzPoApxCc1Qf6/j318d1ArViyh6ROHVYTxnU3RlOQL/utUD9I4/QoyiCowsgrw==",
|
||||
"engines": {
|
||||
"node": ">=12.16"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/counter": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
|
||||
"integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ=="
|
||||
},
|
||||
"node_modules/@swc/helpers": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz",
|
||||
"integrity": "sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==",
|
||||
"version": "0.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz",
|
||||
"integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==",
|
||||
"dependencies": {
|
||||
"@swc/counter": "^0.1.3",
|
||||
"tslib": "^2.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@testing-library/dom": {
|
||||
"version": "9.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.4.tgz",
|
||||
"integrity": "sha512-FlS4ZWlp97iiNWig0Muq8p+3rVDjRiYE+YKGbAqXOu9nwJFFOdL00kFpz42M+4huzYi86vAK1sOOfyOG45muIQ==",
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.0.0.tgz",
|
||||
"integrity": "sha512-PmJPnogldqoVFf+EwbHvbBJ98MmqASV8kLrBYgsDNxQcFMeIS7JFL48sfyXvuMtgmWO/wMhh25odr+8VhDmn4g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.10.4",
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@types/aria-query": "^5.0.1",
|
||||
"aria-query": "5.1.3",
|
||||
"aria-query": "5.3.0",
|
||||
"chalk": "^4.1.0",
|
||||
"dom-accessibility-api": "^0.5.9",
|
||||
"lz-string": "^1.5.0",
|
||||
"pretty-format": "^27.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@testing-library/dom/node_modules/ansi-styles": {
|
||||
|
@ -3944,17 +3950,17 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@testing-library/react": {
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.2.2.tgz",
|
||||
"integrity": "sha512-SOUuM2ysCvjUWBXTNfQ/ztmnKDmqaiPV3SvoIuyxMUca45rbSWWAT/qB8CUs/JQ/ux/8JFs9DNdFQ3f6jH3crA==",
|
||||
"version": "15.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/react/-/react-15.0.2.tgz",
|
||||
"integrity": "sha512-5mzIpuytB1ctpyywvyaY2TAAUQVCZIGqwiqFQf6u9lvj/SJQepGUzNV18Xpk+NLCaCE2j7CWrZE0tEf9xLZYiQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@testing-library/dom": "^9.0.0",
|
||||
"@testing-library/dom": "^10.0.0",
|
||||
"@types/react-dom": "^18.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
"node": ">=18"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0",
|
||||
|
@ -4155,9 +4161,9 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/react": {
|
||||
"version": "18.2.73",
|
||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.73.tgz",
|
||||
"integrity": "sha512-XcGdod0Jjv84HOC7N5ziY3x+qL0AfmubvKOZ9hJjJ2yd5EE+KYjWhdOjt387e9HPheHkdggF9atTifMRtyAaRA==",
|
||||
"version": "18.2.79",
|
||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.79.tgz",
|
||||
"integrity": "sha512-RwGAGXPl9kSXwdNTafkOEuFrTBD5SA2B3iEB96xi8+xu5ddUa/cpvyVCSNn+asgLCTHkb5ZxN8gbuibYJi4s1w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/prop-types": "*",
|
||||
|
@ -5044,11 +5050,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/aria-query": {
|
||||
"version": "5.1.3",
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz",
|
||||
"integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"deep-equal": "^2.0.5"
|
||||
"dequal": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/array-buffer-byte-length": {
|
||||
|
@ -5621,17 +5628,17 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/cldr-core": {
|
||||
"version": "44.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-core/-/cldr-core-44.1.0.tgz",
|
||||
"integrity": "sha512-ssbJaXu3pCkc4YqNC6xHhgleZG7YqnOFZ9laMlJfHOfnspoA9waI4AH54gKB3Fe/+wI4i3cVxKFdCTVGTRw+UA==",
|
||||
"version": "45.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-core/-/cldr-core-45.0.0.tgz",
|
||||
"integrity": "sha512-gQVxy3gzOQpXiTRGmlKiRQFLYimrr1RgvqGKZCS61JgmdkeNm7+LZGx+Lhw5/AW0t8WMM/uZhf4CMva6LuUobQ==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/cldr-localenames-modern": {
|
||||
"version": "44.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-localenames-modern/-/cldr-localenames-modern-44.1.0.tgz",
|
||||
"integrity": "sha512-PJzjKiknuCG3dPmk4fFvGbMLtAC/TbKidBsQrmMRypENrw2ExtbJPeUqenG5KIfrZdXVqn9DXMIG9B9W5UCbgg==",
|
||||
"version": "45.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-localenames-modern/-/cldr-localenames-modern-45.0.0.tgz",
|
||||
"integrity": "sha512-5r95nsq5LUNW2JfuZLygnHplFO2dNN66/XpD16bA1mZd/5IPIOjVf7GSin+4BF5FkvrEN+dqg6bWrIozabi4Qg==",
|
||||
"peerDependencies": {
|
||||
"cldr-core": "44.1.0"
|
||||
"cldr-core": "45.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cli-cursor": {
|
||||
|
@ -6163,6 +6170,15 @@
|
|||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/dequal": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
|
||||
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/detect-newline": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz",
|
||||
|
@ -6576,14 +6592,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/eslint-config-next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.1.4.tgz",
|
||||
"integrity": "sha512-cihIahbhYAWwXJwZkAaRPpUi5t9aOi/HdfWXOjZeUOqNWXHD8X22kd1KG58Dc3MVaRx3HoR/oMGk2ltcrqDn8g==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.2.2.tgz",
|
||||
"integrity": "sha512-12/uFc0KX+wUs7EDpOUGKMXBXZJiBVGdK5/m/QgXOCg2mQ0bQWoKSWNrCeOg7Vum6Kw1d1TW453W6xh+GbHquw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@next/eslint-plugin-next": "14.1.4",
|
||||
"@next/eslint-plugin-next": "14.2.2",
|
||||
"@rushstack/eslint-patch": "^1.3.3",
|
||||
"@typescript-eslint/parser": "^5.4.2 || ^6.0.0",
|
||||
"@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || 7.0.0 - 7.2.0",
|
||||
"eslint-import-resolver-node": "^0.3.6",
|
||||
"eslint-import-resolver-typescript": "^3.5.2",
|
||||
"eslint-plugin-import": "^2.28.1",
|
||||
|
@ -6602,9 +6618,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/eslint-config-next/node_modules/@next/eslint-plugin-next": {
|
||||
"version": "14.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.3.tgz",
|
||||
"integrity": "sha512-VCnZI2cy77Yaj3L7Uhs3+44ikMM1VD/fBMwvTBb3hIaTIuqa+DmG4dhUDq+MASu3yx97KhgsVJbsas0XuiKyww==",
|
||||
"version": "14.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.1.tgz",
|
||||
"integrity": "sha512-Fp+mthEBjkn8r9qd6o4JgxKp0IDEzW0VYHD8ZC05xS5/lFNwHKuOdr2kVhWG7BQCO9L6eeepshM1Wbs2T+LgSg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"glob": "10.3.10"
|
||||
|
@ -7427,9 +7443,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/fast-check": {
|
||||
"version": "3.17.1",
|
||||
"resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.17.1.tgz",
|
||||
"integrity": "sha512-jIKXJVe6ZO0SpwEgVtEVujTf8TwjI9wMXFJCjsDHUB3RroUbXBgF4kOSz3A7MW0UR26aqsoB8i9O2mjtjERAiA==",
|
||||
"version": "3.17.2",
|
||||
"resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.17.2.tgz",
|
||||
"integrity": "sha512-+3DPTxtxABLgmmVpYxrash3DHoq0cMa1jjLYNp3qqokKKhqVEaS4lbnaDKqWU5Dd6C2pEudPPBAEEQ9nUou9OQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
|
@ -11667,9 +11683,9 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/msw": {
|
||||
"version": "2.2.13",
|
||||
"resolved": "https://registry.npmjs.org/msw/-/msw-2.2.13.tgz",
|
||||
"integrity": "sha512-ljFf1xZsU0b4zv1l7xzEmC6OZA6yD06hcx0H+dc8V0VypaP3HGYJa1rMLjQbBWl32ptGhcfwcPCWDB1wjmsftw==",
|
||||
"version": "2.2.14",
|
||||
"resolved": "https://registry.npmjs.org/msw/-/msw-2.2.14.tgz",
|
||||
"integrity": "sha512-64i8rNCa1xzDK8ZYsTrVMli05D687jty8+Th+PU5VTbJ2/4P7fkQFVyDQ6ZFT5FrNR8z2BHhbY47fKNvfHrumA==",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"@bundled-es-modules/cookie": "^2.0.0",
|
||||
|
@ -11879,13 +11895,13 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/next/-/next-14.1.4.tgz",
|
||||
"integrity": "sha512-1WTaXeSrUwlz/XcnhGTY7+8eiaFvdet5z9u3V2jb+Ek1vFo0VhHKSAIJvDWfQpttWjnyw14kBeq28TPq7bTeEQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/next/-/next-14.2.2.tgz",
|
||||
"integrity": "sha512-oGwUaa2bCs47FbuxWMpOoXtBMPYpvTPgdZr3UAo+pu7Ns00z9otmYpoeV1HEiYL06AlRQQIA/ypK526KjJfaxg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@next/env": "14.1.4",
|
||||
"@swc/helpers": "0.5.2",
|
||||
"@next/env": "14.2.2",
|
||||
"@swc/helpers": "0.5.5",
|
||||
"busboy": "1.6.0",
|
||||
"caniuse-lite": "^1.0.30001579",
|
||||
"graceful-fs": "^4.2.11",
|
||||
|
@ -11899,18 +11915,19 @@
|
|||
"node": ">=18.17.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@next/swc-darwin-arm64": "14.1.4",
|
||||
"@next/swc-darwin-x64": "14.1.4",
|
||||
"@next/swc-linux-arm64-gnu": "14.1.4",
|
||||
"@next/swc-linux-arm64-musl": "14.1.4",
|
||||
"@next/swc-linux-x64-gnu": "14.1.4",
|
||||
"@next/swc-linux-x64-musl": "14.1.4",
|
||||
"@next/swc-win32-arm64-msvc": "14.1.4",
|
||||
"@next/swc-win32-ia32-msvc": "14.1.4",
|
||||
"@next/swc-win32-x64-msvc": "14.1.4"
|
||||
"@next/swc-darwin-arm64": "14.2.2",
|
||||
"@next/swc-darwin-x64": "14.2.2",
|
||||
"@next/swc-linux-arm64-gnu": "14.2.2",
|
||||
"@next/swc-linux-arm64-musl": "14.2.2",
|
||||
"@next/swc-linux-x64-gnu": "14.2.2",
|
||||
"@next/swc-linux-x64-musl": "14.2.2",
|
||||
"@next/swc-win32-arm64-msvc": "14.2.2",
|
||||
"@next/swc-win32-ia32-msvc": "14.2.2",
|
||||
"@next/swc-win32-x64-msvc": "14.2.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@opentelemetry/api": "^1.1.0",
|
||||
"@playwright/test": "^1.41.2",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"sass": "^1.3.0"
|
||||
|
@ -11919,6 +11936,9 @@
|
|||
"@opentelemetry/api": {
|
||||
"optional": true
|
||||
},
|
||||
"@playwright/test": {
|
||||
"optional": true
|
||||
},
|
||||
"sass": {
|
||||
"optional": true
|
||||
}
|
||||
|
@ -12403,12 +12423,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz",
|
||||
"integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.43.1.tgz",
|
||||
"integrity": "sha512-V7SoH0ai2kNt1Md9E3Gwas5B9m8KR2GVvwZnAI6Pg0m3sh7UvgiYhRrhsziCmqMJNouPckiOhk8T+9bSAK0VIA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"playwright-core": "1.42.1"
|
||||
"playwright-core": "1.43.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
|
@ -12421,9 +12441,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz",
|
||||
"integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.43.1.tgz",
|
||||
"integrity": "sha512-EI36Mto2Vrx6VF7rm708qSnesVQKbxEWvPrfA1IPY6HgczBplDx7ENtx+K2n4kJ41sLLkuGfmb0ZLSSXlDhqPg==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
|
@ -12738,9 +12758,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/react-intersection-observer": {
|
||||
"version": "9.8.1",
|
||||
"resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.8.1.tgz",
|
||||
"integrity": "sha512-QzOFdROX8D8MH3wE3OVKH0f3mLjKTtEN1VX/rkNuECCff+aKky0pIjulDhr3Ewqj5el/L+MhBkM3ef0Tbt+qUQ==",
|
||||
"version": "9.8.2",
|
||||
"resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.8.2.tgz",
|
||||
"integrity": "sha512-901naEiiZmse3p+AmtbQ3NL9xx+gQ8TXLiGDc+8GiE3JKJkNV3vP737aGuWTAXBA+1QqxPrDDE+fIEgYpGDlrQ==",
|
||||
"peerDependencies": {
|
||||
"react": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0",
|
||||
"react-dom": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0"
|
||||
|
@ -13165,9 +13185,9 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/sass": {
|
||||
"version": "1.72.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.72.0.tgz",
|
||||
"integrity": "sha512-Gpczt3WA56Ly0Mn8Sl21Vj94s1axi9hDIzDFn9Ph9x3C3p4nNyvsqJoQyVXKou6cBlfFWEgRW4rT8Tb4i3XnVA==",
|
||||
"version": "1.75.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.75.0.tgz",
|
||||
"integrity": "sha512-ShMYi3WkrDWxExyxSZPst4/okE9ts46xZmJDSawJQrnte7M1V9fScVB+uNXOVKRBt0PggHOwoZcn8mYX4trnBw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"chokidar": ">=3.0.0 <4.0.0",
|
||||
|
@ -14569,9 +14589,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.4.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz",
|
||||
"integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==",
|
||||
"version": "5.4.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
|
||||
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
|
||||
"devOptional": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
|
@ -16594,15 +16614,15 @@
|
|||
}
|
||||
},
|
||||
"@next/env": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.4.tgz",
|
||||
"integrity": "sha512-e7X7bbn3Z6DWnDi75UWn+REgAbLEqxI8Tq2pkFOFAMpWAWApz/YCUhtWMWn410h8Q2fYiYL7Yg5OlxMOCfFjJQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.2.tgz",
|
||||
"integrity": "sha512-sk72qRfM1Q90XZWYRoJKu/UWlTgihrASiYw/scb15u+tyzcze3bOuJ/UV6TBOQEeUaxOkRqGeuGUdiiuxc5oqw==",
|
||||
"dev": true
|
||||
},
|
||||
"@next/eslint-plugin-next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.4.tgz",
|
||||
"integrity": "sha512-n4zYNLSyCo0Ln5b7qxqQeQ34OZKXwgbdcx6kmkQbywr+0k6M3Vinft0T72R6CDAcDrne2IAgSud4uWCzFgc5HA==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.2.tgz",
|
||||
"integrity": "sha512-q+Ec2648JtBpKiu/FSJm8HAsFXlNvioHeBCbTP12T1SGcHYwhqHULSfQgFkPgHDu3kzNp2Kem4J54bK4rPQ5SQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"glob": "10.3.10"
|
||||
|
@ -16642,65 +16662,65 @@
|
|||
}
|
||||
},
|
||||
"@next/swc-darwin-arm64": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.4.tgz",
|
||||
"integrity": "sha512-ubmUkbmW65nIAOmoxT1IROZdmmJMmdYvXIe8211send9ZYJu+SqxSnJM4TrPj9wmL6g9Atvj0S/2cFmMSS99jg==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.2.tgz",
|
||||
"integrity": "sha512-3iPgMhzbalizGwHNFUcGnDhFPSgVBHQ8aqSTAMxB5BvJG0oYrDf1WOJZlbXBgunOEj/8KMVbejEur/FpvFsgFQ==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-darwin-x64": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.4.tgz",
|
||||
"integrity": "sha512-b0Xo1ELj3u7IkZWAKcJPJEhBop117U78l70nfoQGo4xUSvv0PJSTaV4U9xQBLvZlnjsYkc8RwQN1HoH/oQmLlQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.2.tgz",
|
||||
"integrity": "sha512-x7Afi/jt0ZBRUZHTi49yyej4o8znfIMHO4RvThuoc0P+uli8Jd99y5GKjxoYunPKsXL09xBXEM1+OQy2xEL0Ag==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-linux-arm64-gnu": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.4.tgz",
|
||||
"integrity": "sha512-457G0hcLrdYA/u1O2XkRMsDKId5VKe3uKPvrKVOyuARa6nXrdhJOOYU9hkKKyQTMru1B8qEP78IAhf/1XnVqKA==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.2.tgz",
|
||||
"integrity": "sha512-zbfPtkk7L41ODMJwSp5VbmPozPmMMQrzAc0HAUomVeVIIwlDGs/UCqLJvLNDt4jpWgc21SjjyIn762lNGrMaUA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-linux-arm64-musl": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.4.tgz",
|
||||
"integrity": "sha512-l/kMG+z6MB+fKA9KdtyprkTQ1ihlJcBh66cf0HvqGP+rXBbOXX0dpJatjZbHeunvEHoBBS69GYQG5ry78JMy3g==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.2.tgz",
|
||||
"integrity": "sha512-wPbS3pI/JU16rm3XdLvvTmlsmm1nd+sBa2ohXgBZcShX4TgOjD4R+RqHKlI1cjo/jDZKXt6OxmcU0Iys0OC/yg==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-linux-x64-gnu": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.4.tgz",
|
||||
"integrity": "sha512-BapIFZ3ZRnvQ1uWbmqEGJuPT9cgLwvKtxhK/L2t4QYO7l+/DxXuIGjvp1x8rvfa/x1FFSsipERZK70pewbtJtw==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.2.tgz",
|
||||
"integrity": "sha512-NqWOHqqq8iC9tuHvZxjQ2tX+jWy2X9y8NX2mcB4sj2bIccuCxbIZrU/ThFPZZPauygajZuVQ6zediejQHwZHwQ==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-linux-x64-musl": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.4.tgz",
|
||||
"integrity": "sha512-mqVxTwk4XuBl49qn2A5UmzFImoL1iLm0KQQwtdRJRKl21ylQwwGCxJtIYo2rbfkZHoSKlh/YgztY0qH3wG1xIg==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.2.tgz",
|
||||
"integrity": "sha512-lGepHhwb9sGhCcU7999+iK1ZZT+6rrIoVg40MP7DZski9GIZP80wORSbt5kJzh9v2x2ev2lxC6VgwMQT0PcgTA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-win32-arm64-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-xzxF4ErcumXjO2Pvg/wVGrtr9QQJLk3IyQX1ddAC/fi6/5jZCZ9xpuL9Tzc4KPWMFq8GGWFVDMshZOdHGdkvag==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-TZSh/48SfcLEQ4rD25VVn2kdIgUWmMflRX3OiyPwGNXn3NiyPqhqei/BaqCYXViIQ+6QsG9R0C8LftMqy8JPMA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-win32-ia32-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-WZiz8OdbkpRw6/IU/lredZWKKZopUMhcI2F+XiMAcPja0uZYdMTZQRoQ0WZcvinn9xZAidimE7tN9W5v9Yyfyw==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-M0tBVNMEBJN2ZNQWlcekMn6pvLria7Sa2Fai5znm7CCJz4pP3lrvlSxhKdkCerk0D9E0bqx5yAo3o2Q7RrD4gA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"@next/swc-win32-x64-msvc": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.4.tgz",
|
||||
"integrity": "sha512-4Rto21sPfw555sZ/XNLqfxDUNeLhNYGO2dlPqsnuCg8N8a2a9u1ltqBOPQ4vj1Gf7eJC0W2hHG2eYUHuiXgY2w==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.2.tgz",
|
||||
"integrity": "sha512-a/20E/wtTJZ3Ykv3f/8F0l7TtgQa2LWHU2oNB9bsu0VjqGuGGHmm/q6waoUNQYTVPYrrlxxaHjJcDV6aiSTt/w==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
|
@ -16763,12 +16783,12 @@
|
|||
}
|
||||
},
|
||||
"@playwright/test": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz",
|
||||
"integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.43.1.tgz",
|
||||
"integrity": "sha512-HgtQzFgNEEo4TE22K/X7sYTYNqEMMTZmFS8kTq6m8hXj+m1D8TgwgIbumHddJa9h4yl4GkKb8/bgAl2+g7eDgA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"playwright": "1.42.1"
|
||||
"playwright": "1.43.1"
|
||||
}
|
||||
},
|
||||
"@react-aria/breadcrumbs": {
|
||||
|
@ -17906,28 +17926,34 @@
|
|||
}
|
||||
},
|
||||
"@stripe/stripe-js": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-3.1.0.tgz",
|
||||
"integrity": "sha512-7+ciE35i8NZ6l4FiO1qFkBoZ64ul6B2ZhBVyygB+e/2EZa2WLUyjoxrP53SagnUW7+/q25nDyDLzQq5F0ebOEw=="
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-3.3.0.tgz",
|
||||
"integrity": "sha512-dUgAsko9KoYC1U2TIawHzbkQJzPoApxCc1Qf6/j318d1ArViyh6ROHVYTxnU3RlOQL/utUD9I4/QoyiCowsgrw=="
|
||||
},
|
||||
"@swc/counter": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
|
||||
"integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ=="
|
||||
},
|
||||
"@swc/helpers": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz",
|
||||
"integrity": "sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==",
|
||||
"version": "0.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz",
|
||||
"integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==",
|
||||
"requires": {
|
||||
"@swc/counter": "^0.1.3",
|
||||
"tslib": "^2.4.0"
|
||||
}
|
||||
},
|
||||
"@testing-library/dom": {
|
||||
"version": "9.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.4.tgz",
|
||||
"integrity": "sha512-FlS4ZWlp97iiNWig0Muq8p+3rVDjRiYE+YKGbAqXOu9nwJFFOdL00kFpz42M+4huzYi86vAK1sOOfyOG45muIQ==",
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.0.0.tgz",
|
||||
"integrity": "sha512-PmJPnogldqoVFf+EwbHvbBJ98MmqASV8kLrBYgsDNxQcFMeIS7JFL48sfyXvuMtgmWO/wMhh25odr+8VhDmn4g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/code-frame": "^7.10.4",
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@types/aria-query": "^5.0.1",
|
||||
"aria-query": "5.1.3",
|
||||
"aria-query": "5.3.0",
|
||||
"chalk": "^4.1.0",
|
||||
"dom-accessibility-api": "^0.5.9",
|
||||
"lz-string": "^1.5.0",
|
||||
|
@ -18035,13 +18061,13 @@
|
|||
}
|
||||
},
|
||||
"@testing-library/react": {
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.2.2.tgz",
|
||||
"integrity": "sha512-SOUuM2ysCvjUWBXTNfQ/ztmnKDmqaiPV3SvoIuyxMUca45rbSWWAT/qB8CUs/JQ/ux/8JFs9DNdFQ3f6jH3crA==",
|
||||
"version": "15.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@testing-library/react/-/react-15.0.2.tgz",
|
||||
"integrity": "sha512-5mzIpuytB1ctpyywvyaY2TAAUQVCZIGqwiqFQf6u9lvj/SJQepGUzNV18Xpk+NLCaCE2j7CWrZE0tEf9xLZYiQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@testing-library/dom": "^9.0.0",
|
||||
"@testing-library/dom": "^10.0.0",
|
||||
"@types/react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
|
@ -18212,9 +18238,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"@types/react": {
|
||||
"version": "18.2.73",
|
||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.73.tgz",
|
||||
"integrity": "sha512-XcGdod0Jjv84HOC7N5ziY3x+qL0AfmubvKOZ9hJjJ2yd5EE+KYjWhdOjt387e9HPheHkdggF9atTifMRtyAaRA==",
|
||||
"version": "18.2.79",
|
||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.79.tgz",
|
||||
"integrity": "sha512-RwGAGXPl9kSXwdNTafkOEuFrTBD5SA2B3iEB96xi8+xu5ddUa/cpvyVCSNn+asgLCTHkb5ZxN8gbuibYJi4s1w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/prop-types": "*",
|
||||
|
@ -18792,10 +18818,12 @@
|
|||
}
|
||||
},
|
||||
"aria-query": {
|
||||
"version": "5.1.3",
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz",
|
||||
"integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"deep-equal": "^2.0.5"
|
||||
"dequal": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"array-buffer-byte-length": {
|
||||
|
@ -19162,15 +19190,15 @@
|
|||
"dev": true
|
||||
},
|
||||
"cldr-core": {
|
||||
"version": "44.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-core/-/cldr-core-44.1.0.tgz",
|
||||
"integrity": "sha512-ssbJaXu3pCkc4YqNC6xHhgleZG7YqnOFZ9laMlJfHOfnspoA9waI4AH54gKB3Fe/+wI4i3cVxKFdCTVGTRw+UA==",
|
||||
"version": "45.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-core/-/cldr-core-45.0.0.tgz",
|
||||
"integrity": "sha512-gQVxy3gzOQpXiTRGmlKiRQFLYimrr1RgvqGKZCS61JgmdkeNm7+LZGx+Lhw5/AW0t8WMM/uZhf4CMva6LuUobQ==",
|
||||
"peer": true
|
||||
},
|
||||
"cldr-localenames-modern": {
|
||||
"version": "44.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-localenames-modern/-/cldr-localenames-modern-44.1.0.tgz",
|
||||
"integrity": "sha512-PJzjKiknuCG3dPmk4fFvGbMLtAC/TbKidBsQrmMRypENrw2ExtbJPeUqenG5KIfrZdXVqn9DXMIG9B9W5UCbgg==",
|
||||
"version": "45.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cldr-localenames-modern/-/cldr-localenames-modern-45.0.0.tgz",
|
||||
"integrity": "sha512-5r95nsq5LUNW2JfuZLygnHplFO2dNN66/XpD16bA1mZd/5IPIOjVf7GSin+4BF5FkvrEN+dqg6bWrIozabi4Qg==",
|
||||
"requires": {}
|
||||
},
|
||||
"cli-cursor": {
|
||||
|
@ -19541,6 +19569,12 @@
|
|||
"version": "1.0.0",
|
||||
"dev": true
|
||||
},
|
||||
"dequal": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
|
||||
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
|
||||
"dev": true
|
||||
},
|
||||
"detect-newline": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz",
|
||||
|
@ -19947,14 +19981,14 @@
|
|||
}
|
||||
},
|
||||
"eslint-config-next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.1.4.tgz",
|
||||
"integrity": "sha512-cihIahbhYAWwXJwZkAaRPpUi5t9aOi/HdfWXOjZeUOqNWXHD8X22kd1KG58Dc3MVaRx3HoR/oMGk2ltcrqDn8g==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.2.2.tgz",
|
||||
"integrity": "sha512-12/uFc0KX+wUs7EDpOUGKMXBXZJiBVGdK5/m/QgXOCg2mQ0bQWoKSWNrCeOg7Vum6Kw1d1TW453W6xh+GbHquw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@next/eslint-plugin-next": "14.1.4",
|
||||
"@next/eslint-plugin-next": "14.2.2",
|
||||
"@rushstack/eslint-patch": "^1.3.3",
|
||||
"@typescript-eslint/parser": "^5.4.2 || ^6.0.0",
|
||||
"@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || 7.0.0 - 7.2.0",
|
||||
"eslint-import-resolver-node": "^0.3.6",
|
||||
"eslint-import-resolver-typescript": "^3.5.2",
|
||||
"eslint-plugin-import": "^2.28.1",
|
||||
|
@ -19964,8 +19998,8 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@next/eslint-plugin-next": {
|
||||
"version": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.3.tgz",
|
||||
"integrity": "sha512-VCnZI2cy77Yaj3L7Uhs3+44ikMM1VD/fBMwvTBb3hIaTIuqa+DmG4dhUDq+MASu3yx97KhgsVJbsas0XuiKyww==",
|
||||
"version": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.1.tgz",
|
||||
"integrity": "sha512-Fp+mthEBjkn8r9qd6o4JgxKp0IDEzW0VYHD8ZC05xS5/lFNwHKuOdr2kVhWG7BQCO9L6eeepshM1Wbs2T+LgSg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"glob": "10.3.10"
|
||||
|
@ -20392,9 +20426,9 @@
|
|||
}
|
||||
},
|
||||
"fast-check": {
|
||||
"version": "3.17.1",
|
||||
"resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.17.1.tgz",
|
||||
"integrity": "sha512-jIKXJVe6ZO0SpwEgVtEVujTf8TwjI9wMXFJCjsDHUB3RroUbXBgF4kOSz3A7MW0UR26aqsoB8i9O2mjtjERAiA==",
|
||||
"version": "3.17.2",
|
||||
"resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.17.2.tgz",
|
||||
"integrity": "sha512-+3DPTxtxABLgmmVpYxrash3DHoq0cMa1jjLYNp3qqokKKhqVEaS4lbnaDKqWU5Dd6C2pEudPPBAEEQ9nUou9OQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"pure-rand": "^6.1.0"
|
||||
|
@ -20532,24 +20566,24 @@
|
|||
"@fluent/langneg": "^0.7.0",
|
||||
"@fluent/react": "^0.15.2",
|
||||
"@mozilla-protocol/core": "^17.0.1",
|
||||
"@next/eslint-plugin-next": "^14.1.4",
|
||||
"@stripe/stripe-js": "^3.1.0",
|
||||
"@testing-library/dom": "^9.3.4",
|
||||
"@next/eslint-plugin-next": "^14.2.2",
|
||||
"@stripe/stripe-js": "^3.3.0",
|
||||
"@testing-library/dom": "^10.0.0",
|
||||
"@testing-library/jest-dom": "^6.4.2",
|
||||
"@testing-library/react": "^14.2.2",
|
||||
"@testing-library/react": "^15.0.2",
|
||||
"@testing-library/user-event": "^14.5.2",
|
||||
"@types/jest-axe": "^3.5.9",
|
||||
"@types/react": "^18.2.73",
|
||||
"@types/react": "^18.2.79",
|
||||
"@typescript-eslint/eslint-plugin": "^7.4.0",
|
||||
"@typescript-eslint/parser": "^7.4.0",
|
||||
"babel-jest": "^29.7.0",
|
||||
"chokidar-cli": "^3.0.0",
|
||||
"cldr-localenames-modern": "^44.1.0",
|
||||
"cldr-localenames-modern": "^45.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-next": "^14.1.4",
|
||||
"eslint-config-next": "^14.2.2",
|
||||
"eslint-plugin-jest-dom": "^5.2.0",
|
||||
"eslint-plugin-testing-library": "^6.2.0",
|
||||
"fast-check": "^3.17.1",
|
||||
"fast-check": "^3.17.2",
|
||||
"husky": "^9.0.11",
|
||||
"jest": "^29.7.0",
|
||||
"jest-axe": "^8.0.0",
|
||||
|
@ -20557,26 +20591,26 @@
|
|||
"jest-junit": "^16.0.0",
|
||||
"license-checker": "^25.0.1",
|
||||
"lint-staged": "^15.2.2",
|
||||
"msw": "^2.2.13",
|
||||
"next": "^14.1.4",
|
||||
"msw": "^2.2.14",
|
||||
"next": "^14.2.2",
|
||||
"prettier": "3.2.5",
|
||||
"react": "18.2.0",
|
||||
"react-aria": "^3.32.1",
|
||||
"react-confetti": "^6.1.0",
|
||||
"react-dom": "18.2.0",
|
||||
"react-ga": "^3.3.1",
|
||||
"react-intersection-observer": "^9.8.1",
|
||||
"react-intersection-observer": "^9.8.2",
|
||||
"react-qr-code": "^2.0.12",
|
||||
"react-singleton-hook": "^4.0.1",
|
||||
"react-stately": "^3.30.1",
|
||||
"react-test-renderer": "^18.2.0",
|
||||
"react-toastify": "^10.0.5",
|
||||
"sass": "^1.72.0",
|
||||
"sass": "^1.75.0",
|
||||
"stylelint": "^16.3.1",
|
||||
"stylelint-config-recommended-scss": "^14.0.0",
|
||||
"stylelint-scss": "^6.2.1",
|
||||
"swr": "^2.2.5",
|
||||
"typescript": "^5.4.3"
|
||||
"typescript": "^5.4.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@fluent/bundle": {
|
||||
|
@ -23345,9 +23379,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"msw": {
|
||||
"version": "2.2.13",
|
||||
"resolved": "https://registry.npmjs.org/msw/-/msw-2.2.13.tgz",
|
||||
"integrity": "sha512-ljFf1xZsU0b4zv1l7xzEmC6OZA6yD06hcx0H+dc8V0VypaP3HGYJa1rMLjQbBWl32ptGhcfwcPCWDB1wjmsftw==",
|
||||
"version": "2.2.14",
|
||||
"resolved": "https://registry.npmjs.org/msw/-/msw-2.2.14.tgz",
|
||||
"integrity": "sha512-64i8rNCa1xzDK8ZYsTrVMli05D687jty8+Th+PU5VTbJ2/4P7fkQFVyDQ6ZFT5FrNR8z2BHhbY47fKNvfHrumA==",
|
||||
"requires": {
|
||||
"@bundled-es-modules/cookie": "^2.0.0",
|
||||
"@bundled-es-modules/statuses": "^1.0.1",
|
||||
|
@ -23483,22 +23517,22 @@
|
|||
"dev": true
|
||||
},
|
||||
"next": {
|
||||
"version": "14.1.4",
|
||||
"resolved": "https://registry.npmjs.org/next/-/next-14.1.4.tgz",
|
||||
"integrity": "sha512-1WTaXeSrUwlz/XcnhGTY7+8eiaFvdet5z9u3V2jb+Ek1vFo0VhHKSAIJvDWfQpttWjnyw14kBeq28TPq7bTeEQ==",
|
||||
"version": "14.2.2",
|
||||
"resolved": "https://registry.npmjs.org/next/-/next-14.2.2.tgz",
|
||||
"integrity": "sha512-oGwUaa2bCs47FbuxWMpOoXtBMPYpvTPgdZr3UAo+pu7Ns00z9otmYpoeV1HEiYL06AlRQQIA/ypK526KjJfaxg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@next/env": "14.1.4",
|
||||
"@next/swc-darwin-arm64": "14.1.4",
|
||||
"@next/swc-darwin-x64": "14.1.4",
|
||||
"@next/swc-linux-arm64-gnu": "14.1.4",
|
||||
"@next/swc-linux-arm64-musl": "14.1.4",
|
||||
"@next/swc-linux-x64-gnu": "14.1.4",
|
||||
"@next/swc-linux-x64-musl": "14.1.4",
|
||||
"@next/swc-win32-arm64-msvc": "14.1.4",
|
||||
"@next/swc-win32-ia32-msvc": "14.1.4",
|
||||
"@next/swc-win32-x64-msvc": "14.1.4",
|
||||
"@swc/helpers": "0.5.2",
|
||||
"@next/env": "14.2.2",
|
||||
"@next/swc-darwin-arm64": "14.2.2",
|
||||
"@next/swc-darwin-x64": "14.2.2",
|
||||
"@next/swc-linux-arm64-gnu": "14.2.2",
|
||||
"@next/swc-linux-arm64-musl": "14.2.2",
|
||||
"@next/swc-linux-x64-gnu": "14.2.2",
|
||||
"@next/swc-linux-x64-musl": "14.2.2",
|
||||
"@next/swc-win32-arm64-msvc": "14.2.2",
|
||||
"@next/swc-win32-ia32-msvc": "14.2.2",
|
||||
"@next/swc-win32-x64-msvc": "14.2.2",
|
||||
"@swc/helpers": "0.5.5",
|
||||
"busboy": "1.6.0",
|
||||
"caniuse-lite": "^1.0.30001579",
|
||||
"graceful-fs": "^4.2.11",
|
||||
|
@ -23812,19 +23846,19 @@
|
|||
}
|
||||
},
|
||||
"playwright": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz",
|
||||
"integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.43.1.tgz",
|
||||
"integrity": "sha512-V7SoH0ai2kNt1Md9E3Gwas5B9m8KR2GVvwZnAI6Pg0m3sh7UvgiYhRrhsziCmqMJNouPckiOhk8T+9bSAK0VIA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fsevents": "2.3.2",
|
||||
"playwright-core": "1.42.1"
|
||||
"playwright-core": "1.43.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.42.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz",
|
||||
"integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==",
|
||||
"version": "1.43.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.43.1.tgz",
|
||||
"integrity": "sha512-EI36Mto2Vrx6VF7rm708qSnesVQKbxEWvPrfA1IPY6HgczBplDx7ENtx+K2n4kJ41sLLkuGfmb0ZLSSXlDhqPg==",
|
||||
"dev": true
|
||||
},
|
||||
"postcss": {
|
||||
|
@ -24014,9 +24048,9 @@
|
|||
"requires": {}
|
||||
},
|
||||
"react-intersection-observer": {
|
||||
"version": "9.8.1",
|
||||
"resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.8.1.tgz",
|
||||
"integrity": "sha512-QzOFdROX8D8MH3wE3OVKH0f3mLjKTtEN1VX/rkNuECCff+aKky0pIjulDhr3Ewqj5el/L+MhBkM3ef0Tbt+qUQ==",
|
||||
"version": "9.8.2",
|
||||
"resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.8.2.tgz",
|
||||
"integrity": "sha512-901naEiiZmse3p+AmtbQ3NL9xx+gQ8TXLiGDc+8GiE3JKJkNV3vP737aGuWTAXBA+1QqxPrDDE+fIEgYpGDlrQ==",
|
||||
"requires": {}
|
||||
},
|
||||
"react-is": {
|
||||
|
@ -24294,9 +24328,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"sass": {
|
||||
"version": "1.72.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.72.0.tgz",
|
||||
"integrity": "sha512-Gpczt3WA56Ly0Mn8Sl21Vj94s1axi9hDIzDFn9Ph9x3C3p4nNyvsqJoQyVXKou6cBlfFWEgRW4rT8Tb4i3XnVA==",
|
||||
"version": "1.75.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.75.0.tgz",
|
||||
"integrity": "sha512-ShMYi3WkrDWxExyxSZPst4/okE9ts46xZmJDSawJQrnte7M1V9fScVB+uNXOVKRBt0PggHOwoZcn8mYX4trnBw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"chokidar": ">=3.0.0 <4.0.0",
|
||||
|
@ -25252,9 +25286,9 @@
|
|||
}
|
||||
},
|
||||
"typescript": {
|
||||
"version": "5.4.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz",
|
||||
"integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==",
|
||||
"version": "5.4.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
|
||||
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
|
||||
"devOptional": true
|
||||
},
|
||||
"unbox-primitive": {
|
||||
|
|
|
@ -7,7 +7,7 @@
    "url": "https://github.com/mozilla/fx-private-relay/issues"
  },
  "devDependencies": {
    "@playwright/test": "1.42.1",
    "@playwright/test": "1.43.1",
    "dotenv": "^16.4.5"
  },
  "volta": {
@ -2,7 +2,6 @@ from django.contrib import admin

from .models import InboundContact, RealPhone, RelayNumber


admin.site.register(InboundContact, admin.ModelAdmin)
admin.site.register(RealPhone, admin.ModelAdmin)
admin.site.register(RelayNumber, admin.ModelAdmin)
@ -1,14 +1,13 @@
import logging

from django.apps import AppConfig, apps
from django.conf import settings
from django.utils.functional import cached_property

from twilio.base.instance_resource import InstanceResource
from twilio.request_validator import RequestValidator
from twilio.rest import Client

from django.apps import apps, AppConfig
from django.conf import settings
from django.utils.functional import cached_property


logger = logging.getLogger("events")
@ -1,9 +1,8 @@
import json

import requests

from django.conf import settings

import requests
from rest_framework import exceptions
@ -1,8 +1,8 @@
# Generated by Django 3.2.13 on 2022-05-28 14:48

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

@ -1,6 +1,7 @@
# Generated by Django 3.2.13 on 2022-05-29 16:00

from django.db import migrations, models

import phones.models


@ -1,6 +1,7 @@
# Generated by Django 3.2.13 on 2022-05-30 17:26

from django.db import migrations, models

import phones.models


@ -1,8 +1,8 @@
# Generated by Django 3.2.13 on 2022-06-04 16:37

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

@ -1,6 +1,7 @@
# Generated by Django 3.2.13 on 2022-07-01 19:08

from django.db import migrations, models

import phones.models


@ -1,6 +1,7 @@
# Generated by Django 3.2.13 on 2022-07-08 19:40

from django.db import migrations, models

import phones.models


@ -1,7 +1,8 @@
# Generated by Django 3.2.14 on 2022-08-07 21:30

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models

import phones.models


@ -1,4 +1,5 @@
# Generated by Django 3.2.15 on 2022-08-15 17:42
# ruff: noqa: E501

from django.db import migrations, models


@ -1,4 +1,5 @@
# Generated by Django 3.2.15 on 2022-09-13 19:59
# ruff: noqa: E501

from django.db import migrations, models


@ -1,4 +1,5 @@
# Generated by Django 3.2.15 on 2022-09-21 18:29
# ruff: noqa: E501

from django.db import migrations, models


@ -29,7 +29,7 @@ def add_db_default_forward_func(apps, schema_editor):
        ' "remaining_texts" integer NOT NULL DEFAULT 75,'
        ' "texts_blocked" integer NOT NULL DEFAULT 0,'
        ' "texts_forwarded" integer NOT NULL DEFAULT 0,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'  # noqa: E501
        ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE);'
    )
    schema_editor.execute(

@ -59,7 +59,7 @@ def add_db_default_forward_func(apps, schema_editor):
        ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
        ' "number" varchar(15) NOT NULL,'
        ' "location" varchar(255) NOT NULL,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'  # noqa: E501
        ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE,'
        ' "enabled" bool NOT NULL,'
        ' "calls_blocked" integer NOT NULL,'

@ -22,7 +22,7 @@ def add_db_default_forward_func(apps, schema_editor):
        ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
        ' "number" varchar(15) NOT NULL,'
        ' "location" varchar(255) NOT NULL,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
        ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'  # noqa: E501
        ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE,'
        ' "enabled" bool NOT NULL,'
        ' "calls_blocked" integer NOT NULL,'

@ -44,7 +44,7 @@ def add_db_default_forward_func(apps, schema_editor):
        ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key",'
        ' "enabled", "calls_blocked", "calls_forwarded", "remaining_texts",'
        ' "texts_blocked", "texts_forwarded", "remaining_seconds",'
        ' "remaining_minutes", "country_code", \'twilio\' FROM "phones_relaynumber";'
        ' "remaining_minutes", "country_code", \'twilio\' FROM "phones_relaynumber";'  # noqa: E501
    )
    schema_editor.execute('DROP TABLE "phones_relaynumber";')
    schema_editor.execute(
@ -1,22 +1,23 @@
from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional

import logging
import phonenumbers
import secrets
import string
from collections.abc import Iterator
from datetime import UTC, datetime, timedelta
from math import floor

from django.contrib.auth.models import User
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.migrations.recorder import MigrationRecorder
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse

import phonenumbers
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client


@ -40,7 +41,7 @@ def verification_code_default():


def verification_sent_date_default():
    return datetime.now(timezone.utc)
    return datetime.now(UTC)


def get_expired_unverified_realphone_records(number):

@ -48,7 +49,7 @@ def get_expired_unverified_realphone_records(number):
        number=number,
        verified=False,
        verification_sent_date__lt=(
            datetime.now(timezone.utc)
            datetime.now(UTC)
            - timedelta(0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE)
        ),
    )

@ -59,7 +60,7 @@ def get_pending_unverified_realphone_records(number):
        number=number,
        verified=False,
        verification_sent_date__gt=(
            datetime.now(timezone.utc)
            datetime.now(UTC)
            - timedelta(0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE)
        ),
    )

@ -79,13 +80,13 @@ def get_valid_realphone_verification_record(user, number, verification_code):
        number=number,
        verification_code=verification_code,
        verification_sent_date__gt=(
            datetime.now(timezone.utc)
            datetime.now(UTC)
            - timedelta(0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE)
        ),
    ).first()


def get_last_text_sender(relay_number: "RelayNumber") -> Optional["InboundContact"]:
def get_last_text_sender(relay_number: RelayNumber) -> InboundContact | None:
    """
    Get the last text sender.

@ -177,7 +178,7 @@ class RealPhone(models.Model):
    def mark_verified(self):
        incr_if_enabled("phones_RealPhone.mark_verified")
        self.verified = True
        self.verified_date = datetime.now(timezone.utc)
        self.verified_date = datetime.now(UTC)
        self.save(force_update=True)
        return self

@ -234,16 +235,16 @@ class RelayNumber(models.Model):
    created_at = models.DateTimeField(null=True, auto_now_add=True)

    @property
    def remaining_minutes(self):
    def remaining_minutes(self) -> int:
        # return a 0 or positive int for remaining minutes
        return floor(max(self.remaining_seconds, 0) / 60)

    @property
    def calls_and_texts_forwarded(self):
    def calls_and_texts_forwarded(self) -> int:
        return self.calls_forwarded + self.texts_forwarded

    @property
    def calls_and_texts_blocked(self):
    def calls_and_texts_blocked(self) -> int:
        return self.calls_blocked + self.texts_blocked

    @property

@ -392,7 +393,8 @@ def send_welcome_message(user, relay_number):
    client = twilio_client()
    client.messages.create(
        body=(
            "Welcome to Relay phone masking! 🎉 Please add your number to your contacts."
            "Welcome to Relay phone masking!"
            " 🎉 Please add your number to your contacts."
            " This will help you identify your Relay messages and calls."
        ),
        from_=settings.TWILIO_MAIN_NUMBER,

@ -402,7 +404,7 @@ def send_welcome_message(user, relay_number):


def last_inbound_date_default():
    return datetime.now(timezone.utc)
    return datetime.now(UTC)


class InboundContact(models.Model):

@ -452,7 +454,7 @@ def suggested_numbers(user):
    same_prefix_options.extend(convert_twilio_numbers_to_dict(twilio_nums))

    # look for numbers with same area code, 2-number prefix and suffix
    contains = "%s***%s" % (real_num[:7], real_num[10:]) if real_num else ""
    contains = f"{real_num[:7]}***{real_num[10:]}" if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    same_prefix_options.extend(convert_twilio_numbers_to_dict(twilio_nums))
@ -1,18 +1,18 @@
|
|||
from datetime import datetime, timezone, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from io import StringIO
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import call_command, CommandError
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import CommandError, call_command
|
||||
|
||||
import pytest
|
||||
from model_bakery import baker
|
||||
from pytest_django.fixtures import SettingsWrapper
|
||||
import pytest
|
||||
|
||||
if settings.PHONES_ENABLED:
|
||||
from .models_tests import make_phone_test_user
|
||||
from ..models import InboundContact, RealPhone, RelayNumber
|
||||
from .models_tests import make_phone_test_user
|
||||
|
||||
pytestmark = pytest.mark.skipif(
|
||||
not settings.PHONES_ENABLED, reason="PHONES_ENABLED is False"
|
||||
|
@ -31,7 +31,7 @@ def test_settings(settings: SettingsWrapper) -> SettingsWrapper:
|
|||
def phone_user(db: None, test_settings: SettingsWrapper) -> User:
|
||||
"""Return a Relay user with phone setup and phone usage."""
|
||||
# Create the user
|
||||
now = datetime.now(tz=timezone.utc)
|
||||
now = datetime.now(tz=UTC)
|
||||
phone_user = make_phone_test_user()
|
||||
phone_user.profile.date_subscribed = now - timedelta(days=15)
|
||||
phone_user.profile.save()
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
from types import SimpleNamespace
|
||||
import pytest
|
||||
import random
|
||||
import responses
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import Mock, call, patch
|
||||
from uuid import uuid4
|
||||
from unittest.mock import Mock, patch, call
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
|
@ -12,9 +10,12 @@ from django.core.cache import cache
|
|||
from django.core.exceptions import BadRequest, ValidationError
|
||||
from django.test import override_settings
|
||||
|
||||
import pytest
|
||||
import responses
|
||||
from allauth.socialaccount.models import SocialAccount, SocialToken
|
||||
from model_bakery import baker
|
||||
from twilio.base.exceptions import TwilioRestException
|
||||
|
||||
from emails.models import Profile
|
||||
|
||||
if settings.PHONES_ENABLED:
|
||||
|
@ -24,11 +25,11 @@ if settings.PHONES_ENABLED:
|
|||
RelayNumber,
|
||||
area_code_numbers,
|
||||
get_expired_unverified_realphone_records,
|
||||
get_valid_realphone_verification_record,
|
||||
get_last_text_sender,
|
||||
get_valid_realphone_verification_record,
|
||||
iq_fmt,
|
||||
location_numbers,
|
||||
suggested_numbers,
|
||||
iq_fmt,
|
||||
)
|
||||
|
||||
|
||||
|
@ -77,9 +78,7 @@ def mock_twilio_client(twilio_number_sid: str):
|
|||
def make_phone_test_user() -> User:
|
||||
phone_user = baker.make(User, email="phone_user@example.com")
|
||||
phone_user_profile = Profile.objects.get(user=phone_user)
|
||||
phone_user_profile.date_subscribed = datetime.now(tz=timezone.utc) - timedelta(
|
||||
days=15
|
||||
)
|
||||
phone_user_profile.date_subscribed = datetime.now(tz=UTC) - timedelta(days=15)
|
||||
phone_user_profile.save()
|
||||
upgrade_test_user_to_phone(phone_user)
|
||||
return phone_user
|
||||
|
@ -97,7 +96,7 @@ def upgrade_test_user_to_phone(user):
|
|||
baker.make(
|
||||
SocialToken,
|
||||
account=account,
|
||||
expires_at=datetime.now(timezone.utc) + timedelta(1),
|
||||
expires_at=datetime.now(UTC) + timedelta(1),
|
||||
)
|
||||
return user
|
||||
|
||||
|
@ -120,7 +119,7 @@ def test_get_valid_realphone_verification_record_returns_object(phone_user):
|
|||
real_phone = RealPhone.objects.create(
|
||||
user=phone_user,
|
||||
number=number,
|
||||
verification_sent_date=datetime.now(timezone.utc),
|
||||
verification_sent_date=datetime.now(UTC),
|
||||
)
|
||||
record = get_valid_realphone_verification_record(
|
||||
phone_user, number, real_phone.verification_code
|
||||
|
@ -135,7 +134,7 @@ def test_get_valid_realphone_verification_record_returns_none(phone_user):
|
|||
user=phone_user,
|
||||
number=number,
|
||||
verification_sent_date=(
|
||||
datetime.now(timezone.utc)
|
||||
datetime.now(UTC)
|
||||
- timedelta(0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE + 1)
|
||||
),
|
||||
)
|
||||
|
@ -202,7 +201,7 @@ def test_create_realphone_deletes_expired_unverified_records(
|
|||
number=number,
|
||||
verified=False,
|
||||
verification_sent_date=(
|
||||
datetime.now(timezone.utc)
|
||||
datetime.now(UTC)
|
||||
- timedelta(0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE + 1)
|
||||
),
|
||||
)
|
||||
|
@ -319,7 +318,7 @@ def real_phone_us(phone_user, mock_twilio_client):
|
|||
user=phone_user,
|
||||
number="+12223334444",
|
||||
verified=True,
|
||||
verification_sent_date=datetime.now(timezone.utc),
|
||||
verification_sent_date=datetime.now(UTC),
|
||||
)
|
||||
mock_twilio_client.messages.create.assert_called_once()
|
||||
mock_twilio_client.messages.create.reset_mock()
|
||||
|
@ -563,7 +562,7 @@ def real_phone_ca(phone_user, mock_twilio_client):
|
|||
user=phone_user,
|
||||
number="+14035551234",
|
||||
verified=True,
|
||||
verification_sent_date=datetime.now(timezone.utc),
|
||||
verification_sent_date=datetime.now(UTC),
|
||||
country_code="CA",
|
||||
)
|
||||
mock_twilio_client.messages.create.assert_called_once()
|
||||
|
@ -771,31 +770,31 @@ def test_get_last_text_sender_lots_of_inbound_returns_one():
|
|||
InboundContact,
|
||||
relay_number=relay_number,
|
||||
last_inbound_type="call",
|
||||
last_inbound_date=datetime.now(timezone.utc) - timedelta(days=4),
|
||||
last_inbound_date=datetime.now(UTC) - timedelta(days=4),
|
||||
)
|
||||
baker.make(
|
||||
InboundContact,
|
||||
relay_number=relay_number,
|
||||
last_inbound_type="text",
|
||||
last_inbound_date=datetime.now(timezone.utc) - timedelta(days=3),
|
||||
last_inbound_date=datetime.now(UTC) - timedelta(days=3),
|
||||
)
|
||||
baker.make(
|
||||
InboundContact,
|
||||
relay_number=relay_number,
|
||||
last_inbound_type="call",
|
||||
last_inbound_date=datetime.now(timezone.utc) - timedelta(days=2),
|
||||
last_inbound_date=datetime.now(UTC) - timedelta(days=2),
|
||||
)
|
||||
baker.make(
|
||||
InboundContact,
|
||||
relay_number=relay_number,
|
||||
last_inbound_type="text",
|
||||
last_inbound_date=datetime.now(timezone.utc) - timedelta(days=1),
|
||||
last_inbound_date=datetime.now(UTC) - timedelta(days=1),
|
||||
)
|
||||
inbound_contact = baker.make(
|
||||
InboundContact,
|
||||
relay_number=relay_number,
|
||||
last_inbound_type="text",
|
||||
last_inbound_date=datetime.now(timezone.utc),
|
||||
last_inbound_date=datetime.now(UTC),
|
||||
)
|
||||
|
||||
assert get_last_text_sender(relay_number) == inbound_contact
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from urllib.parse import urlencode, urlparse
|
||||
import logging
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
from django.http import Http404
|
||||
from django.shortcuts import resolve_url
|
||||
|
@ -9,7 +9,6 @@ from allauth.account.adapter import DefaultAccountAdapter
|
|||
|
||||
from .middleware import RelayStaticFilesMiddleware
|
||||
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
|
||||
|
||||
|
@ -41,7 +40,7 @@ class AccountAdapter(DefaultAccountAdapter):
|
|||
# Is this a known frontend path?
|
||||
try:
|
||||
middleware = RelayStaticFilesMiddleware()
|
||||
except Exception:
|
||||
except Exception: # noqa: S110 (exception pass without log)
|
||||
# Staticfiles are not available
|
||||
pass
|
||||
else:
|
||||
|
|
|
@ -1,14 +1,14 @@
|
|||
import base64
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
import requests
|
||||
import os
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
import requests
|
||||
|
||||
ROOT_DIR = os.path.abspath(os.curdir)
|
||||
|
||||
|
@ -31,7 +31,9 @@ def get_profiler_startup_data() -> tuple[str | None, str | None]:
|
|||
|
||||
|
||||
def write_gcp_key_json_file(gcp_key_json_path: Path) -> None:
|
||||
# create the gcp key json file from contents of GOOGLE_CLOUD_PROFILER_CREDENTIALS_B64
|
||||
"""
|
||||
Create the gcp key json file from contents of GOOGLE_CLOUD_PROFILER_CREDENTIALS_B64
|
||||
"""
|
||||
google_app_creds = base64.b64decode(settings.GOOGLE_CLOUD_PROFILER_CREDENTIALS_B64)
|
||||
if not google_app_creds == b"":
|
||||
with open(gcp_key_json_path, "w+") as gcp_key_file:
|
||||
|
@ -48,7 +50,7 @@ class PrivateRelayConfig(AppConfig):
|
|||
):
|
||||
# Set up Google Cloud Profiler
|
||||
service, version = get_profiler_startup_data()
|
||||
if service != None:
|
||||
if service is not None:
|
||||
gcp_key_json_path = Path(settings.GOOGLE_APPLICATION_CREDENTIALS)
|
||||
if not gcp_key_json_path.exists():
|
||||
write_gcp_key_json_file(gcp_key_json_path)
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
Counts = dict[str, dict[str, int]]
|
||||
CleanupData = dict[str, Any]
|
||||
|
@ -16,8 +16,8 @@ class DataIssueTask:
|
|||
check_description: str # A sentence describing what this cleaner is checking.
|
||||
can_clean: bool # True if the issue can be automatically cleaned
|
||||
|
||||
_counts: Optional[Counts]
|
||||
_cleanup_data: Optional[CleanupData]
|
||||
_counts: Counts | None
|
||||
_cleanup_data: CleanupData | None
|
||||
_cleaned: bool
|
||||
|
||||
def __init__(self):
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
import logging
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any, cast
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
import sentry_sdk
|
||||
from allauth.socialaccount.models import SocialAccount, SocialToken
|
||||
from allauth.socialaccount.providers.fxa.views import FirefoxAccountsOAuth2Adapter
|
||||
from oauthlib.oauth2.rfc6749.errors import CustomOAuth2Error, TokenExpiredError
|
||||
from requests_oauthlib import OAuth2Session
|
||||
import logging
|
||||
import sentry_sdk
|
||||
|
||||
from .utils import flag_is_active_in_task
|
||||
|
||||
|
||||
logger = logging.getLogger("events")
|
||||
|
||||
|
||||
|
@ -35,7 +34,7 @@ def update_social_token(
|
|||
) -> None:
|
||||
existing_social_token.token = new_oauth2_token["access_token"]
|
||||
existing_social_token.token_secret = new_oauth2_token["refresh_token"]
|
||||
existing_social_token.expires_at = datetime.now(timezone.utc) + timedelta(
|
||||
existing_social_token.expires_at = datetime.now(UTC) + timedelta(
|
||||
seconds=int(new_oauth2_token["expires_in"])
|
||||
)
|
||||
existing_social_token.save()
|
||||
|
@ -60,7 +59,7 @@ def _get_oauth2_session(social_account: SocialAccount) -> OAuth2Session:
|
|||
"client_secret": client_secret,
|
||||
}
|
||||
|
||||
expires_in = (social_token.expires_at - datetime.now(timezone.utc)).total_seconds()
|
||||
expires_in = (social_token.expires_at - datetime.now(UTC)).total_seconds()
|
||||
token = {
|
||||
"access_token": social_token.token,
|
||||
"refresh_token": social_token.token_secret,
|
||||
|
@ -172,10 +171,8 @@ def get_phone_subscription_dates(social_account):
|
|||
return None, None, None
|
||||
|
||||
date_subscribed_phone = datetime.fromtimestamp(
|
||||
subscription_created_timestamp, tz=timezone.utc
|
||||
subscription_created_timestamp, tz=UTC
|
||||
)
|
||||
start_date = datetime.fromtimestamp(
|
||||
subscription_start_timestamp, tz=timezone.utc
|
||||
)
|
||||
end_date = datetime.fromtimestamp(subscription_end_timestamp, tz=timezone.utc)
|
||||
start_date = datetime.fromtimestamp(subscription_start_timestamp, tz=UTC)
|
||||
end_date = datetime.fromtimestamp(subscription_end_timestamp, tz=UTC)
|
||||
return date_subscribed_phone, start_date, end_date
|
||||
|
|
|
@ -3,16 +3,17 @@ This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.

AUTOGENERATED BY glean_parser v13.0.1. DO NOT EDIT. To recreate, run:
AUTOGENERATED BY glean_parser v14.0.1. DO NOT EDIT. To recreate, run:

bash .circleci/python_job.bash run build_glean
"""

from __future__ import annotations
from datetime import datetime, timezone

import json
from datetime import UTC, datetime
from typing import Any
from uuid import uuid4
import json

GLEAN_EVENT_MOZLOG_TYPE = "glean-server-event"

@ -33,7 +34,7 @@ class EventsServerEventLogger:
self._channel = channel

def _record(self, user_agent: str, ip_address: str, event: dict[str, Any]) -> None:
now = datetime.now(timezone.utc)
now = datetime.now(UTC)
timestamp = now.isoformat()
event["timestamp"] = int(1000.0 * now.timestamp())  # Milliseconds since epoch
event_payload = {

@ -48,7 +49,7 @@ class EventsServerEventLogger:
# `Unknown` fields below are required in the Glean schema, however they are
# not useful in server context
"client_info": {
"telemetry_sdk_build": "glean_parser v13.0.1",
"telemetry_sdk_build": "glean_parser v14.0.1",
"first_run_date": "Unknown",
"os": "Unknown",
"os_version": "Unknown",

@ -93,7 +94,6 @@ class EventsServerEventLogger:
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,

@ -105,7 +105,6 @@ class EventsServerEventLogger:
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
is_reply: bool,
reason: str,

@ -118,7 +117,6 @@ class EventsServerEventLogger:
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks

@ -130,7 +128,6 @@ class EventsServerEventLogger:
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool is_reply: The email is a reply from the Relay user
:param str reason: Code describing why the email was blocked

@ -139,7 +136,6 @@ class EventsServerEventLogger:
"category": "email",
"name": "blocked",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),

@ -151,7 +147,6 @@ class EventsServerEventLogger:
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"is_reply": str(is_reply).lower(),
"reason": str(reason),

@ -163,7 +158,6 @@ class EventsServerEventLogger:
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,

@ -175,7 +169,6 @@ class EventsServerEventLogger:
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
is_reply: bool,
) -> None:

@ -187,7 +180,6 @@ class EventsServerEventLogger:
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks

@ -199,7 +191,6 @@ class EventsServerEventLogger:
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool is_reply: The email is a reply from the Relay user
"""

@ -207,7 +198,6 @@ class EventsServerEventLogger:
"category": "email",
"name": "forwarded",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),

@ -219,7 +209,6 @@ class EventsServerEventLogger:
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"is_reply": str(is_reply).lower(),
},

@ -230,7 +219,6 @@ class EventsServerEventLogger:
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,

@ -242,7 +230,6 @@ class EventsServerEventLogger:
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
created_by_api: bool,
has_website: bool,

@ -255,7 +242,6 @@ class EventsServerEventLogger:
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks

@ -267,7 +253,6 @@ class EventsServerEventLogger:
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool created_by_api: The mask was created via the API, rather than an incoming email
:param bool has_website: The mask was created by the Add-on or integration on a website

@ -276,7 +261,6 @@ class EventsServerEventLogger:
"category": "email_mask",
"name": "created",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),

@ -288,7 +272,6 @@ class EventsServerEventLogger:
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"created_by_api": str(created_by_api).lower(),
"has_website": str(has_website).lower(),

@ -300,7 +283,6 @@ class EventsServerEventLogger:
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,

@ -312,7 +294,6 @@ class EventsServerEventLogger:
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
) -> None:
"""

@ -323,7 +304,6 @@ class EventsServerEventLogger:
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks

@ -335,14 +315,12 @@ class EventsServerEventLogger:
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
"""
event = {
"category": "email_mask",
"name": "deleted",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),

@ -354,7 +332,6 @@ class EventsServerEventLogger:
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
},
}

@ -364,7 +341,6 @@ class EventsServerEventLogger:
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,

@ -376,7 +352,6 @@ class EventsServerEventLogger:
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
) -> None:
"""

@ -387,7 +362,6 @@ class EventsServerEventLogger:
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks

@ -399,14 +373,12 @@ class EventsServerEventLogger:
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
"""
event = {
"category": "email_mask",
"name": "label_updated",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),

@ -418,7 +390,6 @@ class EventsServerEventLogger:
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
},
}
Some files were not shown because too many files changed in this diff.