Bug 1877105 - Update to fluent.migrate 0.13.0. r=flod,mach-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D200183
Eemeli Aro 2024-02-02 12:25:28 +00:00
Parent 336371826e
Commit c66f71a403
23 changed files with 588 additions and 445 deletions

View file

@@ -1,20 +0,0 @@
fluent/__init__.py,sha256=jv2YF__bseklT3OWEzlqJ5qE24c4aWd5F4r0TTjOrWQ,65
fluent/migrate/__init__.py,sha256=TLqGTEnsuW9uy9WaUKTkeA3AvhyhnCslKMx4f_zV45o,136
fluent/migrate/_context.py,sha256=kLTbci2fgVBtAXy6sTujse6l9hhgkk62F7sddhD_jhk,12360
fluent/migrate/blame.py,sha256=Fh645Z1kOZHQN-5fBDdDUOJUf7B3LPf5Qzw-V6tdI8k,2624
fluent/migrate/changesets.py,sha256=aSoQ5cmoJkP7EbFwNCZ8CL6HVD2cheuOxhJMp8yyzjk,1523
fluent/migrate/context.py,sha256=Z8AokS8xhFJEUtlq_bHAIJCTPQZfXqiBuwbMy5l8iXg,6090
fluent/migrate/errors.py,sha256=s7JjvA2yCWogO-Ta4OV3z_Ab31-V_ha_3LGyxF46SRk,313
fluent/migrate/evaluator.py,sha256=NhLfdlSo1zKBNDS54sa-Xz67CjNYCnAYHRsBx2Gwj2Q,859
fluent/migrate/helpers.py,sha256=YH6TGE6vjyR7B-d6zJGS2wuz0j-P3SVA22LuplqyCSM,5072
fluent/migrate/merge.py,sha256=h7W0N3O9VcgZpWqL8JUpNM65p3sbH7Sm4chGZXpMZV0,1854
fluent/migrate/tool.py,sha256=g0ecdS2vLC71opcHB1k0AX1pD1Dj9xRRV9aLh8gEhmI,5599
fluent/migrate/transforms.py,sha256=CD5dFwAA9yG1g6nezna8HVVzP8Lx516bQ4cPB2jqkVU,20968
fluent/migrate/util.py,sha256=V_m009XtdTmPj8YxQP4BQ2949Nar7kLQZQcXXeDLPV0,2875
fluent/migrate/validator.py,sha256=1qA1Y_lYIpVmSEG_Nt95ZmMt3FZcoTDwSvDFNRZiwyc,11148
fluent.migrate-0.12.0.dist-info/LICENSE,sha256=yC8xgAJuBJQ0ThoBNcQnXzmBUYVh5xfk3rMDaXQ8gO4,559
fluent.migrate-0.12.0.dist-info/METADATA,sha256=E8HaaCMrwRrqSquzRcjGmUCOnYDtFMAhRK88F-qakso,2315
fluent.migrate-0.12.0.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
fluent.migrate-0.12.0.dist-info/entry_points.txt,sha256=q0mh-Wn0Z8L4j7xyyQhxLDw5yxAMDvSzMgm2uWjIBK8,109
fluent.migrate-0.12.0.dist-info/top_level.txt,sha256=E6y0EXb_8ntRq2470rEss448Ec6wP_-DI3zVECukrn0,7
fluent.migrate-0.12.0.dist-info/RECORD,,

View file

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: fluent.migrate
Version: 0.12.0
Version: 0.13.0
Summary: Toolchain to migrate legacy translation to Fluent.
Home-page: https://github.com/mozilla/fluent-migrate
Author: Mozilla
@@ -11,14 +11,15 @@ Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: compare-locales (<10.0,>=9.0.1)
Requires-Dist: fluent.syntax (<0.20,>=0.19.0)
Requires-Dist: compare-locales <10.0,>=9.0.1
Requires-Dist: fluent.syntax <0.20,>=0.19.0
Provides-Extra: hg
Requires-Dist: python-hglib ; extra == 'hg'

View file

@@ -0,0 +1,21 @@
fluent/__init__.py,sha256=ED6jHcYiuYpr_0vjGz0zx2lrrmJT9sDJCzIljoDfmlM,65
fluent/migrate/__init__.py,sha256=N1wyurR01T2hdcUhMuvdU9W413HQyw9gi0VJP6dTlm4,129
fluent/migrate/_context.py,sha256=2NCpsBmG7QzwF33cRcrhzavAW0DYsn5Zyvs1mTpg2YI,12890
fluent/migrate/blame.py,sha256=r3a9Zjc2SxAKSLcLQH4YmybbDpyRaYIaC3rEabmxqF8,2493
fluent/migrate/changesets.py,sha256=KKkNk4Ga1rq9QXH5mdb-iy6P86CbXUrUJNhMJQG1s9g,1777
fluent/migrate/context.py,sha256=ORIO46pTDKKq1z_mpM_E-nQiKzkjOVj8_jlAWiEHYQE,6307
fluent/migrate/errors.py,sha256=s7JjvA2yCWogO-Ta4OV3z_Ab31-V_ha_3LGyxF46SRk,313
fluent/migrate/evaluator.py,sha256=NhLfdlSo1zKBNDS54sa-Xz67CjNYCnAYHRsBx2Gwj2Q,859
fluent/migrate/helpers.py,sha256=8jFxbqMuMYOwGrmtdLv8p46QKh_kGEFAcyn2BNQC4Ps,5150
fluent/migrate/merge.py,sha256=J9DL-QUoBL3n9UTObhhETq47bCYSsHcW9F_ZIomrwak,1808
fluent/migrate/repo_client.py,sha256=hZvfD1P-ZOoM6u-aMQ4hNzBtMlcjevZERLfizjcgDWo,3541
fluent/migrate/tool.py,sha256=hGHq4N7gVxNllVrXQiqiCktzYAiTUMKQIDovAQXCMjE,5759
fluent/migrate/transforms.py,sha256=aCKY-fGJBv3e5rTBfLYKCo0urzHUjtHpejt0H5Vlors,20689
fluent/migrate/util.py,sha256=7n0pjmbvyJq7GrWV1gatDj7BYP7amY1S4UfugptWxwk,2853
fluent/migrate/validator.py,sha256=SpjTfaKvH8ZN7ZKuoJCEWIp3xXEyplzN6vF23piXSGE,11043
fluent.migrate-0.13.0.dist-info/LICENSE,sha256=yC8xgAJuBJQ0ThoBNcQnXzmBUYVh5xfk3rMDaXQ8gO4,559
fluent.migrate-0.13.0.dist-info/METADATA,sha256=-mqYB_hRmQqgLT9EyWfNO85wJvKaz3AvY5K1r-jcsZg,2363
fluent.migrate-0.13.0.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
fluent.migrate-0.13.0.dist-info/entry_points.txt,sha256=q0mh-Wn0Z8L4j7xyyQhxLDw5yxAMDvSzMgm2uWjIBK8,109
fluent.migrate-0.13.0.dist-info/top_level.txt,sha256=E6y0EXb_8ntRq2470rEss448Ec6wP_-DI3zVECukrn0,7
fluent.migrate-0.13.0.dist-info/RECORD,,

View file

@@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View file

@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__("pkgutil").extend_path(__path__, __name__)

View file

@@ -1,3 +1,8 @@
from .transforms import ( # noqa: F401
CONCAT, COPY, COPY_PATTERN, PLURALS, REPLACE, REPLACE_IN_TEXT
from .transforms import ( # noqa: F401
CONCAT,
COPY,
COPY_PATTERN,
PLURALS,
REPLACE,
REPLACE_IN_TEXT,
)

View file

@@ -1,20 +1,23 @@
from __future__ import annotations
from typing import Dict, Optional, Set, Tuple, cast
import os
import codecs
from functools import partial
import logging
from itertools import zip_longest
from compare_locales.parser import getParser
from compare_locales.plurals import get_plural
import fluent.syntax.ast as FTL
from fluent.syntax.parser import FluentParser
from fluent.syntax.serializer import FluentSerializer
from compare_locales.parser import getParser
from compare_locales.plurals import get_plural
from .changesets import Changes
from .errors import UnreadableReferenceError
from .evaluator import Evaluator
from .merge import merge_resource
from .errors import (
UnreadableReferenceError,
)
from .transforms import Source
class InternalContext:
@@ -23,9 +26,11 @@ class InternalContext:
For the public interface, see `context.MigrationContext`.
"""
def __init__(
self, lang, reference_dir, localization_dir, enforce_translated=False
):
dependencies: Dict[Tuple[str, str], Set[Tuple[str, Source]]] = {}
localization_dir: str
reference_dir: str
def __init__(self, lang, enforce_translated=False):
self.fluent_parser = FluentParser(with_spans=False)
self.fluent_serializer = FluentSerializer()
@@ -33,11 +38,11 @@
# language. E.g. ('one', 'other') for English.
self.plural_categories = get_plural(lang)
if self.plural_categories is None:
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
logger.warning(
'Plural rule for "{}" is not defined in '
'compare-locales'.format(lang))
self.plural_categories = ('one', 'other')
f'Plural rule for "{lang}" is not defined in "compare-locales"'
)
self.plural_categories = ("one", "other")
self.enforce_translated = enforce_translated
# Parsed input resources stored by resource path.
@@ -53,14 +58,14 @@
# AST hierarchy and evaluating nodes which are migration Transforms.
self.evaluator = Evaluator(self)
def read_ftl_resource(self, path):
def read_ftl_resource(self, path: str):
"""Read an FTL resource and parse it into an AST."""
f = codecs.open(path, 'r', 'utf8')
f = codecs.open(path, "r", "utf8")
try:
contents = f.read()
except UnicodeDecodeError as err:
logger = logging.getLogger('migrate')
logger.warning(f'Unable to read file {path}: {err}')
logger = logging.getLogger("migrate")
logger.warning(f"Unable to read file {path}: {err}")
raise err
finally:
f.close()
@@ -75,24 +80,25 @@
]
if len(annots):
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
for annot in annots:
msg = annot.message
logger.warning(f'Syntax error in {path}: {msg}')
logger.warning(f"Syntax error in {path}: {msg}")
return ast
def read_legacy_resource(self, path):
def read_legacy_resource(self, path: str):
"""Read a legacy resource and parse it into a dict."""
parser = getParser(path)
parser.readFile(path)
# Transform the parsed result which is an iterator into a dict.
return {
entity.key: entity.val for entity in parser
entity.key: entity.val
for entity in parser
if entity.localized or self.enforce_translated
}
def read_reference_ftl(self, path):
def read_reference_ftl(self, path: str):
"""Read and parse a reference FTL file.
A missing resource file is a fatal error and will raise an
@@ -102,15 +108,15 @@ class InternalContext:
try:
return self.read_ftl_resource(fullpath)
except OSError:
error_message = f'Missing reference file: {fullpath}'
logging.getLogger('migrate').error(error_message)
error_message = f"Missing reference file: {fullpath}"
logging.getLogger("migrate").error(error_message)
raise UnreadableReferenceError(error_message)
except UnicodeDecodeError as err:
error_message = f'Error reading file {fullpath}: {err}'
logging.getLogger('migrate').error(error_message)
error_message = f"Error reading file {fullpath}: {err}"
logging.getLogger("migrate").error(error_message)
raise UnreadableReferenceError(error_message)
def read_localization_ftl(self, path):
def read_localization_ftl(self, path: str):
"""Read and parse an existing localization FTL file.
Create a new FTL.Resource if the file doesn't exist or can't be
@@ -120,20 +126,22 @@
try:
return self.read_ftl_resource(fullpath)
except OSError:
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
logger.info(
'Localization file {} does not exist and '
'it will be created'.format(path))
"Localization file {} does not exist and "
"it will be created".format(path)
)
return FTL.Resource()
except UnicodeDecodeError:
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
logger.warning(
'Localization file {} has broken encoding. '
'It will be re-created and some translations '
'may be lost'.format(path))
"Localization file {} has broken encoding. "
"It will be re-created and some translations "
"may be lost".format(path)
)
return FTL.Resource()
def maybe_add_localization(self, path):
def maybe_add_localization(self, path: str):
"""Add a localization resource to migrate translations from.
Uses a compare-locales parser to create a dict of (key, string value)
@@ -142,17 +150,17 @@
"""
try:
fullpath = os.path.join(self.localization_dir, path)
if not fullpath.endswith('.ftl'):
if not fullpath.endswith(".ftl"):
collection = self.read_legacy_resource(fullpath)
else:
collection = self.read_ftl_resource(fullpath)
except OSError:
logger = logging.getLogger('migrate')
logger.warning(f'Missing localization file: {path}')
logger = logging.getLogger("migrate")
logger.warning(f"Missing localization file: {path}")
else:
self.localization_resources[path] = collection
def get_legacy_source(self, path, key):
def get_legacy_source(self, path: str, key: str):
"""Get an entity value from a localized legacy source.
Used by the `Source` transform.
@@ -160,14 +168,14 @@
resource = self.localization_resources[path]
return resource.get(key, None)
def get_fluent_source_pattern(self, path, key):
def get_fluent_source_pattern(self, path: str, key: str):
"""Get a pattern from a localized Fluent source.
If the key contains a `.`, does an attribute lookup.
Used by the `COPY_PATTERN` transform.
"""
resource = self.localization_resources[path]
msg_key, _, attr_key = key.partition('.')
msg_key, _, attr_key = key.partition(".")
found = None
for entry in resource.body:
if isinstance(entry, (FTL.Message, FTL.Term)):
@@ -190,20 +198,27 @@
in two FTL resources.
If the order or number of messages differ, the result is also False.
"""
def message_id(message):
"Return the message's identifer name for sorting purposes."
return message.id.name
messages1 = sorted(
(entry for entry in res1.body
if isinstance(entry, FTL.Message)
or isinstance(entry, FTL.Term)),
key=message_id)
(
entry
for entry in res1.body
if isinstance(entry, FTL.Message) or isinstance(entry, FTL.Term)
),
key=message_id,
)
messages2 = sorted(
(entry for entry in res2.body
if isinstance(entry, FTL.Message)
or isinstance(entry, FTL.Term)),
key=message_id)
(
entry
for entry in res2.body
if isinstance(entry, FTL.Message) or isinstance(entry, FTL.Term)
),
key=message_id,
)
for msg1, msg2 in zip_longest(messages1, messages2):
if msg1 is None or msg2 is None:
return False
@@ -211,7 +226,11 @@ class InternalContext:
return False
return True
def merge_changeset(self, changeset=None, known_translations=None):
def merge_changeset(
self,
changeset: Optional[Changes] = None,
known_translations: Optional[Changes] = None,
):
"""Return a generator of FTL ASTs for the changeset.
The input data must be configured earlier using the `add_*` methods.
@@ -233,7 +252,7 @@
changeset = {
(path, key)
for path, strings in self.localization_resources.items()
if not path.endswith('.ftl')
if not path.endswith(".ftl")
for key in strings.keys()
}
@@ -244,7 +263,8 @@
current = self.target_resources[path]
transforms = self.transforms.get(path, [])
in_changeset = partial(
self.in_changeset, changeset, known_translations, path)
self.in_changeset, changeset, known_translations, path
)
# Merge legacy translations with the existing ones using the
# reference as a template.
@@ -269,7 +289,9 @@
# The result for this path is a complete `FTL.Resource`.
yield path, snapshot
def in_changeset(self, changeset, known_translations, path, ident):
def in_changeset(
self, changeset: Changes, known_translations: Changes, path: str, ident
) -> bool:
"""Check if a message should be migrated in this changeset.
The message is identified by path and ident.
@@ -304,11 +326,13 @@
# See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271
# We only return True if our current changeset touches
# the transform, and we have all of the dependencies.
active_deps = message_deps & changeset
active_deps = cast(bool, message_deps & changeset)
available_deps = message_deps & known_translations
return active_deps and message_deps == available_deps
def serialize_changeset(self, changeset, known_translations=None):
def serialize_changeset(
self, changeset: Changes, known_translations: Optional[Changes] = None
):
"""Return a dict of serialized FTLs for the changeset.
Given `changeset`, return a dict whose keys are resource paths and
@@ -317,9 +341,7 @@
return {
path: self.fluent_serializer.serialize(snapshot)
for path, snapshot in self.merge_changeset(
changeset, known_translations
)
for path, snapshot in self.merge_changeset(changeset, known_translations)
}
def evaluate(self, node):
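
For orientation, the typed entry points above are normally reached through the public MigrationContext; a minimal sketch, assuming hypothetical directories and message keys:

from fluent.migrate.context import MigrationContext

ctx = MigrationContext("fr", reference_dir="en-US", localization_dir="fr")
# ... recipes register transforms via ctx.add_transforms(...) ...
changeset = {("browser/browser.ftl", "example-title")}  # Changes: a set of (path, key)
for path, content in ctx.serialize_changeset(changeset).items():
    print(path, content)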

View file

@@ -1,36 +1,37 @@
from __future__ import annotations
from typing import Dict, Iterable, Tuple, TypedDict, cast
import argparse
import json
import os
from os.path import join
from compare_locales.parser import getParser, Junk
from compare_locales.parser import Junk, getParser
from compare_locales.parser.fluent import FluentEntity
from compare_locales import mozpath
import hglib
from hglib.util import b, cmdbuilder
from .repo_client import RepoClient
BlameData = Dict[str, Dict[str, Tuple[int, float]]]
"File path -> message key -> [userid, timestamp]"
class BlameResult(TypedDict):
authors: list[str]
blame: BlameData
class Blame:
def __init__(self, client):
def __init__(self, client: RepoClient):
self.client = client
self.users = []
self.blame = {}
self.users: list[str] = []
self.blame: BlameData = {}
def attribution(self, file_paths):
args = cmdbuilder(
b('annotate'), *[b(p) for p in file_paths], template='json',
date=True, user=True, cwd=self.client.root())
blame_json = self.client.rawcommand(args)
file_blames = json.loads(blame_json)
for file_blame in file_blames:
self.handleFile(file_blame)
return {'authors': self.users,
'blame': self.blame}
def handleFile(self, file_blame):
path = mozpath.normsep(file_blame['path'])
def attribution(self, file_paths: Iterable[str]) -> BlameResult:
for file in file_paths:
blame = self.client.blame(file)
self.handleFile(file, blame)
return {"authors": self.users, "blame": self.blame}
def handleFile(self, path: str, file_blame: list[Tuple[str, int]]):
try:
parser = getParser(path)
except UserWarning:
@@ -44,37 +45,33 @@ class Blame:
if isinstance(e, Junk):
continue
if e.val_span:
key_vals = [(e.key, e.val_span)]
key_vals: list[tuple[str, str]] = [(e.key, e.val_span)]
else:
key_vals = []
if isinstance(e, FluentEntity):
key_vals += [
(f'{e.key}.{attr.key}', attr.val_span)
(f"{e.key}.{attr.key}", cast(str, attr.val_span))
for attr in e.attributes
]
for key, (val_start, val_end) in key_vals:
entity_lines = file_blame['lines'][
(e.ctx.linecol(val_start)[0] - 1):e.ctx.linecol(val_end)[0]
entity_lines = file_blame[
(e.ctx.linecol(val_start)[0] - 1) : e.ctx.linecol(val_end)[0]
]
# ignore timezone
entity_lines.sort(key=lambda blame: -blame['date'][0])
line_blame = entity_lines[0]
user = line_blame['user']
timestamp = line_blame['date'][0] # ignore timezone
user, timestamp = max(entity_lines, key=lambda x: x[1])
if user not in self.users:
self.users.append(user)
userid = self.users.index(user)
self.blame[path][key] = [userid, timestamp]
self.blame[path][key] = (userid, timestamp)
def readFile(self, parser, path):
parser.readFile(os.path.join(self.client.root().decode('utf-8'), path))
def readFile(self, parser, path: str):
parser.readFile(join(self.client.root, path))
if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('repo_path')
parser.add_argument('file_path', nargs='+')
parser.add_argument("repo_path")
parser.add_argument("file_path", nargs="+")
args = parser.parse_args()
blame = Blame(hglib.open(args.repo_path))
blame = Blame(RepoClient(args.repo_path))
attrib = blame.attribution(args.file_path)
print(json.dumps(attrib, indent=4, separators=(',', ': ')))
print(json.dumps(attrib, indent=4, separators=(",", ": ")))
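
Beyond the __main__ entry point above, Blame is usable as a library; a minimal sketch, assuming a hypothetical checkout and file path:

from fluent.migrate.blame import Blame
from fluent.migrate.repo_client import RepoClient

client = RepoClient("path/to/l10n-repo")  # hypothetical checkout
blame = Blame(client)
result = blame.attribution(["browser/chrome/browser/browser.properties"])
# result["authors"] is a list of user ids; result["blame"] maps
# file path -> message key -> (author index, unix timestamp).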

View file

@@ -1,12 +1,25 @@
from __future__ import annotations
from typing import Set, Tuple, TypedDict
import time
from .blame import BlameResult
def by_first_commit(item):
Changes = Set[Tuple[str, str]]
class Changeset(TypedDict):
author: str
first_commit: float
changes: Changes
def by_first_commit(item: Changeset):
"""Order two changesets by their first commit date."""
return item['first_commit']
return item["first_commit"]
def convert_blame_to_changesets(blame_json):
def convert_blame_to_changesets(blame_json: BlameResult) -> list[Changeset]:
"""Convert a blame dict into a list of changesets.
The blame information in `blame_json` should be a dict of the following
@@ -38,19 +51,16 @@ def convert_blame_to_changesets(blame_json):
"""
now = time.time()
changesets = [
{
'author': author,
'first_commit': now,
'changes': set()
} for author in blame_json['authors']
changesets: list[Changeset] = [
{"author": author, "first_commit": now, "changes": set()}
for author in blame_json["authors"]
]
for path, keys_info in blame_json['blame'].items():
for path, keys_info in blame_json["blame"].items():
for key, (author_index, timestamp) in keys_info.items():
changeset = changesets[author_index]
changeset['changes'].add((path, key))
if timestamp < changeset['first_commit']:
changeset['first_commit'] = timestamp
changeset["changes"].add((path, key))
if timestamp < changeset["first_commit"]:
changeset["first_commit"] = timestamp
return sorted(changesets, key=by_first_commit)
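
A small worked example of the data flow through convert_blame_to_changesets (the blame data is made up):

from fluent.migrate.changesets import convert_blame_to_changesets

blame = {
    "authors": ["alice", "bob"],
    "blame": {
        "browser.properties": {
            "example.title": (0, 1700000000.0),  # (author index, timestamp)
            "example.label": (1, 1700000100.0),
        }
    },
}
for changeset in convert_blame_to_changesets(blame):
    # Changesets come back ordered by first commit date; alice is first here.
    print(changeset["author"], sorted(changeset["changes"]))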

View file

@@ -1,3 +1,6 @@
from __future__ import annotations
from typing import List, Set, Tuple, cast
import logging
import fluent.syntax.ast as FTL
@@ -13,9 +16,9 @@ from ._context import InternalContext
__all__ = [
'EmptyLocalizationError',
'UnreadableReferenceError',
'MigrationContext',
"EmptyLocalizationError",
"UnreadableReferenceError",
"MigrationContext",
]
@@ -48,23 +51,31 @@ class MigrationContext(InternalContext):
"""
def __init__(
self, locale, reference_dir, localization_dir, enforce_translated=False
self,
locale: str,
reference_dir: str,
localization_dir: str,
enforce_translated=False,
):
super().__init__(
locale, reference_dir, localization_dir,
enforce_translated=enforce_translated
locale,
enforce_translated=enforce_translated,
)
self.locale = locale
# Paths to directories with input data, relative to CWD.
self.reference_dir = reference_dir
self.localization_dir = localization_dir
# A dict whose keys are `(path, key)` tuples corresponding to target
# FTL translations, and values are sets of `(path, key)` tuples
# corresponding to localized entities which will be migrated.
self.dependencies = {}
"""
A dict whose keys are `(path, key)` tuples corresponding to target
FTL translations, and values are sets of `(path, key)` tuples
corresponding to localized entities which will be migrated.
"""
def add_transforms(self, target, reference, transforms):
def add_transforms(
self, target: str, reference: str, transforms: List[FTL.Message | FTL.Term]
):
"""Define transforms for target using reference as template.
`target` is a path of the destination FTL file relative to the
@@ -82,6 +93,7 @@ class MigrationContext(InternalContext):
For transforms that merely copy legacy messages or Fluent patterns,
using `fluent.migrate.helpers.transforms_from` is recommended.
"""
def get_sources(acc, cur):
if isinstance(cur, Source):
acc.add((cur.path, cur.key))
@@ -93,18 +105,16 @@ class MigrationContext(InternalContext):
reference_ast = self.reference_resources.get(target)
if reference_ast is None:
reference_ast = FTL.Resource()
reference_ast.body.extend(
skeleton(transform) for transform in transforms
)
reference_ast.body.extend(skeleton(transform) for transform in transforms)
else:
reference_ast = self.read_reference_ftl(reference)
self.reference_resources[target] = reference_ast
for node in transforms:
ident = node.id.name
ident = cast(str, node.id.name)
# Scan `node` for `Source` nodes and collect the information they
# store into a set of dependencies.
dependencies = fold(get_sources, node, set())
dependencies = cast(Set[Tuple[str, Source]], fold(get_sources, node, set()))
# Set these sources as dependencies for the current transform.
self.dependencies[(target, ident)] = dependencies
@@ -114,10 +124,12 @@ class MigrationContext(InternalContext):
if self.reference_dir is None:
continue
if get_message(reference_ast.body, ident) is None:
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
logger.warning(
'{} "{}" was not found in {}'.format(
type(node).__name__, ident, reference))
type(node).__name__, ident, reference
)
)
# Keep track of localization resource paths which were defined as
# sources in the transforms.
@@ -134,8 +146,8 @@ class MigrationContext(InternalContext):
# However, if all legacy resources are missing, bail out early. There
# are no translations to migrate. We'd also get errors in hg annotate.
if len(expected_paths) > 0 and len(self.localization_resources) == 0:
error_message = 'No localization files were found'
logging.getLogger('migrate').error(error_message)
error_message = "No localization files were found"
logging.getLogger("migrate").error(error_message)
raise EmptyLocalizationError(error_message)
# Add the current transforms to any other transforms added earlier for
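
For reference, the shape of a migration recipe that feeds add_transforms; the paths and message ids here are hypothetical:

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import COPY

def migrate(ctx):
    """Bug 1877105 - Example migration, part {index}."""
    ctx.add_transforms(
        "browser/browser.ftl",  # target, relative to localization_dir
        "browser/browser.ftl",  # reference, relative to reference_dir
        [
            FTL.Message(
                id=FTL.Identifier("example-title"),
                value=COPY("browser/browser.properties", "example.title"),
            ),
        ],
    )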

View file

@@ -7,6 +7,8 @@ They take a string argument and immediately return a corresponding AST node.
(As opposed to Transforms which are AST nodes on their own and only return the
migrated AST nodes when they are evaluated by a MigrationContext.) """
from __future__ import annotations
from typing import List
from fluent.syntax import FluentParser, ast as FTL
from fluent.syntax.visitor import Transformer
@@ -17,9 +19,7 @@ from .errors import NotSupportedError, InvalidTransformError
def VARIABLE_REFERENCE(name):
"""Create an ExternalArgument expression."""
return FTL.VariableReference(
id=FTL.Identifier(name)
)
return FTL.VariableReference(id=FTL.Identifier(name))
def MESSAGE_REFERENCE(name):
@@ -28,8 +28,8 @@ def MESSAGE_REFERENCE(name):
If the passed name contains a `.`, we're generating
a message reference with an attribute.
"""
if '.' in name:
name, attribute = name.split('.')
if "." in name:
name, attribute = name.split(".")
attribute = FTL.Identifier(attribute)
else:
attribute = None
@@ -43,9 +43,7 @@
def TERM_REFERENCE(name):
"""Create a TermReference expression."""
return FTL.TermReference(
id=FTL.Identifier(name)
)
return FTL.TermReference(id=FTL.Identifier(name))
class IntoTranforms(Transformer):
@@ -59,26 +57,29 @@ class IntoTranforms(Transformer):
anno = node.annotations[0]
raise InvalidTransformError(
"Transform contains parse error: {}, at {}".format(
anno.message, anno.span.start))
anno.message, anno.span.start
)
)
def visit_FunctionReference(self, node):
name = node.id.name
if name in self.IMPLICIT_TRANSFORMS:
raise NotSupportedError(
"{} may not be used with transforms_from(). It runs "
"implicitly on all Patterns anyways.".format(name))
"implicitly on all Patterns anyways.".format(name)
)
if name in self.FORBIDDEN_TRANSFORMS:
raise NotSupportedError(
"{} may not be used with transforms_from(). It requires "
"additional logic in Python code.".format(name))
if name in ('COPY', 'COPY_PATTERN'):
args = (
self.into_argument(arg) for arg in node.arguments.positional
"additional logic in Python code.".format(name)
)
if name in ("COPY", "COPY_PATTERN"):
args = (self.into_argument(arg) for arg in node.arguments.positional)
kwargs = {
arg.name.name: self.into_argument(arg.value)
for arg in node.arguments.named}
if name == 'COPY':
for arg in node.arguments.named
}
if name == "COPY":
return COPY(*args, **kwargs)
return COPY_PATTERN(*args, **kwargs)
return self.generic_visit(node)
@@ -117,15 +118,15 @@ class IntoTranforms(Transformer):
return self.substitutions[node.id.name]
except KeyError:
raise InvalidTransformError(
"Unknown substitution in COPY: {}".format(
node.id.name))
"Unknown substitution in COPY: {}".format(node.id.name)
)
else:
raise InvalidTransformError(
"Invalid argument passed to COPY: {}".format(
type(node).__name__))
"Invalid argument passed to COPY: {}".format(type(node).__name__)
)
def transforms_from(ftl, **substitutions):
def transforms_from(ftl, **substitutions) -> List[FTL.Message | FTL.Term]:
"""Parse FTL code into a list of Message nodes with Transforms.
The FTL may use a fabricated COPY function inside of placeables which
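
As a reminder of the API this file implements, a minimal transforms_from sketch (the paths and ids are hypothetical):

from fluent.migrate.helpers import transforms_from

transforms = transforms_from(
    """
example-title = { COPY(source, "example.title") }
""",
    source="browser/browser.properties",  # substituted for the `source` variable
)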

View file

@@ -15,11 +15,7 @@ def merge_resource(ctx, reference, current, transforms, in_changeset):
"""
def merge_body(body):
return [
entry
for entry in map(merge_entry, body)
if entry is not None
]
return [entry for entry in map(merge_entry, body) if entry is not None]
def merge_entry(entry):
# All standalone comments will be merged.

106  third_party/python/fluent.migrate/fluent/migrate/repo_client.py vendored Normal file
View file

@@ -0,0 +1,106 @@
from __future__ import annotations
from typing import Tuple
import json
from subprocess import run
from os.path import isdir, join
import hglib
def git(root: str, *args: str) -> str:
"""
Wrapper for calling command-line git in the `root` directory.
Raises an exception on any error, including a non-0 return code.
Returns the command's stdout as a string.
"""
git = ["git"]
git.extend(args)
proc = run(git, capture_output=True, cwd=root, encoding="utf-8")
if proc.returncode != 0:
raise Exception(proc.stderr or f"git command failed: {args}")
return proc.stdout
class RepoClient:
def __init__(self, root: str):
self.root = root
if isdir(join(root, ".hg")):
self.hgclient = hglib.open(root, "utf-8")
elif isdir(join(root, ".git")):
self.hgclient = None
stdout = git(self.root, "rev-parse", "--is-inside-work-tree")
if stdout != "true\n":
raise Exception("git rev-parse failed")
else:
raise Exception(f"Unsupported repository: {root}")
def close(self):
if self.hgclient:
self.hgclient.close()
def blame(self, file: str) -> list[Tuple[str, int]]:
"Return a list of (author, time) tuples for each line in `file`."
if self.hgclient:
args = hglib.util.cmdbuilder(
b"annotate",
file.encode("latin-1"),
template="json",
date=True,
user=True,
cwd=self.root,
)
blame_json = self.hgclient.rawcommand(args)
return [
(line["user"], int(line["date"][0]))
for line in json.loads(blame_json)[0]["lines"]
]
else:
lines: list[Tuple[str, int]] = []
user = ""
time = 0
stdout = git(self.root, "blame", "--porcelain", file)
for line in stdout.splitlines():
if line.startswith("author "):
user = line[7:]
elif line.startswith("author-mail "):
user += line[11:] # includes leading space
elif line.startswith("author-time "):
time = int(line[12:])
elif line.startswith("\t"):
lines.append((user, time))
return lines
def commit(self, message: str, author: str):
"Add and commit all work tree files"
if self.hgclient:
self.hgclient.commit(message, user=author.encode("utf-8"), addremove=True)
else:
git(self.root, "add", ".")
git(self.root, "commit", f"--author={author}", f"--message={message}")
def head(self) -> str:
"Identifier for the most recent commit"
if self.hgclient:
return self.hgclient.tip().node.decode("utf-8")
else:
return git(self.root, "rev-parse", "HEAD").strip()
def log(self, from_commit: str, to_commit: str) -> list[str]:
if self.hgclient:
return [
rev.desc.decode("utf-8")
for rev in self.hgclient.log(f"{to_commit} % {from_commit}")
]
else:
return (
git(
self.root,
"log",
"--pretty=format:%s",
f"{from_commit}..{to_commit}",
)
.strip()
.splitlines()
)
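
The new RepoClient hides the hg/git split behind one interface; a minimal usage sketch, assuming a hypothetical checkout:

from fluent.migrate.repo_client import RepoClient

client = RepoClient("path/to/l10n-repo")  # detects .hg or .git
print(client.head())  # id of the most recent commit
for author, time in client.blame("browser/browser.ftl"):
    ...  # one (author, time) pair per line of the file
client.close()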

View file

@@ -1,16 +1,19 @@
import os
import logging
from __future__ import annotations
from types import ModuleType
from typing import Iterable, cast
import argparse
from contextlib import contextmanager
import importlib
import logging
import os
import sys
import hglib
from fluent.migrate.blame import Blame
from fluent.migrate.changesets import Changes, convert_blame_to_changesets
from fluent.migrate.context import MigrationContext
from fluent.migrate.errors import MigrationError
from fluent.migrate.changesets import convert_blame_to_changesets
from fluent.migrate.blame import Blame
from fluent.migrate.repo_client import RepoClient
@contextmanager
@@ -22,7 +25,9 @@ def dont_write_bytecode():
class Migrator:
def __init__(self, locale, reference_dir, localization_dir, dry_run):
def __init__(
self, locale: str, reference_dir: str, localization_dir: str, dry_run: bool
):
self.locale = locale
self.reference_dir = reference_dir
self.localization_dir = localization_dir
@@ -32,7 +37,7 @@ class Migrator:
@property
def client(self):
if self._client is None:
self._client = hglib.open(self.localization_dir, 'utf-8')
self._client = RepoClient(self.localization_dir)
return self._client
def close(self):
@@ -40,26 +45,26 @@
if self._client is not None:
self._client.close()
def run(self, migration):
print('\nRunning migration {} for {}'.format(
migration.__name__, self.locale))
def run(self, migration: ModuleType):
print("\nRunning migration {} for {}".format(migration.__name__, self.locale))
# For each migration create a new context.
ctx = MigrationContext(
self.locale, self.reference_dir, self.localization_dir
)
ctx = MigrationContext(self.locale, self.reference_dir, self.localization_dir)
try:
# Add the migration spec.
migration.migrate(ctx)
except MigrationError as e:
print(' Skipping migration {} for {}:\n {}'.format(
migration.__name__, self.locale, e))
print(
" Skipping migration {} for {}:\n {}".format(
migration.__name__, self.locale, e
)
)
return
# Keep track of how many changesets we're committing.
index = 0
description_template = migration.migrate.__doc__
description_template = cast(str, migration.migrate.__doc__)
# Annotate localization files used as sources by this migration
# to preserve attribution of translations.
@@ -70,59 +75,58 @@
for changeset in changesets:
snapshot = self.snapshot(
ctx, changeset['changes'], known_legacy_translations
ctx, changeset["changes"], known_legacy_translations
)
if not snapshot:
continue
self.serialize_changeset(snapshot)
index += 1
self.commit_changeset(
description_template, changeset['author'], index
)
self.commit_changeset(description_template, changeset["author"], index)
def snapshot(self, ctx, changes_in_changeset, known_legacy_translations):
'''Run the migration for the changeset, with the set of
def snapshot(
self,
ctx: MigrationContext,
changes_in_changeset: Changes,
known_legacy_translations: Changes,
):
"""Run the migration for the changeset, with the set of
this and all prior legacy translations.
'''
"""
known_legacy_translations.update(changes_in_changeset)
return ctx.serialize_changeset(
changes_in_changeset,
known_legacy_translations
)
return ctx.serialize_changeset(changes_in_changeset, known_legacy_translations)
def serialize_changeset(self, snapshot):
'''Write serialized FTL files to disk.'''
"""Write serialized FTL files to disk."""
for path, content in snapshot.items():
fullpath = os.path.join(self.localization_dir, path)
print(f' Writing to {fullpath}')
print(f" Writing to {fullpath}")
if not self.dry_run:
fulldir = os.path.dirname(fullpath)
if not os.path.isdir(fulldir):
os.makedirs(fulldir)
with open(fullpath, 'wb') as f:
f.write(content.encode('utf8'))
with open(fullpath, "wb") as f:
f.write(content.encode("utf8"))
f.close()
def commit_changeset(
self, description_template, author, index
):
message = description_template.format(
index=index,
author=author
)
def commit_changeset(self, description_template: str, author: str, index: int):
message = description_template.format(index=index, author=author)
print(f' Committing changeset: {message}')
print(f" Committing changeset: {message}")
if self.dry_run:
return
try:
self.client.commit(
message, user=author.encode('utf-8'), addremove=True
)
except hglib.error.CommandError as err:
print(f' WARNING: hg commit failed ({err})')
self.client.commit(message, author)
except Exception as err:
print(f" WARNING: commit failed ({err})")
def main(locale, reference_dir, localization_dir, migrations, dry_run):
def main(
locale,
reference_dir: str,
localization_dir: str,
migrations: Iterable[ModuleType],
dry_run: bool,
):
"""Run migrations and commit files with the result."""
migrator = Migrator(locale, reference_dir, localization_dir, dry_run)
@@ -133,32 +137,31 @@ def main(locale, reference_dir, localization_dir, migrations, dry_run):
def cli():
parser = argparse.ArgumentParser(
description='Migrate translations to FTL.'
parser = argparse.ArgumentParser(description="Migrate translations to FTL.")
parser.add_argument(
"migrations",
metavar="MIGRATION",
type=str,
nargs="+",
help="migrations to run (Python modules)",
)
parser.add_argument(
'migrations', metavar='MIGRATION', type=str, nargs='+',
help='migrations to run (Python modules)'
"--locale", "--lang", type=str, help="target locale code (--lang is deprecated)"
)
parser.add_argument(
'--locale', '--lang', type=str,
help='target locale code (--lang is deprecated)'
"--reference-dir", type=str, help="directory with reference FTL files"
)
parser.add_argument(
'--reference-dir', type=str,
help='directory with reference FTL files'
"--localization-dir", type=str, help="directory for localization files"
)
parser.add_argument(
'--localization-dir', type=str,
help='directory for localization files'
)
parser.add_argument(
'--dry-run', action='store_true',
help='do not write to disk nor commit any changes'
"--dry-run",
action="store_true",
help="do not write to disk nor commit any changes",
)
parser.set_defaults(dry_run=False)
logger = logging.getLogger('migrate')
logger = logging.getLogger("migrate")
logger.setLevel(logging.INFO)
args = parser.parse_args()
@@ -173,9 +176,9 @@ def cli():
reference_dir=args.reference_dir,
localization_dir=args.localization_dir,
migrations=migrations,
dry_run=args.dry_run
dry_run=args.dry_run,
)
if __name__ == '__main__':
if __name__ == "__main__":
cli()
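
main() can also be driven programmatically instead of through cli(); a sketch with a hypothetical recipe module:

import importlib
from fluent.migrate.tool import main

migration = importlib.import_module("bug_1877105_example")  # hypothetical recipe
main(
    locale="fr",
    reference_dir="en-US",
    localization_dir="fr",
    migrations=[migration],
    dry_run=True,  # print what would be written and committed, change nothing
)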

View file

@@ -69,7 +69,7 @@ from .errors import NotSupportedError
def chain_elements(elements):
'''Flatten a list of FTL nodes into an iterator over PatternElements.'''
"""Flatten a list of FTL nodes into an iterator over PatternElements."""
for element in elements:
if isinstance(element, FTL.Pattern):
# PY3 yield from element.elements
@@ -79,22 +79,20 @@ def chain_elements(elements):
elif isinstance(element, FTL.Expression):
yield FTL.Placeable(element)
else:
raise RuntimeError(
'Expected Pattern, PatternElement or Expression')
raise RuntimeError("Expected Pattern, PatternElement or Expression")
re_leading_ws = re.compile(
r'\A(?:(?P<whitespace> +)(?P<text>.*?)|(?P<block_text>\n.*?))\Z',
r"\A(?:(?P<whitespace> +)(?P<text>.*?)|(?P<block_text>\n.*?))\Z",
re.S,
)
re_trailing_ws = re.compile(
r'\A(?:(?P<text>.*?)(?P<whitespace> +)|(?P<block_text>.*\n))\Z',
re.S
r"\A(?:(?P<text>.*?)(?P<whitespace> +)|(?P<block_text>.*\n))\Z", re.S
)
def extract_whitespace(regex, element):
'''Extract leading or trailing whitespace from a TextElement.
"""Extract leading or trailing whitespace from a TextElement.
Return a tuple of (Placeable, TextElement) in which the Placeable
encodes the extracted whitespace as a StringLiteral and the
@@ -102,18 +100,18 @@ def extract_whitespace(regex, element):
Placeable with the extracted whitespace is always returned first.
If the element starts or ends with a newline, add an empty
StringLiteral.
'''
"""
match = re.search(regex, element.value)
if match:
# If white-space is None, we're a newline. Add an
# empty { "" }
whitespace = match.group('whitespace') or ''
whitespace = match.group("whitespace") or ""
placeable = FTL.Placeable(FTL.StringLiteral(whitespace))
if whitespace == element.value:
return placeable, None
else:
# Either text or block_text matched the rest.
text = match.group('text') or match.group('block_text')
text = match.group("text") or match.group("block_text")
return placeable, FTL.TextElement(text)
else:
return None, element
@@ -137,9 +135,11 @@ class Transform(FTL.BaseNode):
for element in chain_elements(elements):
if isinstance(element, FTL.TextElement):
text_content = element.value
elif isinstance(element, FTL.Placeable) \
and isinstance(element.expression, FTL.StringLiteral) \
and re.match(r'^ *$', element.expression.value):
elif (
isinstance(element, FTL.Placeable)
and isinstance(element.expression, FTL.StringLiteral)
and re.match(r"^ *$", element.expression.value)
):
text_content = element.expression.value
else:
# The element does not contain text content which should be
@@ -161,7 +161,7 @@ class Transform(FTL.BaseNode):
# Store empty values explicitly as {""}.
if len(normalized) == 0:
empty = FTL.Placeable(FTL.StringLiteral(''))
empty = FTL.Placeable(FTL.StringLiteral(""))
return FTL.Pattern([empty])
# Extract explicit leading whitespace into a StringLiteral.
@@ -174,11 +174,7 @@ class Transform(FTL.BaseNode):
ws, text = extract_whitespace(re_trailing_ws, normalized[-1])
normalized[-1:] = [text, ws]
return FTL.Pattern([
element
for element in normalized
if element is not None
])
return FTL.Pattern([element for element in normalized if element is not None])
class Source(Transform):
@@ -187,6 +183,7 @@ class Source(Transform):
The contract is that the first argument is the source path, and the
second is a key representing legacy string IDs, or Fluent id.attr.
"""
def __init__(self, path, key):
self.path = path
self.key = key
@@ -197,16 +194,16 @@ class FluentSource(Source):
When evaluated, it clones the Pattern of the parsed source.
"""
def __init__(self, path, key):
if not path.endswith('.ftl'):
if not path.endswith(".ftl"):
raise NotSupportedError(
'Please use COPY to migrate from legacy files '
'({})'.format(path)
"Please use COPY to migrate from legacy files " "({})".format(path)
)
if key[0] == '-' and '.' in key:
if key[0] == "-" and "." in key:
raise NotSupportedError(
'Cannot migrate from Term Attributes, as they are'
'locale-dependent ({})'.format(path)
"Cannot migrate from Term Attributes, as they are"
"locale-dependent ({})".format(path)
)
super().__init__(path, key)
@@ -222,6 +219,7 @@ class COPY_PATTERN(FluentSource):
Term ID. Accessing Term attributes is not supported, as they're internal
to the localization.
"""
pass
@@ -231,6 +229,7 @@ class TransformPattern(FluentSource, Transformer):
Implement visit_* methods of the Transformer pattern to do the
actual modifications.
"""
def __call__(self, ctx):
pattern = super().__call__(ctx)
return self.visit(pattern)
@@ -275,10 +274,11 @@ class LegacySource(Source):
"""
def __init__(self, path, key, trim=None):
if path.endswith('.ftl'):
if path.endswith(".ftl"):
raise NotSupportedError(
'Please use COPY_PATTERN to migrate from Fluent files '
'({})'.format(path))
"Please use COPY_PATTERN to migrate from Fluent files "
"({})".format(path)
)
super().__init__(path, key)
self.trim = trim
@@ -289,11 +289,11 @@ class LegacySource(Source):
@staticmethod
def trim_text(text):
# strip leading white-space from each line
text = re.sub('^[ \t]+', '', text, flags=re.M)
text = re.sub("^[ \t]+", "", text, flags=re.M)
# strip trailing white-space from each line
text = re.sub('[ \t]+$', '', text, flags=re.M)
text = re.sub("[ \t]+$", "", text, flags=re.M)
# strip leading and trailing empty lines
text = text.strip('\r\n')
text = text.strip("\r\n")
return text
def __call__(self, ctx):
@@ -312,11 +312,11 @@ class COPY(LegacySource):
PRINTF = re.compile(
r'%(?P<good>%|'
r'(?:(?P<number>[1-9][0-9]*)\$)?'
r'(?P<width>\*|[0-9]+)?'
r'(?P<prec>\.(?:\*|[0-9]+)?)?'
r'(?P<spec>[duxXosScpfg]))'
r"%(?P<good>%|"
r"(?:(?P<number>[1-9][0-9]*)\$)?"
r"(?P<width>\*|[0-9]+)?"
r"(?P<prec>\.(?:\*|[0-9]+)?)?"
r"(?P<spec>[duxXosScpfg]))"
)
@@ -337,13 +337,13 @@ def normalize_printf(text):
next_number = number()
def normalized(match):
if match.group('good') == '%':
return '%'
hidden = match.group('width') == '0'
if match.group('number'):
return '' if hidden else match.group()
if match.group("good") == "%":
return "%"
hidden = match.group("width") == "0"
if match.group("number"):
return "" if hidden else match.group()
num = next(next_number)
return '' if hidden else '%{}${}'.format(num, match.group('spec'))
return "" if hidden else "%{}${}".format(num, match.group("spec"))
return PRINTF.sub(normalized, text)
@@ -383,8 +383,7 @@ class REPLACE_IN_TEXT(Transform):
# the translation.
replacements = (
(key, ctx.evaluate(self.replacements[key]))
for index, key
in sorted(keys_indexed.items(), key=lambda x: x[0])
for index, key in sorted(keys_indexed.items(), key=lambda x: x[0])
)
# A list of PatternElements built from the legacy translation and the
@@ -413,16 +412,14 @@ class REPLACE(LegacySource):
replaced with FTL placeables using the `REPLACE_IN_TEXT` transform.
"""
def __init__(
self, path, key, replacements, **kwargs
):
def __init__(self, path, key, replacements, **kwargs):
# We default normalize_printf to False except for .properties files.
# We still allow the caller to override the default value.
normalize_printf = False
if 'normalize_printf' in kwargs:
normalize_printf = kwargs['normalize_printf']
del kwargs['normalize_printf']
elif path.endswith('.properties'):
if "normalize_printf" in kwargs:
normalize_printf = kwargs["normalize_printf"]
del kwargs["normalize_printf"]
elif path.endswith(".properties"):
normalize_printf = True
super().__init__(path, key, **kwargs)
@@ -432,8 +429,7 @@ class REPLACE(LegacySource):
def __call__(self, ctx):
element = super().__call__(ctx)
return REPLACE_IN_TEXT(
element, self.replacements,
normalize_printf=self.normalize_printf
element, self.replacements, normalize_printf=self.normalize_printf
)(ctx)
@@ -447,10 +443,10 @@ class PLURALS(LegacySource):
return an `FTL.Node` or a `Transform`. By default, the `foreach` function
creates a valid Pattern from the TextElement passed into it.
"""
DEFAULT_ORDER = ('zero', 'one', 'two', 'few', 'many', 'other')
def __init__(self, path, key, selector, foreach=Transform.pattern_of,
**kwargs):
DEFAULT_ORDER = ("zero", "one", "two", "few", "many", "other")
def __init__(self, path, key, selector, foreach=Transform.pattern_of, **kwargs):
super().__init__(path, key, **kwargs)
self.selector = selector
self.foreach = foreach
@@ -459,27 +455,18 @@ class PLURALS(LegacySource):
element = super().__call__(ctx)
selector = ctx.evaluate(self.selector)
keys = ctx.plural_categories
forms = [
FTL.TextElement(part.strip())
for part in element.value.split(';')
]
forms = [FTL.TextElement(part.strip()) for part in element.value.split(";")]
# The default CLDR form should be the last we have in DEFAULT_ORDER,
# usually `other`, but in some cases `many`. If we don't have a variant
# for that, we'll append one, using the, in CLDR order, last existing
# variant in the legacy translation. That may or may not be the last
# variant.
default_key = [
key for key in reversed(self.DEFAULT_ORDER) if key in keys
][0]
default_key = [key for key in reversed(self.DEFAULT_ORDER) if key in keys][0]
# Match keys to legacy forms in the order they are defined in Gecko's
# PluralForm.jsm. Filter out empty forms.
pairs = [
(key, var)
for key, var in zip(keys, forms)
if var.value
]
pairs = [(key, var) for key, var in zip(keys, forms) if var.value]
# A special case for legacy translations which don't define any
# plural forms.
@@ -506,17 +493,12 @@ class PLURALS(LegacySource):
# variant. Then evaluate it to a migrated FTL node.
value = ctx.evaluate(self.foreach(form))
return FTL.Variant(
key=FTL.Identifier(key),
value=value,
default=key == default_key
key=FTL.Identifier(key), value=value, default=key == default_key
)
select = FTL.SelectExpression(
selector=selector,
variants=[
createVariant(key, form)
for key, form in pairs
]
variants=[createVariant(key, form) for key, form in pairs],
)
return Transform.pattern_of(select)
@@ -561,7 +543,7 @@ class CONCAT(Transform):
# migration specs and as elements=[]. The latter is used by
# FTL.BaseNode.traverse when it recreates the traversed node using its
# attributes as kwargs.
self.elements = list(kwargs.get('elements', elements))
self.elements = list(kwargs.get("elements", elements))
# We want to make CONCAT(COPY()) equivalent to COPY() so that it's
# always safe (no-op) to wrap transforms in a CONCAT. This is used by
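
For context, how REPLACE is typically combined with the helpers when a legacy string contains printf-style arguments; the names here are hypothetical:

import fluent.syntax.ast as FTL
from fluent.migrate.helpers import VARIABLE_REFERENCE
from fluent.migrate.transforms import REPLACE

transform = REPLACE(
    "browser/browser.properties",
    "downloads.count",
    {"%1$S": VARIABLE_REFERENCE("count")},
)
# When evaluated by a MigrationContext, "%1$S" in the legacy value
# becomes the placeable { $count } in the migrated pattern.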

View file

@@ -16,7 +16,7 @@ def parse(Parser, string):
# Parse the string into the internal Context.
parser = Parser()
# compare-locales expects ASCII strings.
parser.readContents(string.encode('utf8'))
parser.readContents(string.encode("utf8"))
# Transform the parsed result which is an iterator into a dict.
return {ent.key: ent for ent in parser}
@@ -35,10 +35,7 @@ def ftl_pattern_to_json(code):
def to_json(merged_iter):
return {
path: resource.to_json()
for path, resource in merged_iter
}
return {path: resource.to_json() for path, resource in merged_iter}
LOCALIZABLE_ENTRIES = (FTL.Message, FTL.Term)
@@ -79,7 +76,7 @@ def ftl(code):
"""
# The code might be triple-quoted.
code = code.lstrip('\n')
code = code.lstrip("\n")
return textwrap.dedent(code)

View file

@@ -19,12 +19,14 @@ class BadContextAPIException(Exception):
def process_assign(node, context):
if isinstance(node.value, ast.Str):
val = node.value.s
if isinstance(node.value, ast.Constant):
val = node.value.value
elif isinstance(node.value, ast.Name):
val = context.get(node.value.id)
elif isinstance(node.value, ast.Call):
val = node.value
else:
val = None
if val is None:
return
for target in node.targets:
@@ -55,20 +57,15 @@ class Validator:
migrate_func = None
global_assigns = {}
for top_level in ast.iter_child_nodes(self.ast):
if (
isinstance(top_level, ast.FunctionDef)
and top_level.name == 'migrate'
):
if isinstance(top_level, ast.FunctionDef) and top_level.name == "migrate":
if migrate_func:
raise MigrateNotFoundException(
'Duplicate definition of migrate'
)
raise MigrateNotFoundException("Duplicate definition of migrate")
migrate_func = top_level
details = self.inspect_migrate(migrate_func, global_assigns)
if isinstance(top_level, ast.Assign):
process_assign(top_level, global_assigns)
if isinstance(top_level, (ast.Import, ast.ImportFrom)):
if 'module' in top_level._fields:
if "module" in top_level._fields:
module = top_level.module
else:
module = None
@@ -76,26 +73,19 @@ class Validator:
asname = alias.asname or alias.name
dotted = alias.name
if module:
dotted = f'{module}.{dotted}'
dotted = f"{module}.{dotted}"
global_assigns[asname] = dotted
if not migrate_func:
raise MigrateNotFoundException(
'migrate function not found'
)
raise MigrateNotFoundException("migrate function not found")
return details
def inspect_migrate(self, migrate_func, global_assigns):
if (
len(migrate_func.args.args) != 1 or
any(
getattr(migrate_func.args, arg_field)
for arg_field in migrate_func.args._fields
if arg_field != 'args'
)
if len(migrate_func.args.args) != 1 or any(
getattr(migrate_func.args, arg_field)
for arg_field in migrate_func.args._fields
if arg_field != "args"
):
raise MigrateNotFoundException(
'migrate takes only one positional argument'
)
raise MigrateNotFoundException("migrate takes only one positional argument")
arg = migrate_func.args.args[0]
if isinstance(arg, ast.Name):
ctx_var = arg.id # python 2
@@ -104,8 +94,8 @@ class MigrateAnalyzer(ast.NodeVisitor):
visitor = MigrateAnalyzer(ctx_var, global_assigns)
visitor.visit(migrate_func)
return {
'references': visitor.references,
'issues': visitor.issues,
"references": visitor.references,
"issues": visitor.issues,
}
@@ -116,7 +106,7 @@ def full_name(node, global_assigns):
node = node.value
if isinstance(node, ast.Name):
leafs.append(global_assigns.get(node.id, node.id))
return '.'.join(reversed(leafs))
return ".".join(reversed(leafs))
PATH_TYPES = (str,) + (ast.Call,)
@@ -144,11 +134,11 @@ class MigrateAnalyzer(ast.NodeVisitor):
def visit_Attribute(self, node):
if isinstance(node.value, ast.Name) and node.value.id == self.ctx_var:
if node.attr not in (
'add_transforms',
'locale',
"add_transforms",
"locale",
):
raise BadContextAPIException(
'Unexpected attribute access on {}.{}'.format(
"Unexpected attribute access on {}.{}".format(
self.ctx_var, node.attr
)
)
@@ -156,53 +146,55 @@ class MigrateAnalyzer(ast.NodeVisitor):
def visit_Call(self, node):
if (
isinstance(node.func, ast.Attribute) and
isinstance(node.func.value, ast.Name) and
node.func.value.id == self.ctx_var
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Name)
and node.func.value.id == self.ctx_var
):
return self.call_ctx(node)
dotted = full_name(node.func, self.global_assigns)
if dotted == 'fluent.migrate.helpers.transforms_from':
if dotted == "fluent.migrate.helpers.transforms_from":
return self.call_helpers_transforms_from(node)
if dotted.startswith('fluent.migrate.'):
if dotted.startswith("fluent.migrate."):
return self.call_transform(node, dotted)
self.generic_visit(node)
def call_ctx(self, node):
if node.func.attr == 'add_transforms':
if node.func.attr == "add_transforms":
return self.call_add_transforms(node)
raise BadContextAPIException(
'Unexpected call on {}.{}'.format(
self.ctx_var, node.func.attr
)
"Unexpected call on {}.{}".format(self.ctx_var, node.func.attr)
)
def call_add_transforms(self, node):
args_msg = (
'Expected arguments to {}.add_transforms: '
'target_ftl_path, reference_ftl_path, list_of_transforms'
"Expected arguments to {}.add_transforms: "
"target_ftl_path, reference_ftl_path, list_of_transforms"
).format(self.ctx_var)
ref_msg = (
'Expected second argument to {}.add_transforms: '
'reference should be string or variable with string value'
"Expected second argument to {}.add_transforms: "
"reference should be string or variable with string value"
).format(self.ctx_var)
# Just check call signature here, check actual types below
if not self.check_arguments(node, (ast.AST, ast.AST, ast.AST)):
self.issues.append({
'msg': args_msg,
'line': node.lineno,
})
self.issues.append(
{
"msg": args_msg,
"line": node.lineno,
}
)
return
in_reference = node.args[1]
if isinstance(in_reference, ast.Name):
in_reference = self.global_assigns.get(in_reference.id)
if isinstance(in_reference, ast.Str):
in_reference = in_reference.s
if isinstance(in_reference, ast.Constant):
in_reference = in_reference.value
if not isinstance(in_reference, str):
self.issues.append({
'msg': ref_msg,
'line': node.args[1].lineno,
})
self.issues.append(
{
"msg": ref_msg,
"line": node.args[1].lineno,
}
)
return
self.references.add(in_reference)
# Checked node.args[1].
@@ -212,93 +204,91 @@ class MigrateAnalyzer(ast.NodeVisitor):
self.generic_visit(node.args[2])
def call_transform(self, node, dotted):
module, called = dotted.rsplit('.', 1)
if module not in ('fluent.migrate', 'fluent.migrate.transforms'):
module, called = dotted.rsplit(".", 1)
if module not in ("fluent.migrate", "fluent.migrate.transforms"):
return
transform = getattr(transforms, called)
if not issubclass(transform, transforms.Source):
return
bad_args = f'{called} takes path and key as first two params'
bad_args = f"{called} takes path and key as first two params"
if not self.check_arguments(
node, ((ast.Str, ast.Name), (ast.Str, ast.Name),),
allow_more=True, check_kwargs=False
node,
(
(ast.Constant, ast.Name),
(ast.Constant, ast.Name),
),
allow_more=True,
check_kwargs=False,
):
self.issues.append({
'msg': bad_args,
'line': node.lineno
})
self.issues.append({"msg": bad_args, "line": node.lineno})
return
path = node.args[0]
if isinstance(path, ast.Str):
path = path.s
if isinstance(path, ast.Constant):
path = path.value
if isinstance(path, ast.Name):
path = self.global_assigns.get(path.id)
if not isinstance(path, PATH_TYPES):
self.issues.append({
'msg': bad_args,
'line': node.lineno
})
self.issues.append({"msg": bad_args, "line": node.lineno})
def call_helpers_transforms_from(self, node):
args_msg = (
'Expected arguments to transforms_from: '
'str, **substitions'
)
if not self.check_arguments(
node, (ast.Str,), check_kwargs=False
):
self.issues.append({
'msg': args_msg,
'line': node.lineno,
})
args_msg = "Expected arguments to transforms_from: " "str, **substitions"
if not self.check_arguments(node, (ast.Constant,), check_kwargs=False):
self.issues.append(
{
"msg": args_msg,
"line": node.lineno,
}
)
return
kwargs = {}
found_bad_keywords = False
for keyword in node.keywords:
v = keyword.value
if isinstance(v, ast.Str):
v = v.s
if isinstance(v, ast.Constant):
v = v.value
if isinstance(v, ast.Name):
v = self.global_assigns.get(v.id)
if isinstance(v, ast.Call):
v = 'determined at runtime'
v = "determined at runtime"
if not isinstance(v, PATH_TYPES):
msg = 'Bad keyword arg {} to transforms_from'.format(
keyword.arg
msg = "Bad keyword arg {} to transforms_from".format(keyword.arg)
self.issues.append(
{
"msg": msg,
"line": node.lineno,
}
)
self.issues.append({
'msg': msg,
'line': node.lineno,
})
found_bad_keywords = True
else:
kwargs[keyword.arg] = v
if found_bad_keywords:
return
try:
transforms = transforms_from(node.args[0].s, **kwargs)
transforms = transforms_from(node.args[0].value, **kwargs)
except MigrationError as e:
self.issues.append({
'msg': str(e),
'line': node.lineno,
})
self.issues.append(
{
"msg": str(e),
"line": node.lineno,
}
)
return
ti = TransformsInspector()
ti.visit(transforms)
self.issues.extend({
'msg': issue,
'line': node.lineno,
} for issue in set(ti.issues))
self.issues.extend(
{
"msg": issue,
"line": node.lineno,
}
for issue in set(ti.issues)
)
def check_arguments(
self, node, argspec, check_kwargs=True, allow_more=False
):
def check_arguments(self, node, argspec, check_kwargs=True, allow_more=False):
if check_kwargs and (
node.keywords or
(hasattr(node, 'kwargs') and node.kwargs)
node.keywords or (hasattr(node, "kwargs") and node.kwargs)
):
return False
if hasattr(node, 'starargs') and node.starargs:
if hasattr(node, "starargs") and node.starargs:
return False
for arg, NODE_TYPE in zip_longest(node.args, argspec):
if NODE_TYPE is None:
@@ -319,17 +309,15 @@ class TransformsInspector(Visitor):
# Source needs paths to be normalized
# https://bugzilla.mozilla.org/show_bug.cgi?id=1568199
if src != mozpath.normpath(src):
self.issues.append(
f'Source "{src}" needs to be a normalized path'
)
self.issues.append(f'Source "{src}" needs to be a normalized path')
super().generic_visit(node)
def cli():
parser = argparse.ArgumentParser()
parser.add_argument('migration')
parser.add_argument("migration")
args = parser.parse_args()
issues = Validator.validate(args.migration)['issues']
issues = Validator.validate(args.migration)["issues"]
for issue in issues:
print(issue['msg'], 'at line', issue['line'])
print(issue["msg"], "at line", issue["line"])
return 1 if issues else 0
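
The same check is available as a library call; a minimal sketch with a hypothetical recipe path:

from fluent.migrate.validator import Validator

details = Validator.validate("bug_1877105_example.py")
for issue in details["issues"]:
    print(issue["msg"], "at line", issue["line"])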

19  third_party/python/poetry.lock generated vendored
View file

@@ -495,14 +495,14 @@ files = [
[[package]]
name = "fluent-migrate"
version = "0.12.0"
version = "0.13.0"
description = "Toolchain to migrate legacy translation to Fluent."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "fluent.migrate-0.12.0-py2.py3-none-any.whl", hash = "sha256:e3564c92d1f53700e98792f1be1ff954488d431ff9f5ec290a4ab13b5de69487"},
{file = "fluent.migrate-0.12.0.tar.gz", hash = "sha256:926e69e94975521a974b206e242a479310c2cbca1865ca26bf40fa3c7a357338"},
{file = "fluent.migrate-0.13.0-py2.py3-none-any.whl", hash = "sha256:18a5c9d0c00cd50f45754a8e568d1fa57500679bdd00a98604de963cafef5a70"},
{file = "fluent.migrate-0.13.0.tar.gz", hash = "sha256:15d48d51c838167a2ace3788f34b130d40e4946f08f9f48d9495a34ac565bb1c"},
]
[package.dependencies]
@ -1129,6 +1129,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@ -1136,8 +1137,16 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@ -1154,6 +1163,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@ -1161,6 +1171,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@ -1595,4 +1606,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "65677d1751349ca40ddb3353b87d30a37a396424a743bd48e376de46c32c3687"
content-hash = "22aaa5192c95ba8c8effd4a6e39b32d356a790e72e7bae067615bbfe0c4e9896"

third_party/python/requirements.in (vendored): 2 lines changed

@@ -18,7 +18,7 @@ cram==0.7
 distro==1.8.0
 ecdsa==0.15
 esprima==4.0.1
-fluent.migrate==0.12.0
+fluent.migrate==0.13.0
 fluent.syntax==0.19.0
 # Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version.
 frozenlist==1.1.1
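
requirements.in holds the human-edited version pins; requirements.txt below is the compiled set with --hash entries, which put pip into hash-checking mode so that any downloaded artifact whose digest is not listed is rejected. A sketch of how such a file is typically consumed (illustrative only; the tree's own mach/vendoring tooling normally drives this, and the path is the in-tree one):

    import subprocess
    import sys

    # Hash-checking install: pip verifies every downloaded artifact
    # against the --hash entries in the requirements file.
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install",
         "--require-hashes", "-r", "third_party/python/requirements.txt"]
    )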

third_party/python/requirements.txt (vendored): 17 lines changed

@@ -224,9 +224,9 @@ ecdsa==0.15 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277
 esprima==4.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee
-fluent-migrate==0.12.0 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:926e69e94975521a974b206e242a479310c2cbca1865ca26bf40fa3c7a357338 \
-    --hash=sha256:e3564c92d1f53700e98792f1be1ff954488d431ff9f5ec290a4ab13b5de69487
+fluent-migrate==0.13.0 ; python_version >= "3.8" and python_version < "4.0" \
+    --hash=sha256:15d48d51c838167a2ace3788f34b130d40e4946f08f9f48d9495a34ac565bb1c \
+    --hash=sha256:18a5c9d0c00cd50f45754a8e568d1fa57500679bdd00a98604de963cafef5a70
 fluent-syntax==0.19.0 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:920326d7f46864b9758f0044e9968e3112198bc826acee16ddd8f11d359004fd \
     --hash=sha256:b352b3475fac6c6ed5f06527921f432aac073d764445508ee5218aeccc7cc5c4
@@ -460,7 +460,9 @@ python-slugify==8.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 \
     --hash=sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27
 pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
+    --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \
     --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \
+    --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \
     --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \
     --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \
     --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \
@@ -468,7 +470,10 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \
     --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \
     --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \
+    --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \
+    --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \
     --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \
+    --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \
     --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \
     --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \
     --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \
@@ -476,11 +481,15 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \
     --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \
     --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \
+    --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \
     --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \
     --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \
     --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \
+    --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \
+    --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
     --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
     --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
+    --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
     --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
     --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
     --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
@@ -493,7 +502,9 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \
     --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \
     --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \
+    --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \
     --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \
+    --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \
     --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \
     --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \
     --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \