Test Request: Placeholder explanations #7

https://github.com/microsoft/localizationkit/issues/7
This commit is contained in:
Poonam Baxi 2021-01-07 11:49:40 -08:00
Parent 69dd0c077b
Commit c5927347c4
4 changed files: 191 additions and 0 deletions

View file

@ -36,6 +36,13 @@ class LocalizedString:
"""
return LocalizedString._TOKEN_PATTERN.findall(self.value)
def comment_tokens(self) -> List[str]:
    """Extract every placeholder token that appears in the comment string.

    :returns: The list of tokens in the comment string
    """
    pattern = LocalizedString._TOKEN_PATTERN
    return pattern.findall(self.comment)
def __str__(self) -> str:
"""Generate and return the string representation of the object.

View file

@ -11,3 +11,4 @@ import localizationkit.tests.objectivec_alternative_tokens
import localizationkit.tests.swift_interpolation
import localizationkit.tests.token_matching
import localizationkit.tests.token_position_identifiers
import localizationkit.tests.placeholder_token_explanation

View file

@ -0,0 +1,95 @@
"""Placeholder token explanation."""
from typing import Any, Dict, List, Set
from localizationkit.tests.test_case import LocalizationTestCase
class PlaceholderTokenExplanation(LocalizationTestCase):
    """Check the placeholder tokens in strings have explanation in comments."""

    @classmethod
    def name(cls) -> str:
        """Return the unique identifier for this test case."""
        return "placeholder_token_explanation"

    @classmethod
    def default_settings(cls) -> Dict[str, Any]:
        """Return the default settings for this test case."""
        return {"always": False}

    def run_test(self) -> List[str]:
        """Check that each placeholder token in a localized string has a
        corresponding explanation token in the string's comment.

        :returns: The list of violation messages found
        """
        violations = []
        for string in self.collection.localized_strings:
            tokens = string.tokens()
            comment_tokens = string.comment_tokens()

            # A string without tokens needs no explanations, so any token
            # found in its comment is an extra one.
            if not tokens:
                if comment_tokens:
                    violations.append(f"Comment string has extra token explanations: {string}")
                continue

            if not comment_tokens:
                violations.append(f"Comment string is missing all token explanation: {string}")
                continue

            if len(comment_tokens) < len(tokens):
                violations.append(f"Comment string is missing explanation for some tokens: {string}")
                continue

            if len(comment_tokens) > len(tokens):
                violations.append(
                    f"Comment string has explanation for extra tokens that are not a part of the string: {string}"
                )
                continue

            # Single-token strings need not use positional tokens, but both
            # the string and its comment must use the plain "%@" format.
            if len(tokens) < 2:
                if "%@" not in tokens[0]:
                    violations.append(f"String missing correctly formatted token: {string}")
                elif "%@" not in comment_tokens[0]:
                    violations.append(f"String comment missing correctly formatted token: {string}")
                continue

            # Multi-token strings must use positional tokens (e.g. "%1$@") so
            # each token maps to exactly one explanation in the comment.
            positional_tokens: Set[int] = set()
            comment_positional_tokens: Set[int] = set()

            for token in tokens:
                if "$" not in token:
                    violations.append(f"String missing positional tokens: {string}")
                    continue
                # "%2$@" -> "2"; tokens come from the token pattern so the
                # prefix before "$" is assumed numeric after stripping "%".
                position = int(token.split("$")[0].replace("%", ""))
                if position in positional_tokens:
                    violations.append(f"Duplicate token position: {string}")
                    continue
                positional_tokens.add(position)

            for token in comment_tokens:
                if "$" not in token:
                    violations.append(f"Comment string missing positional tokens: {string}")
                    continue
                position = int(token.split("$")[0].replace("%", ""))
                if position in comment_positional_tokens:
                    violations.append(f"Duplicate comment token position: {string}")
                    continue
                comment_positional_tokens.add(position)

            # Positions must cover the contiguous range 1..len(tokens) with
            # no index skipped in either the string or its comment.
            for index in range(1, len(tokens) + 1):
                if index not in positional_tokens:
                    violations.append(f"Token position index skipped: {string}")
            for index in range(1, len(comment_tokens) + 1):
                if index not in comment_positional_tokens:
                    violations.append(f"Comment Token position index skipped: {string}")

        return violations

View file

@ -0,0 +1,88 @@
"""Placeholder token explanation tests."""
# pylint: disable=line-too-long
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..")))
import localizationkit
class PlaceholderTokenExplanationTests(unittest.TestCase):
    """Placeholder token explanation tests."""

    def setUp(self):
        """Load the shared test configuration from the adjacent config.toml."""
        this_folder = os.path.dirname(os.path.abspath(__file__))
        self.config_path = os.path.abspath(os.path.join(this_folder, "config.toml"))
        self.configuration = localizationkit.Configuration.from_file(self.config_path)

    def test_placeholder_token_explanation(self):
        """Test that all placeholder token explanations exists in comments"""

        def localized(value, comment):
            # All fixtures share the same key and language.
            return localizationkit.LocalizedString("Key", value, comment, "en")

        test_cases = [
            (True, localized("This is a string with no tokens", "Some comment")),
            (True, localized("This is a string with one token: %@", "Some comment %@ token explanation")),
            (
                True,
                localized(
                    "This is a string with two tokens: %1$@ %2$@",
                    "Some comment %1$@ token explanantion %2$@ token explanantion",
                ),
            ),
            (
                False,
                localized(
                    "This is a string with two tokens: %1$@ %2$@",
                    "Some comment missing all token explanation",
                ),
            ),
            (
                False,
                localized(
                    "This is a string with two tokens: %1$@ %2$@",
                    "Some comment %@ token explanation missing some token explanantion",
                ),
            ),
            (False, localized("This is a string", "Some comment %@ extra token explanation")),
            (
                False,
                localized(
                    "This is a string with two incorrect positioned tokens %1$@ %3$@",
                    "Some comment %1$@ token explanantion %2$@ token explanantion",
                ),
            ),
            (
                False,
                localized(
                    "This is a string with two tokens: %1$@ %2$@",
                    "Some comment with incorrect poistioned token explanantion %1$@ token explanation %3$@ token explanation",
                ),
            ),
        ]

        for expected_result, string in test_cases:
            collection = localizationkit.LocalizedCollection([string])
            test = localizationkit.tests.placeholder_token_explanation.PlaceholderTokenExplanation(
                self.configuration,
                collection,
            )
            outcome = test.execute()
            self.assertEqual(expected_result, outcome.succeeded())