Add keygen module
i18n_guardian/keygen/key_generator.py (163 lines, new file)
@@ -0,0 +1,163 @@
"""Key generation for translation strings."""

import re
from pathlib import Path
from typing import Dict, List, Optional


class KeyGenerator:
    """Generate translation keys based on conventions."""

    def __init__(
        self,
        style: str = "snake_case",
        prefix: Optional[str] = None,
        max_length: int = 100,
    ) -> None:
        self.style = style
        self.prefix = prefix
        self.max_length = max_length
        self._existing_keys: Dict[str, int] = {}

    def set_existing_keys(self, keys: List[str]) -> None:
        """Set existing translation keys to avoid duplicates."""
        self._existing_keys = {}
        for key in keys:
            if key in self._existing_keys:
                self._existing_keys[key] += 1
            else:
                self._existing_keys[key] = 1

    def generate_key(
        self,
        text: str,
        file_path: Optional[Path] = None,
        context: Optional[str] = None,
    ) -> str:
        """Generate a translation key for the given text."""
        base_key = self._text_to_key(text)

        if file_path:
            path_prefix = self._path_to_prefix(file_path)
            base_key = f"{path_prefix}_{base_key}"

        if context:
            context_key = self._text_to_key(context)
            base_key = f"{base_key}_{context_key}"

        if self.prefix:
            base_key = f"{self.prefix}_{base_key}"

        return self._make_unique(base_key)

    def _text_to_key(self, text: str) -> str:
        """Convert text to a valid key format."""
        normalized = text.lower().strip()

        if self.style == "snake_case":
            key = re.sub(r"[^a-z0-9]+", "_", normalized)
        elif self.style == "kebab-case":
            key = re.sub(r"[^a-z0-9]+", "-", normalized)
        elif self.style == "camelCase":
            key = self._to_camel_case(normalized)
        elif self.style == "PascalCase":
            key = self._to_pascal_case(normalized)
        else:
            key = re.sub(r"[^a-z0-9]+", "_", normalized)

        key = key.strip("_").strip("-")
        key = key[: self.max_length]

        return key

    def _path_to_prefix(self, file_path: Path) -> str:
        """Convert file path to a key prefix."""
        parts = []

        for part in file_path.parts:
            part = re.sub(r"[^a-z0-9]+", "_", part.lower())
            part = part.strip("_")
            if part and part not in ("src", "app", "components", "pages", "views"):
                parts.append(part)

        return "_".join(parts)

    def _to_camel_case(self, text: str) -> str:
        """Convert text to camelCase."""
        words = re.split(r"[^a-z0-9]+", text.lower())
        words = [w for w in words if w]

        if not words:
            return ""

        first = words[0]
        rest = [w.capitalize() for w in words[1:]]

        return first + "".join(rest)

    def _to_pascal_case(self, text: str) -> str:
        """Convert text to PascalCase."""
        words = re.split(r"[^a-z0-9]+", text.lower())
        words = [w.capitalize() for w in words if w]

        return "".join(words)

    def _make_unique(self, key: str) -> str:
        """Ensure key is unique by appending a number if needed."""
        if key not in self._existing_keys:
            self._existing_keys[key] = 0
            return key

        count = self._existing_keys[key] + 1
        new_key = f"{key}_{count}"

        while new_key in self._existing_keys:
            count += 1
            new_key = f"{key}_{count}"

        self._existing_keys[key] = count
        self._existing_keys[new_key] = 0
        return new_key


def analyze_existing_keys(translation_file: Path) -> List[str]:
    """Analyze existing translation keys from a file."""
    keys: List[str] = []

    try:
        content = translation_file.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        return keys

    if translation_file.suffix == ".json":
        import json

        try:
            data = json.loads(content)
            if isinstance(data, dict):
                keys.extend(_flatten_dict(data))
        except json.JSONDecodeError:
            pass

    elif translation_file.suffix in (".po", ".pot"):
        for match in re.finditer(r'^msgid\s+"([^"]+)"', content, re.MULTILINE):
            keys.append(match.group(1))

    return keys


def _flatten_dict(d: dict, prefix: str = "") -> List[str]:
    """Flatten a nested dictionary to get all keys."""
    keys: List[str] = []
    for k, v in d.items():
        full_key = f"{prefix}.{k}" if prefix else k
        if isinstance(v, dict):
            keys.extend(_flatten_dict(v, full_key))
        else:
            keys.append(full_key)
    return keys
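
A minimal usage sketch (not part of this commit; the sample strings, paths, and locale file are invented for illustration, and it assumes the i18n_guardian package is importable):

from pathlib import Path

from i18n_guardian.keygen.key_generator import KeyGenerator, analyze_existing_keys

# Seed the generator with keys already present in a locale file so that
# newly generated keys do not collide with them.
generator = KeyGenerator(style="snake_case", prefix="app")
generator.set_existing_keys(analyze_existing_keys(Path("locales/en.json")))

# "Save changes" found in a hypothetical src/components/settings_page.py:
key = generator.generate_key(
    "Save changes",
    file_path=Path("src/components/settings_page.py"),
)
# Should produce "app_settings_page_py_save_changes": "src" and "components"
# are dropped by _path_to_prefix, and "settings_page.py" becomes
# "settings_page_py".

# A second identical call goes through _make_unique and should receive a
# numeric suffix, e.g. "app_settings_page_py_save_changes_1".
key_again = generator.generate_key(
    "Save changes",
    file_path=Path("src/components/settings_page.py"),
)

Passing context="button" to generate_key appends a normalized form of the context to the key before the global prefix is applied, which helps disambiguate the same string used in different widgets.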