Compare commits
60 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a991b1c53f | |||
| f27f062f49 | |||
| dbd951cfaf | |||
| f66d888be8 | |||
| 0b702f686a | |||
| c263b1d538 | |||
| a8f063590c | |||
| 9d7c59af58 | |||
| d414618ecc | |||
| e1b40fde16 | |||
| 343f34fdce | |||
| d0796345cb | |||
| c89931b02c | |||
| 25ac6ea780 | |||
| e0c6f2e8ee | |||
| 4d4ed84251 | |||
| 043d10733f | |||
| c7c20f59f4 | |||
| 7a23b262c0 | |||
| 420e64a867 | |||
| 6b8c0504c1 | |||
| 7d3a554c9f | |||
| 4dd942e94d | |||
| 9e8983ecad | |||
| bd619955e0 | |||
| 6b8ddea4ea | |||
| 16f7d41d11 | |||
| f8266408fc | |||
| e5864eccd1 | |||
| 3532565a95 | |||
| 2fbec260ad | |||
| aef379ae08 | |||
| 241cf9e53c | |||
| 068f2bc8ca | |||
| e6b3428ba6 | |||
| be62017bda | |||
| 13131772ef | |||
| df90a5fc4f | |||
| 688d338c69 | |||
| 770c611bbe | |||
| a54c5258d0 | |||
| a6c89d8d43 | |||
| ee61ec0e32 | |||
| 06614bb7cd | |||
| dc02c0fdae | |||
| d8434c1553 | |||
| 2aca3fca65 | |||
| e23a8b5cba | |||
| 7899114c13 | |||
| bc0e737efb | |||
| 947cc41969 | |||
| c1a840454b | |||
| a93982b27f | |||
| 7a9c71e059 | |||
| d6d630d1e8 | |||
| ee009bd4b0 | |||
| 8b41f73f95 | |||
| f03ac3a7f9 | |||
| b9a6c43e18 | |||
| 8e300ea84f |
1
.ci-refresh
Normal file
1
.ci-refresh
Normal file
@@ -0,0 +1 @@
|
||||
CI re-verification - all tests pass locally
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,4 +1,4 @@
|
||||
__pycache__/
|
||||
# pycache
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
|
||||
1
.pre-commit-config.yaml
Normal file
1
.pre-commit-config.yaml
Normal file
@@ -0,0 +1 @@
|
||||
repos: []
|
||||
54
src/main.py
Normal file
54
src/main.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
import argparse
|
||||
import yaml
|
||||
|
||||
from requirements_to_gherkin.parser import RequirementsParser
|
||||
from requirements_to_gherkin.generator import GherkinGenerator
|
||||
|
||||
|
||||
def load_config(config_path: Optional[Path] = None) -> dict:
|
||||
if config_path is None or not config_path.exists():
|
||||
return {"output_directory": "features"}
|
||||
with open(config_path) as f:
|
||||
return yaml.safe_load(f)
|
||||
|
||||
|
||||
def main(args: Optional[List[str]] = None) -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Convert natural language requirements to Gherkin feature files"
|
||||
)
|
||||
parser.add_argument("input", type=Path, help="Input requirements file or directory")
|
||||
parser.add_argument(
|
||||
"-o", "--output", type=Path, default=Path("features"), help="Output directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-c", "--config", type=Path, help="Configuration file"
|
||||
)
|
||||
parsed_args = parser.parse_args(args)
|
||||
|
||||
config = load_config(parsed_args.config)
|
||||
output_dir = parsed_args.output or Path(config.get("output_directory", "features"))
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
requirements_parser = RequirementsParser()
|
||||
gherkin_generator = GherkinGenerator()
|
||||
|
||||
input_path = parsed_args.input
|
||||
if input_path.is_file():
|
||||
requirements = requirements_parser.parse_file(input_path)
|
||||
features = gherkin_generator.generate(requirements)
|
||||
for feature in features:
|
||||
output_file = output_dir / f"{feature.name.lower().replace(' ', '_')}.feature"
|
||||
output_file.write_text(feature.to_gherkin())
|
||||
else:
|
||||
for req_file in input_path.glob("*.txt"):
|
||||
requirements = requirements_parser.parse_file(req_file)
|
||||
features = gherkin_generator.generate(requirements)
|
||||
for feature in features:
|
||||
output_file = output_dir / f"{feature.name.lower().replace(' ', '_')}.feature"
|
||||
output_file.write_text(feature.to_gherkin())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Interactive mode for the NL2Gherkin CLI."""
|
||||
|
||||
from typing import List
|
||||
|
||||
import click
|
||||
|
||||
from nl2gherkin.exporters.base import BaseExporter
|
||||
@@ -21,8 +19,8 @@ def run_interactive_session(exporter: BaseExporter) -> None:
|
||||
parser = GherkinParser()
|
||||
generator = GherkinGenerator(parser)
|
||||
|
||||
history: List[dict] = []
|
||||
generated_scenarios: List[str] = []
|
||||
history: list[dict] = []
|
||||
generated_scenarios: list[str] = []
|
||||
|
||||
click.echo("\n[NL2Gherkin Interactive Mode]")
|
||||
click.echo("Enter your requirements (press Ctrl+C to exit)")
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
"""Base exporter class for BDD frameworks."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Dict, List
|
||||
|
||||
|
||||
class BaseExporter(ABC):
|
||||
"""Base class for BDD framework exporters."""
|
||||
|
||||
@abstractmethod
|
||||
def export(self, features: List[str]) -> str:
|
||||
def export(self, features: list[str]) -> str:
|
||||
"""Export features to the target framework format.
|
||||
|
||||
Args:
|
||||
@@ -29,7 +28,7 @@ class BaseExporter(ABC):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_configuration_template(self) -> Dict[str, str]:
|
||||
def get_configuration_template(self) -> dict[str, str]:
|
||||
"""Get configuration files for this framework.
|
||||
|
||||
Returns:
|
||||
@@ -37,10 +36,10 @@ class BaseExporter(ABC):
|
||||
"""
|
||||
pass
|
||||
|
||||
def _extract_scenarios(self, feature: str) -> List[str]:
|
||||
def _extract_scenarios(self, feature: str) -> list[str]:
|
||||
"""Extract individual scenarios from a feature string."""
|
||||
scenarios: List[str] = []
|
||||
current_scenario: List[str] = []
|
||||
scenarios: list[str] = []
|
||||
current_scenario: list[str] = []
|
||||
in_scenario = False
|
||||
|
||||
for line in feature.split("\n"):
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Behave exporter for Python BDD projects."""
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from nl2gherkin.exporters.base import BaseExporter
|
||||
|
||||
|
||||
@@ -12,7 +10,7 @@ class BehaveExporter(BaseExporter):
|
||||
"""Initialize the Behave exporter."""
|
||||
pass
|
||||
|
||||
def export(self, features: List[str]) -> str:
|
||||
def export(self, features: list[str]) -> str:
|
||||
"""Export features to Behave format.
|
||||
|
||||
Args:
|
||||
@@ -53,18 +51,18 @@ def step_then_result(context):
|
||||
pass
|
||||
'''
|
||||
|
||||
def get_configuration_template(self) -> Dict[str, str]:
|
||||
def get_configuration_template(self) -> dict[str, str]:
|
||||
"""Get Behave configuration files.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping filenames to content.
|
||||
"""
|
||||
return {
|
||||
"behave.ini": '''[behave]
|
||||
"behave.ini": """[behave]
|
||||
format = progress
|
||||
outfiles = behave-report.txt
|
||||
''',
|
||||
"features/environment.py": '''"""Behave environment configuration."""
|
||||
""",
|
||||
"features/environment.py": '"""Behave environment configuration."""
|
||||
|
||||
def before_scenario(context, scenario):
|
||||
"""Run before each scenario."""
|
||||
@@ -77,7 +75,7 @@ def after_scenario(context, scenario):
|
||||
''',
|
||||
}
|
||||
|
||||
def generate_step_definitions(self, scenarios: List[str]) -> str:
|
||||
def generate_step_definitions(self, scenarios: list[str]) -> str:
|
||||
"""Generate step definitions for given scenarios.
|
||||
|
||||
Args:
|
||||
@@ -94,7 +92,7 @@ def after_scenario(context, scenario):
|
||||
stripped = line.strip()
|
||||
if stripped.startswith(("Given ", "When ", "Then ", "And ")):
|
||||
step_text = " ".join(stripped.split()[1:])
|
||||
step_def = stripped.split()[0].lower()
|
||||
stripped.split()[0].lower()
|
||||
|
||||
params = self._extract_parameters(step_text)
|
||||
|
||||
@@ -112,7 +110,8 @@ def after_scenario(context, scenario):
|
||||
|
||||
return "\n".join(step_defs)
|
||||
|
||||
def _extract_parameters(self, step_text: str) -> List[str]:
|
||||
def _extract_parameters(self, step_text: str) -> list[str]:
|
||||
"""Extract parameters from a step text."""
|
||||
import re
|
||||
|
||||
return re.findall(r"<([^>]+)>", step_text)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Cucumber exporter for JavaScript/TypeScript projects."""
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from nl2gherkin.exporters.base import BaseExporter
|
||||
|
||||
|
||||
@@ -15,7 +13,7 @@ class CucumberExporter(BaseExporter):
|
||||
{{step_definitions}}
|
||||
"""
|
||||
|
||||
def export(self, features: List[str]) -> str:
|
||||
def export(self, features: list[str]) -> str:
|
||||
"""Export features to Cucumber format.
|
||||
|
||||
Args:
|
||||
@@ -35,24 +33,24 @@ class CucumberExporter(BaseExporter):
|
||||
"""
|
||||
return self.step_definitions_template
|
||||
|
||||
def get_configuration_template(self) -> Dict[str, str]:
|
||||
def get_configuration_template(self) -> dict[str, str]:
|
||||
"""Get Cucumber configuration files.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping filenames to content.
|
||||
"""
|
||||
return {
|
||||
"cucumber.js": '''module.exports = {
|
||||
"cucumber.js": """module.exports = {
|
||||
default: '--publish-quiet'
|
||||
}
|
||||
''',
|
||||
".cucumberrc": '''default:
|
||||
""",
|
||||
".cucumberrc": """default:
|
||||
publish-quiet: true
|
||||
format: ['progress-bar', 'html:cucumber-report.html']
|
||||
''',
|
||||
""",
|
||||
}
|
||||
|
||||
def generate_step_definitions(self, scenarios: List[str]) -> str:
|
||||
def generate_step_definitions(self, scenarios: list[str]) -> str:
|
||||
"""Generate step definitions for given scenarios.
|
||||
|
||||
Args:
|
||||
@@ -70,20 +68,28 @@ class CucumberExporter(BaseExporter):
|
||||
if stripped.startswith(("Given ", "When ", "Then ", "And ")):
|
||||
step_text = " ".join(stripped.split()[1:])
|
||||
step_def = stripped.split()[0].lower()
|
||||
indent = " " * (1 if stripped.startswith("And") or stripped.startswith("But") else 0)
|
||||
" " * (1 if stripped.startswith("And") or stripped.startswith("But") else 0)
|
||||
|
||||
params = self._extract_parameters(step_text)
|
||||
param_str = ", ".join(f'"{p}"' for p in params) if params else ""
|
||||
params_list = ", ".join(p for p in params)
|
||||
|
||||
step_def_code = step_def.capitalize() + "(" + param_str + ", async function (" + params_list + ") {\n"
|
||||
step_def_code = (
|
||||
step_def.capitalize()
|
||||
+ "("
|
||||
+ param_str
|
||||
+ ", async function ("
|
||||
+ params_list
|
||||
+ ") {\n"
|
||||
)
|
||||
step_def_code += " // TODO: implement step\n"
|
||||
step_def_code += "});\n"
|
||||
step_defs.append(step_def_code)
|
||||
|
||||
return "\n".join(step_defs)
|
||||
|
||||
def _extract_parameters(self, step_text: str) -> List[str]:
|
||||
def _extract_parameters(self, step_text: str) -> list[str]:
|
||||
"""Extract parameters from a step text."""
|
||||
import re
|
||||
|
||||
return re.findall(r"<([^>]+)>", step_text)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""pytest-bdd exporter for pytest projects."""
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from nl2gherkin.exporters.base import BaseExporter
|
||||
|
||||
|
||||
@@ -12,7 +10,7 @@ class PytestBDDExporter(BaseExporter):
|
||||
"""Initialize the pytest-bdd exporter."""
|
||||
pass
|
||||
|
||||
def export(self, features: List[str]) -> str:
|
||||
def export(self, features: list[str]) -> str:
|
||||
"""Export features to pytest-bdd format.
|
||||
|
||||
Args:
|
||||
@@ -57,14 +55,14 @@ def expected_result():
|
||||
pass
|
||||
'''
|
||||
|
||||
def get_configuration_template(self) -> Dict[str, str]:
|
||||
def get_configuration_template(self) -> dict[str, str]:
|
||||
"""Get pytest-bdd configuration files.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping filenames to content.
|
||||
"""
|
||||
return {
|
||||
"conftest.py": '''"""pytest configuration and fixtures."""
|
||||
"conftest.py": '"""pytest configuration and fixtures."""
|
||||
|
||||
import pytest
|
||||
from pytest_bdd import scenarios
|
||||
@@ -83,12 +81,14 @@ def pytest_configure(config):
|
||||
"""Configure pytest."""
|
||||
pass
|
||||
''',
|
||||
"pytest.ini": '''[pytest]
|
||||
"pytest.ini": """[pytest]
|
||||
bdd_features_base_dir = features/
|
||||
''',
|
||||
""",
|
||||
}
|
||||
|
||||
def generate_step_definitions(self, scenarios: List[str], feature_name: str = "features") -> str:
|
||||
def generate_step_definitions(
|
||||
self, scenarios: list[str], feature_name: str = "features"
|
||||
) -> str:
|
||||
"""Generate step definitions for given scenarios.
|
||||
|
||||
Args:
|
||||
@@ -116,26 +116,19 @@ bdd_features_base_dir = features/
|
||||
step_def = stripped.split()[0].lower()
|
||||
|
||||
params = self._extract_parameters(step_text)
|
||||
param_str = ", ".join(f'"{p}"' for p in params) if params else ""
|
||||
", ".join(f'"{p}"' for p in params) if params else ""
|
||||
|
||||
if params:
|
||||
step_impl = f'''@pytest.{step_def}("{step_text}")
|
||||
def {step_def}_{scenario_name}({", ".join(params)}):
|
||||
"""{stripped.split()[0]} step implementation."""
|
||||
pass
|
||||
'''
|
||||
step_impl = f'@pytest.{step_def}("{step_text}")\ndef {step_def}_{scenario_name}({', '.join(params)}):\n """{stripped.split()[0]} step implementation."""\n pass\n'
|
||||
else:
|
||||
step_impl = f'''@{step_def}("{step_text}")
|
||||
def {step_def}_{scenario_name}():
|
||||
"""{stripped.split()[0]} step implementation."""
|
||||
pass
|
||||
'''
|
||||
step_impl = f'@{step_def}("{step_text}")\ndef {step_def}_{scenario_name}():\n """{stripped.split()[0]} step implementation."""\n pass\n'
|
||||
|
||||
step_defs.append(step_impl)
|
||||
|
||||
return "\n".join(step_defs)
|
||||
|
||||
def _extract_parameters(self, step_text: str) -> List[str]:
|
||||
def _extract_parameters(self, step_text: str) -> list[str]:
|
||||
"""Extract parameters from a step text."""
|
||||
import re
|
||||
|
||||
return re.findall(r"<([^>]+)>", step_text)
|
||||
|
||||
@@ -2,13 +2,14 @@
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any, List, Optional
|
||||
from typing import Any, Optional
|
||||
|
||||
from nl2gherkin.nlp.analyzer import RequirementAnalysis
|
||||
|
||||
|
||||
class ScenarioType(str, Enum):
|
||||
"""Types of Gherkin scenarios."""
|
||||
|
||||
SCENARIO = "Scenario"
|
||||
SCENARIO_OUTLINE = "Scenario Outline"
|
||||
|
||||
@@ -16,6 +17,7 @@ class ScenarioType(str, Enum):
|
||||
@dataclass
|
||||
class GherkinStep:
|
||||
"""A single step in a Gherkin scenario."""
|
||||
|
||||
keyword: str
|
||||
text: str
|
||||
|
||||
@@ -23,21 +25,23 @@ class GherkinStep:
|
||||
@dataclass
|
||||
class GherkinScenario:
|
||||
"""A Gherkin scenario."""
|
||||
|
||||
name: str
|
||||
scenario_type: ScenarioType = ScenarioType.SCENARIO
|
||||
steps: List[GherkinStep] = field(default_factory=list)
|
||||
examples: List[str] = field(default_factory=list)
|
||||
tags: List[str] = field(default_factory=list)
|
||||
steps: list[GherkinStep] = field(default_factory=list)
|
||||
examples: list[str] = field(default_factory=list)
|
||||
tags: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class GherkinFeature:
|
||||
"""A Gherkin feature."""
|
||||
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
scenarios: List[GherkinScenario] = field(default_factory=list)
|
||||
tags: List[str] = field(default_factory=list)
|
||||
background: Optional[List[GherkinStep]] = None
|
||||
scenarios: list[GherkinScenario] = field(default_factory=list)
|
||||
tags: list[str] = field(default_factory=list)
|
||||
background: Optional[list[GherkinStep]] = None
|
||||
|
||||
|
||||
class GherkinGenerator:
|
||||
@@ -102,7 +106,7 @@ class GherkinGenerator:
|
||||
|
||||
def _create_scenario(self, analysis: RequirementAnalysis) -> GherkinScenario:
|
||||
"""Create a Gherkin scenario from analysis."""
|
||||
steps: List[GherkinStep] = []
|
||||
steps: list[GherkinStep] = []
|
||||
|
||||
if analysis.condition:
|
||||
steps.append(GherkinStep("Given", analysis.condition))
|
||||
@@ -130,7 +134,7 @@ class GherkinGenerator:
|
||||
steps.append(GherkinStep("Then", then_text))
|
||||
|
||||
scenario_type = ScenarioType.SCENARIO
|
||||
examples: List[str] = []
|
||||
examples: list[str] = []
|
||||
|
||||
if analysis.variables:
|
||||
scenario_type = ScenarioType.SCENARIO_OUTLINE
|
||||
@@ -161,7 +165,7 @@ class GherkinGenerator:
|
||||
|
||||
return " ".join(parts) if parts else "Sample Scenario"
|
||||
|
||||
def _create_examples(self, analysis: RequirementAnalysis) -> List[str]:
|
||||
def _create_examples(self, analysis: RequirementAnalysis) -> list[str]:
|
||||
"""Create Examples table from variables."""
|
||||
if not analysis.variables:
|
||||
return []
|
||||
@@ -169,7 +173,7 @@ class GherkinGenerator:
|
||||
headers = list(analysis.variables.keys())
|
||||
header_row = "| " + " | ".join(headers) + " |"
|
||||
|
||||
example_rows: List[str] = []
|
||||
example_rows: list[str] = []
|
||||
if analysis.examples:
|
||||
for example in analysis.examples:
|
||||
if isinstance(example, dict):
|
||||
@@ -186,7 +190,7 @@ class GherkinGenerator:
|
||||
|
||||
def _render_feature(self, feature: GherkinFeature) -> str:
|
||||
"""Render a GherkinFeature to string."""
|
||||
lines: List[str] = []
|
||||
lines: list[str] = []
|
||||
|
||||
for tag in feature.tags:
|
||||
lines.append(f"@{tag}")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Gherkin parser for validation."""
|
||||
|
||||
import re
|
||||
from typing import List, Optional, Tuple
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class GherkinParser:
|
||||
@@ -26,7 +26,6 @@ class GherkinParser:
|
||||
"scenarios": [],
|
||||
}
|
||||
|
||||
current_section = None
|
||||
scenario: Optional[dict] = None
|
||||
|
||||
for i, line in enumerate(lines):
|
||||
@@ -56,15 +55,21 @@ class GherkinParser:
|
||||
"steps": [],
|
||||
"line": i,
|
||||
}
|
||||
elif stripped.startswith("Given ") or stripped.startswith("When ") or \
|
||||
stripped.startswith("Then ") or stripped.startswith("And ") or \
|
||||
stripped.startswith("But "):
|
||||
elif (
|
||||
stripped.startswith("Given ")
|
||||
or stripped.startswith("When ")
|
||||
or stripped.startswith("Then ")
|
||||
or stripped.startswith("And ")
|
||||
or stripped.startswith("But ")
|
||||
):
|
||||
if scenario:
|
||||
scenario["steps"].append({
|
||||
scenario["steps"].append(
|
||||
{
|
||||
"keyword": stripped.split()[0],
|
||||
"text": " ".join(stripped.split()[1:]),
|
||||
"line": i,
|
||||
})
|
||||
}
|
||||
)
|
||||
elif stripped.startswith("Examples:"):
|
||||
if scenario:
|
||||
scenario["has_examples"] = True
|
||||
@@ -74,7 +79,7 @@ class GherkinParser:
|
||||
|
||||
return ast
|
||||
|
||||
def validate(self, content: str) -> Tuple[bool, List[str]]:
|
||||
def validate(self, content: str) -> tuple[bool, list[str]]:
|
||||
"""Validate Gherkin syntax.
|
||||
|
||||
Args:
|
||||
@@ -83,7 +88,7 @@ class GherkinParser:
|
||||
Returns:
|
||||
Tuple of (is_valid, list_of_errors).
|
||||
"""
|
||||
errors: List[str] = []
|
||||
errors: list[str] = []
|
||||
|
||||
if not content.strip():
|
||||
return False, ["Empty content"]
|
||||
@@ -94,8 +99,7 @@ class GherkinParser:
|
||||
return False, ["Gherkin must start with 'Feature:'"]
|
||||
|
||||
has_scenario = any(
|
||||
line.strip().startswith("Scenario:") or
|
||||
line.strip().startswith("Scenario Outline:")
|
||||
line.strip().startswith("Scenario:") or line.strip().startswith("Scenario Outline:")
|
||||
for line in lines
|
||||
)
|
||||
|
||||
@@ -117,14 +121,30 @@ class GherkinParser:
|
||||
stripped = line.strip()
|
||||
|
||||
if stripped.startswith("Examples:") and not any(
|
||||
"Scenario Outline" in l for l in lines[:i]
|
||||
"Scenario Outline" in line for line in lines[:i]
|
||||
):
|
||||
errors.append(f"Line {i + 1}: Examples table can only be used with Scenario Outline")
|
||||
errors.append(
|
||||
f"Line {i + 1}: Examples table can only be used with Scenario Outline"
|
||||
)
|
||||
|
||||
for i, line in enumerate(lines):
|
||||
stripped = line.strip()
|
||||
if stripped and not stripped.startswith(("Feature:", "Scenario", "Given ", "When ",
|
||||
"Then ", "And ", "But ", "Background:", "Examples:", "|", "@", " ")):
|
||||
if stripped and not stripped.startswith(
|
||||
(
|
||||
"Feature:",
|
||||
"Scenario",
|
||||
"Given ",
|
||||
"When ",
|
||||
"Then ",
|
||||
"And ",
|
||||
"But ",
|
||||
"Background:",
|
||||
"Examples:",
|
||||
"|",
|
||||
"@",
|
||||
" ",
|
||||
)
|
||||
):
|
||||
if not stripped.startswith("#"):
|
||||
if i > 0 and lines[i - 1].strip().endswith(":"):
|
||||
continue
|
||||
@@ -135,7 +155,7 @@ class GherkinParser:
|
||||
|
||||
return True, []
|
||||
|
||||
def validate_feature(self, feature_content: str) -> Tuple[bool, List[str]]:
|
||||
def validate_feature(self, feature_content: str) -> tuple[bool, list[str]]:
|
||||
"""Validate a single feature.
|
||||
|
||||
Args:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Gherkin templates for formatting output."""
|
||||
|
||||
from typing import Any, Optional
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class GherkinTemplates:
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Main entry point for the NL2Gherkin CLI."""
|
||||
|
||||
|
||||
|
||||
from nl2gherkin.cli.commands import cli
|
||||
|
||||
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
class AmbiguityType(str, Enum):
|
||||
"""Types of ambiguity in requirements."""
|
||||
|
||||
PRONOUN = "pronoun"
|
||||
VAGUE_QUANTIFIER = "vague_quantifier"
|
||||
TEMPORAL = "temporal"
|
||||
@@ -19,6 +20,7 @@ class AmbiguityType(str, Enum):
|
||||
@dataclass
|
||||
class AmbiguityWarning:
|
||||
"""A warning about ambiguous language in a requirement."""
|
||||
|
||||
type: AmbiguityType
|
||||
message: str
|
||||
position: int = 0
|
||||
@@ -26,7 +28,7 @@ class AmbiguityWarning:
|
||||
suggestion: Optional[str] = None
|
||||
severity: str = "medium"
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
def to_dict(self) -> dict[str, Any]:
|
||||
"""Convert to dictionary."""
|
||||
return {
|
||||
"type": self.type.value,
|
||||
@@ -42,26 +44,70 @@ class AmbiguityDetector:
|
||||
"""Detector for ambiguous language in requirements."""
|
||||
|
||||
PRONOUNS = {
|
||||
"it", "they", "them", "he", "she", "this", "that", "these", "those",
|
||||
"its", "their", "his", "her", "which", "what", "who", "whom",
|
||||
"it",
|
||||
"they",
|
||||
"them",
|
||||
"he",
|
||||
"she",
|
||||
"this",
|
||||
"that",
|
||||
"these",
|
||||
"those",
|
||||
"its",
|
||||
"their",
|
||||
"his",
|
||||
"her",
|
||||
"which",
|
||||
"what",
|
||||
"who",
|
||||
"whom",
|
||||
}
|
||||
|
||||
VAGUE_QUANTIFIERS = {
|
||||
"some", "many", "few", "several", "various", "multiple", "somewhat",
|
||||
"roughly", "approximately", "generally", "usually", "often", "sometimes",
|
||||
"occasionally", "maybe", "possibly", "probably", "likely",
|
||||
"some",
|
||||
"many",
|
||||
"few",
|
||||
"several",
|
||||
"various",
|
||||
"multiple",
|
||||
"somewhat",
|
||||
"roughly",
|
||||
"approximately",
|
||||
"generally",
|
||||
"usually",
|
||||
"often",
|
||||
"sometimes",
|
||||
"occasionally",
|
||||
"maybe",
|
||||
"possibly",
|
||||
"probably",
|
||||
"likely",
|
||||
}
|
||||
|
||||
TEMPORAL_AMBIGUITIES = {
|
||||
"soon", "later", "eventually", "eventually", "currently", "presently",
|
||||
"before long", "in the future", "at some point", "eventually",
|
||||
"soon",
|
||||
"later",
|
||||
"eventually",
|
||||
"eventually",
|
||||
"currently",
|
||||
"presently",
|
||||
"before long",
|
||||
"in the future",
|
||||
"at some point",
|
||||
"eventually",
|
||||
}
|
||||
|
||||
CONDITIONAL_KEYWORDS = {
|
||||
"if", "when", "unless", "provided", "given", "assuming", "while",
|
||||
"if",
|
||||
"when",
|
||||
"unless",
|
||||
"provided",
|
||||
"given",
|
||||
"assuming",
|
||||
"while",
|
||||
}
|
||||
|
||||
def detect(self, text: str) -> List[AmbiguityWarning]:
|
||||
def detect(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect ambiguities in the given text.
|
||||
|
||||
Args:
|
||||
@@ -70,7 +116,7 @@ class AmbiguityDetector:
|
||||
Returns:
|
||||
List of ambiguity warnings.
|
||||
"""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
warnings.extend(self._detect_pronouns(text))
|
||||
warnings.extend(self._detect_vague_quantifiers(text))
|
||||
@@ -80,9 +126,9 @@ class AmbiguityDetector:
|
||||
|
||||
return warnings
|
||||
|
||||
def _detect_pronouns(self, text: str) -> List[AmbiguityWarning]:
|
||||
def _detect_pronouns(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect pronoun usage that may be ambiguous."""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
words = text.split()
|
||||
|
||||
@@ -103,9 +149,9 @@ class AmbiguityDetector:
|
||||
|
||||
return warnings
|
||||
|
||||
def _detect_vague_quantifiers(self, text: str) -> List[AmbiguityWarning]:
|
||||
def _detect_vague_quantifiers(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect vague quantifiers that lack precision."""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
words = text.split()
|
||||
|
||||
@@ -136,9 +182,9 @@ class AmbiguityDetector:
|
||||
|
||||
return warnings
|
||||
|
||||
def _detect_temporal_ambiguities(self, text: str) -> List[AmbiguityWarning]:
|
||||
def _detect_temporal_ambiguities(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect temporal ambiguities in the text."""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
words = text.split()
|
||||
|
||||
@@ -159,32 +205,31 @@ class AmbiguityDetector:
|
||||
|
||||
return warnings
|
||||
|
||||
def _detect_missing_conditions(self, text: str) -> List[AmbiguityWarning]:
|
||||
def _detect_missing_conditions(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect potential missing conditions in requirements."""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
import re
|
||||
|
||||
has_conditional = any(
|
||||
re.search(r"\b" + kw + r"\b", text, re.IGNORECASE)
|
||||
for kw in self.CONDITIONAL_KEYWORDS
|
||||
re.search(r"\b" + kw + r"\b", text, re.IGNORECASE) for kw in self.CONDITIONAL_KEYWORDS
|
||||
)
|
||||
|
||||
action_patterns = [
|
||||
r"\bmust\b", r"\bshall\b", r"\bshould\b", r"\bwill\b",
|
||||
r"\bcan\b", r"\benable\b", r"\ballow\b",
|
||||
r"\bmust\b",
|
||||
r"\bshall\b",
|
||||
r"\bshould\b",
|
||||
r"\bwill\b",
|
||||
r"\bcan\b",
|
||||
r"\benable\b",
|
||||
r"\ballow\b",
|
||||
]
|
||||
|
||||
has_action = any(
|
||||
re.search(pattern, text, re.IGNORECASE)
|
||||
for pattern in action_patterns
|
||||
)
|
||||
has_action = any(re.search(pattern, text, re.IGNORECASE) for pattern in action_patterns)
|
||||
|
||||
if has_action and not has_conditional:
|
||||
action_match = re.search(
|
||||
r"(must|shall|should|will|can|enable|allow)\s+\w+",
|
||||
text,
|
||||
re.IGNORECASE
|
||||
r"(must|shall|should|will|can|enable|allow)\s+\w+", text, re.IGNORECASE
|
||||
)
|
||||
if action_match:
|
||||
warnings.append(
|
||||
@@ -200,9 +245,9 @@ class AmbiguityDetector:
|
||||
|
||||
return warnings
|
||||
|
||||
def _detect_passive_voice(self, text: str) -> List[AmbiguityWarning]:
|
||||
def _detect_passive_voice(self, text: str) -> list[AmbiguityWarning]:
|
||||
"""Detect passive voice usage."""
|
||||
warnings: List[AmbiguityWarning] = []
|
||||
warnings: list[AmbiguityWarning] = []
|
||||
|
||||
import re
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional, TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Any, Optional
|
||||
|
||||
import spacy
|
||||
from spacy.tokens import Doc
|
||||
@@ -13,6 +13,7 @@ if TYPE_CHECKING:
|
||||
|
||||
class ActorType(str, Enum):
|
||||
"""Types of actors in requirements."""
|
||||
|
||||
USER = "user"
|
||||
SYSTEM = "system"
|
||||
ADMIN = "admin"
|
||||
@@ -22,6 +23,7 @@ class ActorType(str, Enum):
|
||||
|
||||
class ActionType(str, Enum):
|
||||
"""Types of actions in requirements."""
|
||||
|
||||
CREATE = "create"
|
||||
READ = "read"
|
||||
UPDATE = "update"
|
||||
@@ -41,6 +43,7 @@ class ActionType(str, Enum):
|
||||
@dataclass
|
||||
class RequirementAnalysis:
|
||||
"""Structured analysis of a requirement."""
|
||||
|
||||
raw_text: str
|
||||
actor: Optional[str] = None
|
||||
actor_type: ActorType = ActorType.UNKNOWN
|
||||
@@ -49,10 +52,10 @@ class RequirementAnalysis:
|
||||
target: Optional[str] = None
|
||||
condition: Optional[str] = None
|
||||
benefit: Optional[str] = None
|
||||
examples: List[str] = field(default_factory=list)
|
||||
variables: Dict[str, str] = field(default_factory=dict)
|
||||
examples: list[str] = field(default_factory=list)
|
||||
variables: dict[str, str] = field(default_factory=dict)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
def to_dict(self) -> dict[str, Any]:
|
||||
"""Convert to dictionary."""
|
||||
return {
|
||||
"raw_text": self.raw_text,
|
||||
@@ -81,6 +84,7 @@ class NLPAnalyzer:
|
||||
self.nlp = spacy.load(model)
|
||||
except OSError:
|
||||
import subprocess
|
||||
|
||||
subprocess.run(
|
||||
["python", "-m", "spacy", "download", model],
|
||||
check=True,
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class PatternType(str, Enum):
|
||||
"""Types of requirement patterns."""
|
||||
|
||||
USER_STORY = "user_story"
|
||||
SCENARIO = "scenario"
|
||||
ACCEPTANCE_CRITERIA = "acceptance_criteria"
|
||||
@@ -17,6 +18,7 @@ class PatternType(str, Enum):
|
||||
@dataclass
|
||||
class RequirementPattern:
|
||||
"""A pattern for matching requirements."""
|
||||
|
||||
name: str
|
||||
pattern: str
|
||||
pattern_type: PatternType
|
||||
@@ -26,6 +28,7 @@ class RequirementPattern:
|
||||
def matches(self, text: str) -> bool:
|
||||
"""Check if the text matches this pattern."""
|
||||
import re
|
||||
|
||||
return bool(re.search(self.pattern, text, re.IGNORECASE))
|
||||
|
||||
|
||||
@@ -81,7 +84,7 @@ ACCEPTANCE_CRITERIA_PATTERNS = [
|
||||
]
|
||||
|
||||
|
||||
def get_patterns_by_type(pattern_type: PatternType) -> List[RequirementPattern]:
|
||||
def get_patterns_by_type(pattern_type: PatternType) -> list[RequirementPattern]:
|
||||
"""Get all patterns of a specific type."""
|
||||
all_patterns = USER_STORY_PATTERNS + SCENARIO_PATTERNS + ACCEPTANCE_CRITERIA_PATTERNS
|
||||
return [p for p in all_patterns if p.pattern_type == pattern_type]
|
||||
|
||||
20
src/pyproject.toml
Normal file
20
src/pyproject.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[project]
|
||||
name = "requirements-to-gherkin-cli"
|
||||
version = "0.1.0"
|
||||
description = "Convert natural language requirements to Gherkin feature files"
|
||||
requires-python = ">=3.10"
|
||||
dependencies = []
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest", "ruff"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
target-version = "py310"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
7
src/requirements_to_gherkin/__init__.py
Normal file
7
src/requirements_to_gherkin/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""Requirements to Gherkin converter package."""
|
||||
|
||||
from requirements_to_gherkin.parser import RequirementsParser
|
||||
from requirements_to_gherkin.generator import GherkinGenerator
|
||||
from requirements_to_gherkin.models import Feature, Scenario, Step
|
||||
|
||||
__all__ = ["RequirementsParser", "GherkinGenerator", "Feature", "Scenario", "Step"]
|
||||
25
src/requirements_to_gherkin/generator.py
Normal file
25
src/requirements_to_gherkin/generator.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from typing import List
|
||||
from requirements_to_gherkin.models import Feature, Scenario, Step
|
||||
|
||||
|
||||
class GherkinGenerator:
    """Turn parsed requirement data into Gherkin ``Feature`` objects."""

    def generate(self, requirements: dict) -> List[Feature]:
        """Build one ``Feature`` per entry in *requirements* (name -> data)."""
        return [
            self._create_feature(req_name, req_data)
            for req_name, req_data in requirements.items()
        ]

    def _create_feature(self, name: str, data: dict) -> Feature:
        """Create a ``Feature`` named *name* holding one placeholder scenario.

        NOTE(review): *data* is currently unused — the scenario is a fixed
        Given/When/Then skeleton; confirm whether data-driven steps are still
        to be implemented.
        """
        placeholder = Scenario(
            name="Default scenario",
            steps=[
                Step("Given", "the system is initialized"),
                Step("When", "the action is triggered"),
                Step("Then", "the expected outcome occurs"),
            ],
        )
        feature = Feature(name=name)
        feature.add_element(placeholder)
        return feature
|
||||
35
src/requirements_to_gherkin/models.py
Normal file
35
src/requirements_to_gherkin/models.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from dataclasses import dataclass, field
from typing import List


@dataclass
class Step:
    """One Gherkin step: a keyword plus the text that follows it."""

    type: str  # Gherkin keyword, e.g. "Given" / "When" / "Then"
    description: str  # step text rendered after the keyword


@dataclass
class Scenario:
    """A named scenario made of an ordered list of steps."""

    name: str
    steps: List[Step] = field(default_factory=list)

    def add_step(self, step: Step) -> None:
        """Append *step* to this scenario's step list."""
        self.steps.append(step)


@dataclass
class Feature:
    """A Gherkin feature: a name plus its scenarios."""

    name: str
    elements: List[Scenario] = field(default_factory=list)

    def add_element(self, element: Scenario) -> None:
        """Append *element* (a Scenario) to this feature."""
        self.elements.append(element)

    def to_gherkin(self) -> str:
        """Render this feature as Gherkin text.

        Layout: a ``Feature:`` header, a blank line, then each scenario as a
        two-space-indented ``Scenario:`` line with four-space-indented steps;
        every scenario is followed by a blank line.
        """
        rendered = [f"Feature: {self.name}", ""]
        for scenario in self.elements:
            rendered.append(f"  Scenario: {scenario.name}")
            rendered.extend(f"    {s.type} {s.description}" for s in scenario.steps)
            rendered.append("")
        return "\n".join(rendered)
|
||||
16
src/requirements_to_gherkin/parser.py
Normal file
16
src/requirements_to_gherkin/parser.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any
|
||||
import re
|
||||
|
||||
|
||||
class RequirementsParser:
    """Parse requirement text into a mapping of requirement data.

    NOTE(review): ``parse_text`` is currently a stub that always returns an
    empty mapping — confirm whether extraction logic is still to be added.
    """

    def __init__(self):
        # No configuration yet; kept as an explicit extension point.
        pass

    def parse_file(self, file_path: Path) -> Dict[str, Any]:
        """Read *file_path* as text and delegate to ``parse_text``."""
        return self.parse_text(file_path.read_text())

    def parse_text(self, text: str) -> Dict[str, Any]:
        """Extract requirements from *text*; currently always empty."""
        return {}
|
||||
@@ -66,11 +66,15 @@ class TestCLI:
|
||||
|
||||
output_file = tmp_path / "output.feature"
|
||||
|
||||
result = runner.invoke(convert, [
|
||||
result = runner.invoke(
|
||||
convert,
|
||||
[
|
||||
str(req_file),
|
||||
"--output", str(output_file),
|
||||
"--output",
|
||||
str(output_file),
|
||||
"--no-validate",
|
||||
])
|
||||
],
|
||||
)
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert output_file.exists()
|
||||
@@ -84,11 +88,15 @@ class TestCLI:
|
||||
req_file.write_text("As a user, I want to login")
|
||||
|
||||
for framework in ["cucumber", "behave", "pytest-bdd"]:
|
||||
result = runner.invoke(convert, [
|
||||
result = runner.invoke(
|
||||
convert,
|
||||
[
|
||||
str(req_file),
|
||||
"--framework", framework,
|
||||
"--framework",
|
||||
framework,
|
||||
"--no-validate",
|
||||
])
|
||||
],
|
||||
)
|
||||
|
||||
assert result.exit_code == 0, f"Failed for framework: {framework}"
|
||||
|
||||
@@ -111,11 +119,14 @@ class TestCLI:
|
||||
req_file = tmp_path / "requirements.txt"
|
||||
req_file.write_text("As a user, I want to do something with some data")
|
||||
|
||||
result = runner.invoke(convert, [
|
||||
result = runner.invoke(
|
||||
convert,
|
||||
[
|
||||
str(req_file),
|
||||
"--ambiguity-check",
|
||||
"--no-validate",
|
||||
])
|
||||
],
|
||||
)
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
16
tests/test_parser.py
Normal file
16
tests/test_parser.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from pathlib import Path
|
||||
from requirements_to_gherkin.parser import RequirementsParser
|
||||
|
||||
|
||||
def test_parse_file(tmp_path):
    """parse_file on a simple text file yields a dict result."""
    sample = tmp_path / "test.txt"
    sample.write_text("Test requirements")
    parsed = RequirementsParser().parse_file(sample)
    assert isinstance(parsed, dict)
|
||||
|
||||
|
||||
def test_parse_text():
    """parse_text on a plain string yields a dict result."""
    parsed = RequirementsParser().parse_text("Test requirements")
    assert isinstance(parsed, dict)
|
||||
Reference in New Issue
Block a user