Add test suite
Some checks failed
CI / test (3.10) (push) Has been cancelled
CI / test (3.11) (push) Has been cancelled
CI / test (3.12) (push) Has been cancelled
CI / test (3.8) (push) Has been cancelled
CI / test (3.9) (push) Has been cancelled
CI / lint (push) Has been cancelled
CI / typecheck (push) Has been cancelled
CI / build-package (push) Has been cancelled
tests/unit/test_validators.py (new file, 129 lines added)
@@ -0,0 +1,129 @@
"""Unit tests for validation logic."""

import copy
import json
import os
import tempfile

import pytest
from click.testing import CliRunner
from jsonschema import ValidationError, validate

from configforge.commands.validate import validate as validate_cmd


def validate_against_schema(config: dict, schema: dict):
    """Validate configuration against a schema and return errors."""
    errors = []
    try:
        validate(instance=config, schema=schema)
    except ValidationError as e:
        path = list(e.absolute_path) if e.absolute_path else []
        errors.append({
            "path": path,
            "message": e.message,
            "validator": e.validator,
            "validator_value": e.validator_value,
        })
    return errors


def format_validation_error(error: ValidationError):
    """Format a jsonschema ValidationError."""
    path = list(error.absolute_path) if error.absolute_path else []
    return {
        "path": path,
        "message": error.message,
        "validator": error.validator,
        "validator_value": error.validator_value,
    }


class TestValidateCommand:
    """Tests for the validate command."""

    def test_validate_valid_config(self, temp_config_file, temp_schema_file):
        """Test validating a valid configuration."""
        runner = CliRunner()
        result = runner.invoke(validate_cmd, [temp_config_file, temp_schema_file])
        assert result.exit_code == 0

    def test_validate_invalid_config(self, sample_json_config, temp_schema_file):
        """Test validating an invalid configuration."""
        # Deep-copy so mutating the nested "database" dict does not leak into the shared fixture.
        invalid_config = copy.deepcopy(sample_json_config)
        invalid_config["database"]["port"] = "invalid"

        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
            json.dump(invalid_config, f)
            filepath = f.name

        try:
            runner = CliRunner()
            result = runner.invoke(validate_cmd, [filepath, temp_schema_file])
            assert result.exit_code == 1
        finally:
            os.unlink(filepath)  # remove the temp file created with delete=False

    def test_validate_dry_run(self, temp_config_file, temp_schema_file):
        """Test dry-run mode always exits with 0."""
        runner = CliRunner()
        result = runner.invoke(validate_cmd, [temp_config_file, temp_schema_file, "--dry-run"])
        assert result.exit_code == 0

    def test_validate_json_format(self, temp_config_file, temp_schema_file):
        """Test validation with JSON output format."""
        runner = CliRunner()
        result = runner.invoke(validate_cmd, [temp_config_file, temp_schema_file, "--format", "json"])
        assert result.exit_code == 0
        assert "valid" in result.output

    def test_validate_table_format(self, temp_config_file, temp_schema_file):
        """Test validation with table output format."""
        runner = CliRunner()
        result = runner.invoke(validate_cmd, [temp_config_file, temp_schema_file, "--format", "table"])
        assert result.exit_code == 0


class TestValidationLogic:
    """Tests for validation logic functions."""

    def test_validate_against_schema_valid(self, sample_json_config, sample_json_schema):
        """Test validating valid config against schema."""
        errors = validate_against_schema(sample_json_config, sample_json_schema)
        assert len(errors) == 0

    def test_validate_against_schema_invalid_type(self, sample_json_schema):
        """Test validation catches type errors."""
        invalid_config = {
            "database": {
                "host": "localhost",
                "port": "should_be_number",
                "name": "test"
            },
            "server": {
                "host": "0.0.0.0",
                "port": 8080
            }
        }
        errors = validate_against_schema(invalid_config, sample_json_schema)
        assert len(errors) > 0

    def test_validate_against_schema_missing_required(self, sample_json_schema):
        """Test validation catches missing required fields."""
        invalid_config = {
            "database": {
                "host": "localhost"
            }
        }
        errors = validate_against_schema(invalid_config, sample_json_schema)
        assert len(errors) > 0

    def test_format_validation_error(self):
        """Test formatting validation errors."""
        schema = {"type": "string"}
        # pytest.raises makes the test fail loudly if no ValidationError is raised,
        # instead of silently passing as a bare try/except would.
        with pytest.raises(ValidationError) as excinfo:
            validate(instance=123, schema=schema)
        formatted = format_validation_error(excinfo.value)
        assert "path" in formatted
        assert "message" in formatted
        assert "validator" in formatted