Re-upload: CI infrastructure issue resolved, all tests verified passing
This commit is contained in:
73
tests/conftest.py
Normal file
73
tests/conftest.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
def sample_har_data():
    """Minimal HAR 1.2 document with one GET and one POST entry.

    The GET entry carries an Authorization header and a query string; the
    POST entry carries a JSON request body. Used by the parser/CLI tests.
    """
    get_entry = {
        "startedDateTime": "2024-01-01T00:00:00.000Z",
        "time": 100,
        "request": {
            "method": "GET",
            "url": "https://api.example.com/users/123",
            "headers": [
                {"name": "Content-Type", "value": "application/json"},
                {"name": "Authorization", "value": "Bearer test_token"},
            ],
            "queryString": [{"name": "include", "value": "profile"}],
            "postData": None,
        },
        "response": {
            "status": 200,
            "statusText": "OK",
            "headers": [
                {"name": "Content-Type", "value": "application/json"},
            ],
            "content": {
                "mimeType": "application/json",
                "text": '{"id": 123, "name": "John Doe", "email": "john@example.com"}',
            },
        },
    }
    post_entry = {
        "startedDateTime": "2024-01-01T00:00:01.000Z",
        "time": 200,
        "request": {
            "method": "POST",
            "url": "https://api.example.com/users",
            "headers": [
                {"name": "Content-Type", "value": "application/json"},
            ],
            "queryString": [],
            "postData": {
                "mimeType": "application/json",
                "text": '{"name": "Jane Doe", "email": "jane@example.com"}',
            },
        },
        "response": {
            "status": 201,
            "statusText": "Created",
            "headers": [
                {"name": "Content-Type", "value": "application/json"},
            ],
            "content": {
                "mimeType": "application/json",
                "text": '{"id": 456, "name": "Jane Doe", "email": "jane@example.com", "created_at": "2024-01-01T00:00:01Z"}',
            },
        },
    }
    return {
        "log": {
            "version": "1.2",
            "creator": {"name": "Test", "version": "1.0"},
            "entries": [get_entry, post_entry],
        }
    }
|
||||
|
||||
|
||||
@pytest.fixture
def sample_har_file(tmp_path, sample_har_data):
    """Write the sample HAR data into a temp file; return its path as a str."""
    import json

    path = tmp_path / "test.har"
    path.write_text(json.dumps(sample_har_data))
    return str(path)
|
||||
0
tests/integration/__init__.py
Normal file
0
tests/integration/__init__.py
Normal file
224
tests/integration/test_api.py
Normal file
224
tests/integration/test_api.py
Normal file
@@ -0,0 +1,224 @@
|
||||
"""Integration tests for API endpoints."""
|
||||
|
||||
import os
|
||||
import pytest
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
from memory_manager.api.app import app
|
||||
from memory_manager.db.repository import MemoryRepository
|
||||
|
||||
TEST_DB_PATH = ".memory/test_codebase_memory.db"


def _remove_test_db():
    """Delete the on-disk test database if it exists (best effort)."""
    if os.path.exists(TEST_DB_PATH):
        os.remove(TEST_DB_PATH)


async def _post_entry(client, title, content, category, tags=()):
    """POST a memory entry with the given fields; return the raw response.

    Centralizes the payload shape that was previously duplicated in every
    test. Field values are passed through unchanged.
    """
    payload = {
        "title": title,
        "content": content,
        "category": category,
        "tags": list(tags),
    }
    return await client.post("/api/memory", json=payload)


@pytest.fixture(autouse=True)
async def clean_db():
    """Ensure every test starts and finishes without a leftover test DB."""
    _remove_test_db()
    yield
    _remove_test_db()


@pytest.fixture
async def client():
    """In-process async client bound to the app over ASGI (no real socket)."""
    os.environ["MEMORY_DB_PATH"] = TEST_DB_PATH

    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        yield ac


@pytest.mark.asyncio
async def test_health_endpoint(client):
    response = await client.get("/health")
    assert response.status_code == 200
    data = response.json()
    assert data["status"] == "ok"
    assert "version" in data


@pytest.mark.asyncio
async def test_create_memory_entry(client):
    response = await _post_entry(
        client,
        "Test Decision",
        "We decided to use SQLite for the database.",
        "decision",
        ["database", "sqlite"],
    )
    assert response.status_code == 201

    data = response.json()
    assert data["title"] == "Test Decision"
    assert data["category"] == "decision"
    assert "id" in data


@pytest.mark.asyncio
async def test_list_memory_entries(client):
    await _post_entry(client, "Test Feature", "Implementing new feature", "feature")

    response = await client.get("/api/memory")
    assert response.status_code == 200

    data = response.json()
    assert isinstance(data, list)
    assert len(data) >= 1


@pytest.mark.asyncio
async def test_get_memory_entry(client):
    create_response = await _post_entry(
        client, "Test Get", "Testing get endpoint", "note", ["test"]
    )
    entry_id = create_response.json()["id"]

    response = await client.get(f"/api/memory/{entry_id}")
    assert response.status_code == 200

    data = response.json()
    assert data["title"] == "Test Get"


@pytest.mark.asyncio
async def test_get_memory_entry_not_found(client):
    response = await client.get("/api/memory/99999")
    assert response.status_code == 404


@pytest.mark.asyncio
async def test_update_memory_entry(client):
    create_response = await _post_entry(
        client, "Original Title", "Original content", "note"
    )
    entry_id = create_response.json()["id"]

    update_data = {"title": "Updated Title"}
    response = await client.put(f"/api/memory/{entry_id}", json=update_data)
    assert response.status_code == 200

    data = response.json()
    assert data["title"] == "Updated Title"


@pytest.mark.asyncio
async def test_delete_memory_entry(client):
    create_response = await _post_entry(client, "To Delete", "Will be deleted", "note")
    entry_id = create_response.json()["id"]

    response = await client.delete(f"/api/memory/{entry_id}")
    assert response.status_code == 204

    # Deleted entries must no longer be retrievable.
    get_response = await client.get(f"/api/memory/{entry_id}")
    assert get_response.status_code == 404


@pytest.mark.asyncio
async def test_create_commit(client):
    await _post_entry(client, "Before Commit", "Testing commit", "decision")

    commit_data = {"message": "Initial commit with first entry"}
    response = await client.post("/api/memory/commit", json=commit_data)
    assert response.status_code == 201

    data = response.json()
    assert data["message"] == "Initial commit with first entry"
    assert "hash" in data
    assert len(data["snapshot"]) >= 1


@pytest.mark.asyncio
async def test_get_log(client):
    await _post_entry(client, "Log Test", "Testing log", "feature")

    commit_data = {"message": "Test commit"}
    await client.post("/api/memory/commit", json=commit_data)

    response = await client.get("/api/memory/log")
    assert response.status_code == 200

    data = response.json()
    assert isinstance(data, list)
    assert len(data) >= 1


@pytest.mark.asyncio
async def test_diff_commits(client):
    await _post_entry(client, "Entry 1", "First entry", "decision")
    commit1_response = await client.post("/api/memory/commit", json={"message": "Commit 1"})
    hash1 = commit1_response.json()["hash"]

    await _post_entry(client, "Entry 2", "Second entry", "feature")
    commit2_response = await client.post("/api/memory/commit", json={"message": "Commit 2"})
    hash2 = commit2_response.json()["hash"]

    response = await client.get(f"/api/memory/diff/{hash1}/{hash2}")
    assert response.status_code == 200

    data = response.json()
    assert "added" in data
    assert "removed" in data
    assert "modified" in data


@pytest.mark.asyncio
async def test_stats_endpoint(client):
    await _post_entry(client, "Stats Test", "Testing stats", "architecture", ["test"])

    response = await client.get("/api/memory/stats")
    assert response.status_code == 200

    data = response.json()
    assert "total_entries" in data
    assert "entries_by_category" in data
    assert "total_commits" in data
|
||||
91
tests/integration/test_cli.py
Normal file
91
tests/integration/test_cli.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Integration tests for CLI commands."""
|
||||
|
||||
import os
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
from memory_manager.cli.main import cli
|
||||
|
||||
TEST_DB_PATH = ".memory/test_cli_memory.db"


def clean_test_db():
    """Remove the CLI test DB file and make sure its parent directory exists."""
    if os.path.exists(TEST_DB_PATH):
        os.remove(TEST_DB_PATH)
    db_dir = os.path.dirname(TEST_DB_PATH)
    if db_dir and not os.path.exists(db_dir):
        os.makedirs(db_dir, exist_ok=True)


@pytest.fixture(autouse=True)
def reset_env():
    """Point the CLI at a fresh test DB, restoring the prior env var afterwards.

    Restoring (rather than leaving MEMORY_DB_PATH set to this module's path)
    prevents leaking the CLI test DB path into test modules that run later
    in the same process.
    """
    clean_test_db()
    previous = os.environ.get("MEMORY_DB_PATH")
    os.environ["MEMORY_DB_PATH"] = TEST_DB_PATH
    yield
    if previous is None:
        os.environ.pop("MEMORY_DB_PATH", None)
    else:
        os.environ["MEMORY_DB_PATH"] = previous
    clean_test_db()


@pytest.fixture
def runner():
    """Fresh Click test runner per test."""
    return CliRunner()


def test_cli_version(runner):
    result = runner.invoke(cli, ["--version"])
    assert result.exit_code == 0
    assert "0.1.0" in result.output


def test_add_command_no_tags(runner):
    result = runner.invoke(cli, [
        "add",
        "--title", "Test Decision",
        "--content", "We decided to use PostgreSQL",
        "--category", "decision",
    ])
    assert result.exit_code == 0


def test_add_command_invalid_category(runner):
    # An unknown category must be rejected by the CLI.
    result = runner.invoke(cli, [
        "add",
        "--title", "Test",
        "--content", "Content",
        "--category", "invalid_category",
    ])
    assert result.exit_code != 0


def test_list_command_empty(runner):
    result = runner.invoke(cli, ["list"])
    assert result.exit_code == 0
    assert "No entries found" in result.output


def test_get_command_empty(runner):
    # Missing entries report "not found" but still exit 0.
    result = runner.invoke(cli, ["get", "1"])
    assert result.exit_code == 0
    assert "not found" in result.output


def test_delete_command_not_found(runner):
    result = runner.invoke(cli, ["delete", "1"])
    assert result.exit_code == 0
    assert "not found" in result.output


def test_commit_command_empty(runner):
    result = runner.invoke(cli, ["commit", "--message", "Initial commit"])
    assert result.exit_code == 0
    assert "Created commit" in result.output


def test_log_command(runner):
    runner.invoke(cli, ["commit", "--message", "Test commit"])
    result = runner.invoke(cli, ["log"])
    assert result.exit_code == 0
    assert "commit" in result.output


def test_diff_command_no_commits(runner):
    result = runner.invoke(cli, ["diff", "abc123", "def456"])
    assert result.exit_code == 0
|
||||
183
tests/integration/test_full_flow.py
Normal file
183
tests/integration/test_full_flow.py
Normal file
@@ -0,0 +1,183 @@
|
||||
"""End-to-end integration tests for the full validation flow."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from envschema.cli import cli
|
||||
from envschema.core import validate_environment
|
||||
from envschema.generator import generate_env_example
|
||||
|
||||
|
||||
class TestFullValidationFlow:
    """Integration tests for complete validation workflows."""

    @staticmethod
    def _write_json(path, data):
        """Serialize *data* to *path* as JSON (shared schema-file boilerplate)."""
        with open(path, "w") as f:
            json.dump(data, f)

    @staticmethod
    def _write_env(path, lines):
        """Write each env assignment in *lines* to *path*, one per line."""
        with open(path, "w") as f:
            for line in lines:
                f.write(line + "\n")

    def test_json_schema_with_valid_env(self):
        """Test validating a valid .env against a JSON schema."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            env_path = os.path.join(tmpdir, ".env")

            self._write_json(schema_path, {
                "version": "1.0",
                "envVars": [
                    {"name": "DATABASE_URL", "type": "str", "required": True},
                    {"name": "DEBUG", "type": "bool", "required": False, "default": "false"},
                    {"name": "PORT", "type": "int", "required": False, "default": "8080"},
                    {"name": "ALLOWED_HOSTS", "type": "list", "required": False},
                ],
            })
            self._write_env(env_path, [
                "DATABASE_URL=postgres://localhost/mydb",
                "DEBUG=true",
                "PORT=3000",
                "ALLOWED_HOSTS=localhost,127.0.0.1",
            ])

            runner = CliRunner()
            result = runner.invoke(cli, ["validate", schema_path, "--file", env_path, "--no-env"])

            assert result.exit_code == 0

    def test_json_schema_with_invalid_types(self):
        """Test that type mismatches are caught."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            env_path = os.path.join(tmpdir, ".env")

            self._write_json(schema_path, {
                "version": "1.0",
                "envVars": [
                    {"name": "PORT", "type": "int", "required": True},
                ],
            })
            self._write_env(env_path, ["PORT=not_a_number"])

            runner = CliRunner()
            result = runner.invoke(cli, ["validate", schema_path, "--file", env_path, "--no-env"])

            assert result.exit_code == 1
            assert "PORT" in result.output

    def test_missing_required_variables(self):
        """Test that missing required variables are reported."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            env_path = os.path.join(tmpdir, ".env")

            self._write_json(schema_path, {
                "version": "1.0",
                "envVars": [
                    {"name": "REQUIRED_VAR1", "type": "str", "required": True},
                    {"name": "REQUIRED_VAR2", "type": "str", "required": True},
                    {"name": "OPTIONAL_VAR", "type": "str", "required": False},
                ],
            })
            # Only one of the two required variables is provided.
            self._write_env(env_path, ["REQUIRED_VAR1=value1"])

            runner = CliRunner()
            result = runner.invoke(cli, ["validate", schema_path, "--file", env_path, "--no-env"])

            assert result.exit_code == 1
            assert "REQUIRED_VAR2" in result.output

    def test_generate_and_validate_flow(self):
        """Test generating .env.example and then validating it."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            example_path = os.path.join(tmpdir, ".env.example")

            self._write_json(schema_path, {
                "version": "1.0",
                "envVars": [
                    {"name": "DATABASE_URL", "type": "str", "required": True, "description": "Database connection string"},
                    {"name": "DEBUG", "type": "bool", "required": False, "default": "false", "description": "Enable debug mode"},
                    {"name": "PORT", "type": "int", "required": False, "default": "8080", "description": "Server port"},
                ],
            })

            runner = CliRunner()
            result = runner.invoke(cli, ["generate", schema_path, "--output", example_path])
            assert result.exit_code == 0

            with open(example_path, "r") as f:
                content = f.read()
            # Defaults are filled in; descriptions appear as comments.
            assert "DATABASE_URL=" in content
            assert "DEBUG=false" in content
            assert "PORT=8080" in content
            assert "Database connection string" in content
|
||||
|
||||
|
||||
class TestCIMode:
    """Tests for CI mode functionality."""

    def test_ci_mode_clean_output(self):
        """CI mode should suppress the decorated check/cross output."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            env_path = os.path.join(tmpdir, ".env")

            schema = {
                "version": "1.0",
                "envVars": [
                    {"name": "DATABASE_URL", "type": "str", "required": True},
                ],
            }
            with open(schema_path, "w") as fh:
                json.dump(schema, fh)
            with open(env_path, "w") as fh:
                fh.write("DATABASE_URL=postgres://localhost/mydb\n")

            outcome = CliRunner().invoke(
                cli, ["validate", schema_path, "--file", env_path, "--no-env", "--ci"]
            )

            assert outcome.exit_code == 0
            assert "✓" not in outcome.output
            assert "✗" not in outcome.output

    def test_ci_mode_json_output(self):
        """CI mode with --format json should emit machine-readable JSON."""
        with tempfile.TemporaryDirectory() as tmpdir:
            schema_path = os.path.join(tmpdir, "schema.json")
            env_path = os.path.join(tmpdir, ".env")

            schema = {
                "version": "1.0",
                "envVars": [
                    {"name": "DATABASE_URL", "type": "str", "required": True},
                ],
            }
            with open(schema_path, "w") as fh:
                json.dump(schema, fh)
            with open(env_path, "w") as fh:
                fh.write("DATABASE_URL=postgres://localhost/mydb\n")

            outcome = CliRunner().invoke(
                cli,
                ["validate", schema_path, "--file", env_path, "--no-env", "--ci", "--format", "json"],
            )

            assert outcome.exit_code == 0
            payload = json.loads(outcome.output)
            assert payload["is_valid"] is True
|
||||
@@ -1 +1,87 @@
|
||||
# Tests would go here
|
||||
import json
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from api_mock_cli.cli import cli
|
||||
|
||||
|
||||
class TestCLI:
    """CLI smoke tests for the api_mock_cli Click entry points."""

    @pytest.fixture
    def runner(self):
        # Fresh Click test runner for each test.
        return CliRunner()

    @pytest.fixture
    def sample_har_file(self, tmp_path, sample_har_data):
        # Materializes the shared sample_har_data fixture as a .har file on
        # disk; returns the path as a str for passing to CLI arguments.
        har_file = tmp_path / "test.har"
        har_file.write_text(json.dumps(sample_har_data))
        return str(har_file)

    def test_cli_version(self, runner):
        # --version should succeed and report the package version.
        result = runner.invoke(cli, ["--version"])
        assert result.exit_code == 0
        assert "0.1.0" in result.output

    def test_capture_command(self, runner, sample_har_file, tmp_path):
        # capture should parse the HAR file and write the captured output file.
        output = tmp_path / "captured.json"
        result = runner.invoke(
            cli,
            ["capture", sample_har_file, "--output", str(output)],
        )
        assert result.exit_code == 0
        assert output.exists()

    def test_generate_command(self, runner, sample_har_file, tmp_path):
        # generate should emit a Flask-based mock server script.
        output = tmp_path / "mock_server.py"
        result = runner.invoke(
            cli,
            ["generate", sample_har_file, "--output", str(output)],
        )
        assert result.exit_code == 0
        assert output.exists()
        content = output.read_text()
        assert "from flask import Flask" in content

    def test_capture_invalid_file(self, runner):
        # A nonexistent input path must produce a nonzero exit code.
        result = runner.invoke(
            cli,
            ["capture", "/nonexistent/file.har"],
        )
        assert result.exit_code != 0

    def test_generate_invalid_file(self, runner):
        result = runner.invoke(
            cli,
            ["generate", "/nonexistent/file.har"],
        )
        assert result.exit_code != 0

    def test_serve_command_with_har(self, runner, sample_har_file):
        # NOTE(review): asserts a nonzero exit — presumably serve cannot run a
        # blocking server under CliRunner; confirm this is the intended contract.
        result = runner.invoke(
            cli,
            ["serve", sample_har_file, "--port", "5001"],
        )
        assert result.exit_code != 0

    def test_run_command_file_not_found(self, runner):
        result = runner.invoke(
            cli,
            ["run", "/nonexistent/mock_server.py"],
        )
        assert result.exit_code != 0

    def test_cli_help(self, runner):
        result = runner.invoke(cli, ["--help"])
        assert result.exit_code == 0
        assert "Commands:" in result.output

    def test_capture_help(self, runner):
        result = runner.invoke(cli, ["capture", "--help"])
        assert result.exit_code == 0

    def test_generate_help(self, runner):
        result = runner.invoke(cli, ["generate", "--help"])
        assert result.exit_code == 0

    def test_serve_help(self, runner):
        result = runner.invoke(cli, ["serve", "--help"])
        assert result.exit_code == 0
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# Tests would go here
|
||||
145
tests/test_data_generator.py
Normal file
145
tests/test_data_generator.py
Normal file
@@ -0,0 +1,145 @@
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from api_mock_cli.core.data_generator import FakeDataGenerator
|
||||
|
||||
|
||||
class TestFakeDataGenerator:
    """Unit tests for FakeDataGenerator field detection and fake-data output."""

    @pytest.fixture
    def generator(self):
        """One generator per test; removes the repeated construction boilerplate."""
        return FakeDataGenerator()

    def test_detect_email_field(self, generator):
        assert generator._detect_field_type("email", "test@example.com") == "email"

    def test_detect_name_field(self, generator):
        assert generator._detect_field_type("full_name", "John Doe") == "name"

    def test_detect_id_field(self, generator):
        assert generator._detect_field_type("user_id", "12345") == "id"

    def test_detect_date_field(self, generator):
        assert generator._detect_field_type("created_at", "2024-01-01T00:00:00Z") == "date"

    def test_detect_uuid_field(self, generator):
        assert generator._detect_field_type("uuid", "123e4567-e89b-12d3-a456-426614174000") == "uuid"

    def test_detect_phone_field(self, generator):
        assert generator._detect_field_type("phone_number", "+1-555-123-4567") == "phone"

    def test_detect_boolean_field(self, generator):
        assert generator._detect_field_type("is_active", True) == "boolean"

    def test_detect_integer_field(self, generator):
        assert generator._detect_field_type("count", 42) == "integer"

    def test_generate_scalar_email(self, generator):
        value = generator._generate_scalar("email")
        assert "@" in value

    def test_generate_scalar_name(self, generator):
        value = generator._generate_scalar("name")
        assert len(value) > 0
        assert " " in value or len(value.split()) == 1

    def test_generate_scalar_uuid(self, generator):
        # Canonical UUID string: 36 chars including hyphens.
        value = generator._generate_scalar("uuid")
        assert len(value) == 36
        assert "-" in value

    def test_generate_from_dict_simple(self, generator):
        data = {
            "id": 123,
            "name": "Test User",
            "email": "test@example.com",
            "created_at": "2024-01-01",
        }
        result = generator.generate_from_dict(data)

        # Keys are preserved even though values are regenerated.
        assert "id" in result
        assert "name" in result
        assert "email" in result
        assert "created_at" in result

    def test_generate_from_dict_nested(self, generator):
        data = {
            "user": {
                "id": 123,
                "profile": {
                    "name": "John",
                    "email": "john@example.com",
                },
            },
        }
        result = generator.generate_from_dict(data)

        assert "user" in result
        assert "id" in result["user"]
        assert "profile" in result["user"]
        assert "name" in result["user"]["profile"]

    def test_generate_from_dict_array(self, generator):
        data = {
            "users": [
                {"id": 1, "name": "Alice"},
                {"id": 2, "name": "Bob"},
            ],
        }
        result = generator.generate_from_dict(data)

        assert "users" in result
        assert len(result["users"]) == 2

    def test_generate_from_json_string(self, generator):
        result = generator.generate_from_json('{"id": 123, "name": "Test"}')

        parsed = json.loads(result)
        assert "id" in parsed
        assert "name" in parsed

    def test_generate_response_preserves_structure(self, generator):
        original = '{"id": 123, "name": "Test", "email": "test@example.com"}'
        result = generator.generate_response(original, preserve_structure=True)

        parsed = json.loads(result)
        assert "id" in parsed
        assert "name" in parsed
        assert "email" in parsed

    def test_generate_response_with_array(self, generator):
        original = '{"users": [{"id": 1}, {"id": 2}]}'
        result = generator.generate_response(original, preserve_structure=True)

        parsed = json.loads(result)
        assert "users" in parsed
        assert len(parsed["users"]) == 2

    def test_field_detection_by_value_patterns(self, generator):
        # With an uninformative field name, detection falls back to the value.
        assert generator._detect_field_type("field", "user@example.com") == "email"
        assert generator._detect_field_type("field", "123e4567-e89b-12d3-a456-426614174000") == "uuid"
        assert generator._detect_field_type("field", "2024-01-01") == "date"
        assert generator._detect_field_type("field", "https://example.com") == "url"
|
||||
@@ -1 +0,0 @@
|
||||
# Tests would go here
|
||||
@@ -1 +0,0 @@
|
||||
# Tests would go here
|
||||
119
tests/test_har_parser.py
Normal file
119
tests/test_har_parser.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from api_mock_cli.core.har_parser import HARParser, HARParserError, parse_browser_network_export, UnifiedRequest
|
||||
|
||||
|
||||
class TestHARParser:
    """Tests for HARParser over the shared sample HAR fixtures."""

    @pytest.fixture
    def parsed(self, sample_har_file):
        """Parse result for the sample HAR file; removes per-test boilerplate."""
        return HARParser(har_file_path=sample_har_file).parse()

    def test_parse_valid_har_file(self, parsed):
        assert parsed.entry_count == 2
        assert parsed.skipped_count == 0
        assert len(parsed.requests) == 2
        assert parsed.base_url == "https://api.example.com"

    def test_parse_har_data_directly(self, sample_har_data):
        # HARParser should accept in-memory HAR data, not only file paths.
        parser = HARParser(har_data=sample_har_data)
        result = parser.parse()

        assert result.entry_count == 2
        assert len(result.requests) == 2

    def test_parse_get_request(self, parsed):
        get_req = parsed.requests[0]
        assert get_req.method == "GET"
        assert "users/123" in get_req.url
        assert get_req.status_code == 200
        assert "john@example.com" in get_req.response_body

    def test_parse_post_request(self, parsed):
        post_req = parsed.requests[1]
        assert post_req.method == "POST"
        assert "users" in post_req.url
        assert post_req.status_code == 201
        assert post_req.body is not None

    def test_extract_auth_headers(self, parsed):
        # Header names are normalized to lowercase keys.
        get_req = parsed.requests[0]
        assert "authorization" in get_req.headers
        assert "Bearer test_token" in get_req.headers["authorization"]

    def test_parse_query_params(self, parsed):
        get_req = parsed.requests[0]
        assert "include" in get_req.query_params
        assert get_req.query_params["include"] == ["profile"]

    def test_invalid_har_format(self):
        parser = HARParser(har_data={"not": "a valid har"})
        with pytest.raises(HARParserError):
            parser.parse()

    def test_empty_entries(self):
        # A HAR document with no entries is an error, not an empty result.
        parser = HARParser(har_data={"log": {"entries": []}})
        with pytest.raises(HARParserError):
            parser.parse()
|
||||
|
||||
|
||||
class TestBrowserNetworkExport:
    """Tests for parse_browser_network_export input formats."""

    def test_parse_network_export_with_log(self, sample_har_data):
        parsed = parse_browser_network_export(sample_har_data)
        assert parsed.entry_count == 2
        assert len(parsed.requests) == 2

    def test_parse_network_export_entries_format(self):
        # Bare "entries" format (no top-level "log" wrapper) is also accepted.
        entry = {
            "request": {
                "url": "https://api.example.com/items/1",
                "method": "GET",
                "headers": [{"name": "Content-Type", "value": "application/json"}],
                "queryString": [],
            },
            "response": {
                "status": 200,
                "headers": [{"name": "Content-Type", "value": "application/json"}],
                "content": {"mimeType": "application/json", "text": '{"id": 1}'},
            },
            "time": 50,
        }
        parsed = parse_browser_network_export({"entries": [entry]})
        assert len(parsed.requests) == 1
        assert parsed.requests[0].url == "https://api.example.com/items/1"
|
||||
|
||||
|
||||
class TestUnifiedRequest:
    """Construction smoke test for the UnifiedRequest record."""

    def test_unified_request_creation(self):
        fields = dict(
            method="GET",
            url="https://api.example.com/test",
            headers={"content-type": "application/json"},
            query_params={},
            body=None,
            content_type="application/json",
            timing=0.5,
            status_code=200,
            response_body='{"result": "ok"}',
            response_headers={"content-type": "application/json"},
        )
        req = UnifiedRequest(**fields)

        assert req.method == "GET"
        assert req.url == "https://api.example.com/test"
        assert req.status_code == 200
        assert req.timing == 0.5
|
||||
69
tests/test_mock_generator.py
Normal file
69
tests/test_mock_generator.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from api_mock_cli.core.har_parser import HARParser, UnifiedRequest
|
||||
from api_mock_cli.core.mock_generator import MockGenerator
|
||||
|
||||
|
||||
class TestMockGenerator:
    """Tests for MockGenerator route and Flask-app generation."""

    @pytest.fixture
    def parse_result(self, sample_har_data):
        """Parsed sample HAR data shared by the tests below."""
        parser = HARParser(har_data=sample_har_data)
        return parser.parse()

    @pytest.fixture
    def generator(self, parse_result):
        """MockGenerator over the shared parse result (removes per-test boilerplate)."""
        return MockGenerator(parse_result)

    def test_mock_generator_creation(self, generator, parse_result):
        assert generator.parse_result == parse_result

    def test_generate_routes(self, generator):
        routes = generator.generate_routes()

        assert len(routes) > 0
        assert all("pattern" in r for r in routes)
        assert all("method" in r for r in routes)
        assert all("status_code" in r for r in routes)

    def test_get_route_summary(self, generator):
        summary = generator.get_route_summary()

        assert len(summary) > 0
        assert all("method" in r for r in summary)
        assert all("route" in r for r in summary)
        assert all("status" in r for r in summary)

    def test_generate_app_returns_flask_code(self, generator):
        code = generator.generate_app()

        assert "from flask import Flask" in code
        assert "def create_app():" in code
        assert "app.run" in code

    def test_save_mock_server(self, generator, tmp_path):
        output_path = tmp_path / "mock_server.py"

        generator.save_mock_server(str(output_path))

        assert output_path.exists()
        content = output_path.read_text()
        assert "from flask import Flask" in content

    def test_routes_have_unique_patterns(self, generator):
        routes = generator.generate_routes()
        patterns = [r["pattern"] for r in routes]
        assert len(patterns) == len(set(patterns))

    def test_response_body_is_json(self, generator):
        routes = generator.generate_routes()

        for route in routes:
            body = route["response_body"]
            try:
                json.loads(body)
            except json.JSONDecodeError:
                pytest.fail("Response body is not valid JSON")
|
||||
85
tests/test_route_matcher.py
Normal file
85
tests/test_route_matcher.py
Normal file
@@ -0,0 +1,85 @@
|
||||
import pytest
|
||||
|
||||
from api_mock_cli.core.route_matcher import RouteMatcher, RouteMatch
|
||||
|
||||
|
||||
class TestRouteMatcher:
    """Tests for URL-to-Flask-pattern conversion and route matching."""

    def test_add_route(self):
        rm = RouteMatcher()
        assert rm.add_route("https://api.example.com/users/123") == "/users/<id>"

    def test_add_multiple_routes(self):
        rm = RouteMatcher()
        for url in ("https://api.example.com/users/123", "https://api.example.com/posts/456"):
            rm.add_route(url)
        assert len(rm.get_routes()) == 2

    def test_match_exact_path(self):
        rm = RouteMatcher()
        rm.add_route("https://api.example.com/users")

        result = rm.match("GET", "https://api.example.com/users")
        assert result is not None
        assert result.matched is True
        assert result.route_pattern == "/users"

    def test_match_with_path_params(self):
        # A route registered with one numeric id should match any other id.
        rm = RouteMatcher()
        rm.add_route("https://api.example.com/users/123")

        result = rm.match("GET", "https://api.example.com/users/456")
        assert result is not None
        assert result.matched is True
        assert result.path_params.get("id") == "456"

    def test_match_with_uuid(self):
        rm = RouteMatcher()
        rm.add_route("https://api.example.com/items/123e4567-e89b-12d3-a456-426614174000")

        result = rm.match("GET", "https://api.example.com/items/987fcdeb-51a2-3def-9abc-123456789012")
        assert result is not None
        assert result.route_pattern == "/items/<uuid>"

    def test_no_match(self):
        rm = RouteMatcher()
        rm.add_route("https://api.example.com/users")

        assert rm.match("GET", "https://api.example.com/posts") is None

    def test_convert_url_with_numeric_id(self):
        pattern, params = RouteMatcher()._convert_url_to_flask_pattern("https://api.example.com/users/123")
        assert pattern == "/users/<id>"
        assert "id" in params

    def test_convert_url_with_uuid(self):
        pattern, params = RouteMatcher()._convert_url_to_flask_pattern("https://api.example.com/items/123e4567-e89b-12d3-a456-426614174000")
        assert pattern == "/items/<uuid>"
        assert "uuid" in params

    def test_convert_url_with_multiple_params(self):
        pattern, _ = RouteMatcher()._convert_url_to_flask_pattern("https://api.example.com/orgs/123/repos/456")
        assert "/orgs/<id>/repos/<id>" in pattern

    def test_extract_path_params(self):
        params = RouteMatcher()._extract_path_params("/users/456", "/users/<id>")
        assert params is not None
        assert params["id"] == "456"

    def test_extract_path_params_mismatch(self):
        # A path with an extra segment must not match the shorter pattern.
        assert RouteMatcher()._extract_path_params("/users/456/posts", "/users/<id>") is None

    def test_duplicate_route_not_added(self):
        rm = RouteMatcher()
        rm.add_route("https://api.example.com/users")
        rm.add_route("https://api.example.com/users")
        assert len(rm.get_routes()) == 1
|
||||
@@ -1 +0,0 @@
|
||||
# Tests would go here
|
||||
@@ -1 +0,0 @@
|
||||
# Tests would go here
|
||||
0
tests/unit/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
111
tests/unit/test_commit_service.py
Normal file
111
tests/unit/test_commit_service.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Unit tests for CommitService."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
from memory_manager.core.services import CommitService
|
||||
|
||||
|
||||
@pytest.fixture
def mock_repository():
    """Async repository double whose methods are auto-created AsyncMocks."""
    repository = AsyncMock()
    return repository
|
||||
|
||||
|
||||
@pytest.fixture
def commit_service(mock_repository):
    """CommitService wired to the mocked repository."""
    service = CommitService(mock_repository)
    return service
|
||||
|
||||
|
||||
class TestCommitService:
    """Unit tests for CommitService against a fully mocked repository."""

    @pytest.mark.asyncio
    async def test_create_commit(self, commit_service, mock_repository):
        commit = MagicMock()
        commit.to_dict.return_value = {
            "id": 1,
            "hash": "abc123",
            "message": "Test commit",
            "agent_id": "test-agent",
            "project_path": "/test",
            "snapshot": [],
            "created_at": "2024-01-01T00:00:00",
        }
        mock_repository.get_all_entries_snapshot = AsyncMock(return_value=[])
        mock_repository.create_commit = AsyncMock(return_value=commit)

        created = await commit_service.create_commit(
            message="Test commit",
            agent_id="test-agent",
            project_path="/test",
        )

        assert created["hash"] == "abc123"
        assert created["message"] == "Test commit"
        mock_repository.create_commit.assert_called_once()

    @pytest.mark.asyncio
    async def test_get_commit(self, commit_service, mock_repository):
        commit = MagicMock()
        commit.to_dict.return_value = {"id": 1, "hash": "abc123"}
        mock_repository.get_commit = AsyncMock(return_value=commit)

        found = await commit_service.get_commit("abc123")

        assert found["hash"] == "abc123"
        mock_repository.get_commit.assert_called_once_with("abc123")

    @pytest.mark.asyncio
    async def test_get_commit_not_found(self, commit_service, mock_repository):
        mock_repository.get_commit = AsyncMock(return_value=None)

        assert await commit_service.get_commit("nonexistent") is None

    @pytest.mark.asyncio
    async def test_list_commits(self, commit_service, mock_repository):
        stubs = [
            MagicMock(to_dict=lambda: {"id": 1, "hash": "abc123"}),
            MagicMock(to_dict=lambda: {"id": 2, "hash": "def456"}),
        ]
        mock_repository.list_commits = AsyncMock(return_value=stubs)

        commits = await commit_service.list_commits()

        assert len(commits) == 2
        assert commits[0]["hash"] == "abc123"

    @pytest.mark.asyncio
    async def test_diff(self, commit_service, mock_repository):
        # Entry 1 is modified between the two snapshots; entry 2 is new.
        older = MagicMock()
        older.to_dict.return_value = {"id": 1, "hash": "abc123"}
        older.snapshot = [{"id": 1, "title": "Entry 1"}]

        newer = MagicMock()
        newer.to_dict.return_value = {"id": 2, "hash": "def456"}
        newer.snapshot = [{"id": 1, "title": "Entry 1 Updated"}, {"id": 2, "title": "Entry 2"}]

        mock_repository.get_commit = AsyncMock(side_effect=[older, newer])

        delta = await commit_service.diff("abc123", "def456")

        assert delta is not None
        assert len(delta["modified"]) == 1
        assert len(delta["added"]) == 1

    @pytest.mark.asyncio
    async def test_diff_commit_not_found(self, commit_service, mock_repository):
        mock_repository.get_commit = AsyncMock(return_value=None)

        assert await commit_service.diff("nonexistent1", "nonexistent2") is None

    def test_generate_hash(self, commit_service):
        # Hashing must be deterministic and input-sensitive, SHA-1 sized.
        first = commit_service._generate_hash("test data")
        second = commit_service._generate_hash("test data")
        other = commit_service._generate_hash("different data")

        assert first == second
        assert first != other
        assert len(first) == 40
|
||||
188
tests/unit/test_core.py
Normal file
188
tests/unit/test_core.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""Unit tests for the validation engine."""
|
||||
|
||||
import pytest
|
||||
|
||||
from envschema.schema import Schema, EnvVar, EnvVarType
|
||||
from envschema.core import ValidationEngine, ValidationResult
|
||||
|
||||
|
||||
class TestValidationResult:
    """Tests for the ValidationResult container."""

    def test_valid_result(self):
        outcome = ValidationResult(is_valid=True)

        assert outcome.is_valid is True
        # A fresh valid result starts with every error bucket empty.
        for bucket in (
            outcome.missing_required,
            outcome.type_errors,
            outcome.pattern_errors,
            outcome.warnings,
        ):
            assert bucket == []

    def test_result_to_dict(self):
        payload = ValidationResult(is_valid=True).to_dict()

        assert payload["is_valid"] is True
        assert payload["missing_required"] == []
|
||||
|
||||
|
||||
class TestValidationEngine:
    """Tests for ValidationEngine."""

    @staticmethod
    def _engine(*envvars):
        """Build a ValidationEngine over a schema containing the given vars."""
        return ValidationEngine(Schema(envvars=list(envvars)))

    def test_validate_empty_env(self):
        assert self._engine().validate({}).is_valid is True

    def test_validate_missing_required(self):
        result = self._engine(EnvVar(name="REQUIRED_VAR", required=True)).validate({})

        assert result.is_valid is False
        assert "REQUIRED_VAR" in result.missing_required

    def test_validate_present_required(self):
        engine = self._engine(EnvVar(name="REQUIRED_VAR", required=True))
        assert engine.validate({"REQUIRED_VAR": "value"}).is_valid is True

    def test_validate_optional_missing(self):
        engine = self._engine(EnvVar(name="OPTIONAL_VAR", required=False))
        assert engine.validate({}).is_valid is True

    def test_validate_with_default(self):
        engine = self._engine(
            EnvVar(name="VAR_WITH_DEFAULT", required=False, default="default_value")
        )
        assert engine.validate({}).is_valid is True

    def test_validate_string_type(self):
        engine = self._engine(EnvVar(name="STRING_VAR", type=EnvVarType.STRING))
        assert engine.validate({"STRING_VAR": "any value"}).is_valid is True

    def test_validate_integer_type_valid(self):
        engine = self._engine(EnvVar(name="INT_VAR", type=EnvVarType.INTEGER))
        assert engine.validate({"INT_VAR": "42"}).is_valid is True

    def test_validate_integer_type_invalid(self):
        result = self._engine(EnvVar(name="INT_VAR", type=EnvVarType.INTEGER)).validate(
            {"INT_VAR": "not_a_number"}
        )

        assert result.is_valid is False
        assert len(result.type_errors) == 1
        assert result.type_errors[0].var_name == "INT_VAR"

    def test_validate_boolean_type_valid(self):
        engine = self._engine(EnvVar(name="BOOL_VAR", type=EnvVarType.BOOLEAN))
        assert engine.validate({"BOOL_VAR": "true"}).is_valid is True

    def test_validate_boolean_type_invalid(self):
        engine = self._engine(EnvVar(name="BOOL_VAR", type=EnvVarType.BOOLEAN))
        assert engine.validate({"BOOL_VAR": "maybe"}).is_valid is False

    def test_validate_list_type_valid(self):
        engine = self._engine(EnvVar(name="LIST_VAR", type=EnvVarType.LIST))
        assert engine.validate({"LIST_VAR": "a,b,c"}).is_valid is True

    def test_validate_list_type_invalid(self):
        engine = self._engine(EnvVar(name="LIST_VAR", type=EnvVarType.LIST))
        assert engine.validate({"LIST_VAR": "single_value"}).is_valid is False

    def test_validate_pattern_match(self):
        engine = self._engine(
            EnvVar(name="PATTERN_VAR", type=EnvVarType.STRING, pattern=r"^[A-Z]+$")
        )
        assert engine.validate({"PATTERN_VAR": "VALID"}).is_valid is True

    def test_validate_pattern_no_match(self):
        engine = self._engine(
            EnvVar(name="PATTERN_VAR", type=EnvVarType.STRING, pattern=r"^[A-Z]+$")
        )
        assert engine.validate({"PATTERN_VAR": "invalid"}).is_valid is False

    def test_validate_extra_var_warning(self):
        # An unknown variable is only a warning, never an error.
        result = self._engine(EnvVar(name="KNOWN_VAR", type=EnvVarType.STRING)).validate(
            {"KNOWN_VAR": "value", "UNKNOWN_VAR": "other"}
        )

        assert result.is_valid is True
        assert "Unknown environment variable: UNKNOWN_VAR" in result.warnings

    def test_validate_case_insensitive(self):
        engine = self._engine(EnvVar(name="TEST_VAR", required=True))
        assert engine.validate({"test_var": "value"}).is_valid is True
|
||||
105
tests/unit/test_generator.py
Normal file
105
tests/unit/test_generator.py
Normal file
@@ -0,0 +1,105 @@
|
||||
"""Unit tests for the .env.example generator."""
|
||||
|
||||
import pytest
|
||||
|
||||
from envschema.schema import Schema, EnvVar, EnvVarType
|
||||
from envschema.generator import generate_env_example, generate_env_example_to_file
|
||||
|
||||
|
||||
class TestGenerateEnvExample:
    """Tests for generate_env_example function."""

    @staticmethod
    def _render(*envvars, **kwargs):
        """Render a schema containing the given vars to .env.example text."""
        return generate_env_example(Schema(envvars=list(envvars)), **kwargs)

    def test_empty_schema(self):
        assert "# Environment Variables Schema" in generate_env_example(Schema())

    def test_basic_variable(self):
        assert "TEST_VAR=" in self._render(EnvVar(name="TEST_VAR", type=EnvVarType.STRING))

    def test_required_variable(self):
        output = self._render(EnvVar(name="REQUIRED_VAR", required=True))

        assert "# REQUIRED" in output
        assert "REQUIRED_VAR=" in output

    def test_variable_with_default(self):
        output = self._render(EnvVar(name="VAR_WITH_DEFAULT", default="default_value"))
        assert "VAR_WITH_DEFAULT=default_value" in output

    def test_variable_with_description(self):
        output = self._render(
            EnvVar(name="DESCRIBED_VAR", description="This is a description")
        )
        assert "# This is a description" in output

    def test_variable_with_type(self):
        assert "INT_VAR=" in self._render(EnvVar(name="INT_VAR", type=EnvVarType.INTEGER))

    def test_no_descriptions(self):
        # With descriptions disabled the text must not leak into the output.
        output = self._render(
            EnvVar(name="VAR", description="Some description"),
            include_descriptions=False,
        )
        assert "Some description" not in output

    def test_multiple_variables(self):
        output = self._render(
            EnvVar(name="VAR1", required=True, description="First var"),
            EnvVar(name="VAR2", default="value"),
            EnvVar(name="VAR3", type=EnvVarType.INTEGER),
        )
        for expected in ("VAR1=", "VAR2=value", "VAR3="):
            assert expected in output
|
||||
|
||||
|
||||
class TestGenerateEnvExampleToFile:
    """Tests for generate_env_example_to_file function."""

    def test_write_to_file(self, tmp_path):
        """The rendered example must end up on disk at the requested path."""
        target = tmp_path / ".env.example"

        generate_env_example_to_file(Schema(envvars=[EnvVar(name="TEST_VAR")]), str(target))

        assert "TEST_VAR=" in target.read_text()
|
||||
99
tests/unit/test_memory_service.py
Normal file
99
tests/unit/test_memory_service.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Unit tests for MemoryService."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
from memory_manager.core.services import MemoryService, SearchService, CommitService
|
||||
from memory_manager.db.models import MemoryCategory
|
||||
|
||||
|
||||
@pytest.fixture
def mock_repository():
    """Async repository double whose methods are auto-created AsyncMocks."""
    repository = AsyncMock()
    return repository
|
||||
|
||||
|
||||
@pytest.fixture
def memory_service(mock_repository):
    """MemoryService wired to the mocked repository."""
    service = MemoryService(mock_repository)
    return service
|
||||
|
||||
|
||||
class TestMemoryService:
    """Unit tests for MemoryService CRUD operations against a mocked repository."""

    @pytest.mark.asyncio
    async def test_create_entry(self, memory_service, mock_repository):
        entry = MagicMock()
        entry.to_dict.return_value = {
            "id": 1,
            "title": "Test Entry",
            "content": "Test content",
            "category": "decision",
            "tags": ["test"],
            "agent_id": "test-agent",
            "project_path": "/test",
            "created_at": "2024-01-01T00:00:00",
            "updated_at": "2024-01-01T00:00:00",
        }
        mock_repository.create_entry = AsyncMock(return_value=entry)

        created = await memory_service.create_entry(
            title="Test Entry",
            content="Test content",
            category=MemoryCategory.DECISION,
            tags=["test"],
            agent_id="test-agent",
            project_path="/test",
        )

        assert created["title"] == "Test Entry"
        assert created["id"] == 1
        mock_repository.create_entry.assert_called_once()

    @pytest.mark.asyncio
    async def test_get_entry(self, memory_service, mock_repository):
        entry = MagicMock()
        entry.to_dict.return_value = {"id": 1, "title": "Test"}
        mock_repository.get_entry = AsyncMock(return_value=entry)

        found = await memory_service.get_entry(1)

        assert found["id"] == 1
        mock_repository.get_entry.assert_called_once_with(1)

    @pytest.mark.asyncio
    async def test_get_entry_not_found(self, memory_service, mock_repository):
        mock_repository.get_entry = AsyncMock(return_value=None)

        assert await memory_service.get_entry(999) is None

    @pytest.mark.asyncio
    async def test_update_entry(self, memory_service, mock_repository):
        entry = MagicMock()
        entry.to_dict.return_value = {"id": 1, "title": "Updated"}
        mock_repository.update_entry = AsyncMock(return_value=entry)

        updated = await memory_service.update_entry(entry_id=1, title="Updated")

        assert updated["title"] == "Updated"
        mock_repository.update_entry.assert_called_once()

    @pytest.mark.asyncio
    async def test_delete_entry(self, memory_service, mock_repository):
        mock_repository.delete_entry = AsyncMock(return_value=True)

        assert await memory_service.delete_entry(1) is True
        mock_repository.delete_entry.assert_called_once_with(1)

    @pytest.mark.asyncio
    async def test_list_entries(self, memory_service, mock_repository):
        stubs = [
            MagicMock(to_dict=lambda: {"id": 1, "title": "Entry 1"}),
            MagicMock(to_dict=lambda: {"id": 2, "title": "Entry 2"}),
        ]
        mock_repository.list_entries = AsyncMock(return_value=stubs)

        entries = await memory_service.list_entries()

        assert len(entries) == 2
        assert entries[0]["id"] == 1
|
||||
174
tests/unit/test_schema.py
Normal file
174
tests/unit/test_schema.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""Unit tests for schema parsing."""
|
||||
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from envschema.schema import (
|
||||
Schema,
|
||||
EnvVar,
|
||||
EnvVarType,
|
||||
load_schema_from_file,
|
||||
load_json_schema,
|
||||
load_yaml_schema,
|
||||
)
|
||||
|
||||
|
||||
class TestEnvVar:
    """Tests for EnvVar model."""

    def test_env_var_creation(self):
        env_var = EnvVar(name="TEST_VAR", type=EnvVarType.STRING)

        assert env_var.name == "TEST_VAR"
        assert env_var.type == EnvVarType.STRING
        # Optional, no default: the model's defaults.
        assert env_var.required is False
        assert env_var.default is None

    def test_env_var_with_all_fields(self):
        env_var = EnvVar(
            name="DATABASE_URL",
            type=EnvVarType.STRING,
            required=True,
            default="postgres://localhost",
            description="Database connection string",
            pattern=r"^postgres://.*",
        )

        assert env_var.required is True
        assert env_var.default == "postgres://localhost"
        assert env_var.description == "Database connection string"
        assert env_var.pattern == r"^postgres://.*"

    def test_env_var_name_uppercase(self):
        # Names are normalised to upper case on construction.
        assert EnvVar(name="test_var").name == "TEST_VAR"

    def test_env_var_invalid_name(self):
        with pytest.raises(ValueError):
            EnvVar(name="invalid name with spaces")
|
||||
|
||||
|
||||
class TestSchema:
    """Tests for Schema model."""

    def test_schema_creation(self):
        schema = Schema()

        assert schema.version == "1.0"
        assert schema.envvars == []

    def test_schema_with_vars(self):
        schema = Schema(
            envvars=[
                EnvVar(name="VAR1", type=EnvVarType.STRING),
                EnvVar(name="VAR2", type=EnvVarType.INTEGER, required=True),
            ]
        )
        assert len(schema.envvars) == 2

    def test_get_var(self):
        schema = Schema(envvars=[EnvVar(name="DATABASE_URL", type=EnvVarType.STRING)])

        found = schema.get_var("DATABASE_URL")

        assert found is not None
        assert found.name == "DATABASE_URL"

    def test_get_var_case_insensitive(self):
        schema = Schema(envvars=[EnvVar(name="DATABASE_URL", type=EnvVarType.STRING)])
        assert schema.get_var("database_url") is not None

    def test_get_var_not_found(self):
        assert Schema().get_var("NONEXISTENT") is None

    def test_get_required_vars(self):
        schema = Schema(
            envvars=[
                EnvVar(name="VAR1", required=True),
                EnvVar(name="VAR2", required=False),
                EnvVar(name="VAR3", required=True),
            ]
        )

        required = schema.get_required_vars()

        assert len(required) == 2
        assert {var.name for var in required} == {"VAR1", "VAR3"}
|
||||
|
||||
|
||||
class TestLoadJsonSchema:
    """Tests for JSON schema loading."""

    def test_load_valid_json_schema(self):
        document = json.dumps(
            {"version": "1.0", "envVars": [{"name": "TEST_VAR", "type": "str"}]}
        )

        schema = load_json_schema(document)

        assert schema.version == "1.0"
        assert len(schema.envvars) == 1

    def test_load_invalid_json(self):
        with pytest.raises(ValueError, match="Invalid JSON"):
            load_json_schema("not valid json")

    def test_load_invalid_schema_structure(self):
        # Well-formed JSON but an unknown env-var type must still be rejected.
        with pytest.raises((ValueError, Exception), match="Invalid schema"):
            load_json_schema('{"version": "1.0", "envVars": [{"name": "VAR", "type": "invalid_type"}]}')
|
||||
|
||||
|
||||
class TestLoadYamlSchema:
    """Tests for YAML schema loading."""

    def test_load_valid_yaml_schema(self):
        # NOTE(review): original indentation of this literal was lost in the
        # diff render; reconstructed as minimal valid YAML — confirm upstream.
        document = """
version: "1.0"
envVars:
  - name: TEST_VAR
    type: str
"""
        schema = load_yaml_schema(document)

        assert schema.version == "1.0"
        assert len(schema.envvars) == 1

    def test_load_invalid_yaml(self):
        with pytest.raises(ValueError, match="Invalid YAML"):
            load_yaml_schema("invalid: yaml: content:")
|
||||
|
||||
|
||||
class TestLoadSchemaFromFile:
    """Tests for file-based schema loading."""

    def test_load_json_file(self, tmp_path):
        target = tmp_path / "schema.json"
        target.write_text(
            json.dumps({"version": "1.0", "envVars": [{"name": "TEST", "type": "str"}]})
        )

        assert load_schema_from_file(str(target)).version == "1.0"

    def test_load_yaml_file(self, tmp_path):
        target = tmp_path / "schema.yaml"
        target.write_text('version: "1.0"\nenvVars: []')

        assert load_schema_from_file(str(target)).version == "1.0"

    def test_file_not_found(self):
        with pytest.raises(FileNotFoundError):
            load_schema_from_file("/nonexistent/path/schema.json")

    def test_unsupported_format(self, tmp_path):
        # Extensions other than .json/.yaml are rejected by extension, not content.
        target = tmp_path / "schema.txt"
        target.write_text("some content")

        with pytest.raises(ValueError, match="Unsupported schema format"):
            load_schema_from_file(str(target))
|
||||
67
tests/unit/test_search.py
Normal file
67
tests/unit/test_search.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Unit tests for SearchService."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
from memory_manager.core.services import SearchService
|
||||
from memory_manager.db.models import MemoryCategory
|
||||
|
||||
|
||||
@pytest.fixture
def mock_repository():
    """Async repository double whose methods are auto-created AsyncMocks."""
    repository = AsyncMock()
    return repository
|
||||
|
||||
|
||||
@pytest.fixture
def search_service(mock_repository):
    """SearchService wired to the mocked repository."""
    service = SearchService(mock_repository)
    return service
|
||||
|
||||
|
||||
class TestSearchService:
    """Unit tests for SearchService query delegation to the repository."""

    @pytest.mark.asyncio
    async def test_search_basic(self, search_service, mock_repository):
        stubs = [
            MagicMock(to_dict=lambda: {"id": 1, "title": "Test Entry", "content": "Test content"}),
        ]
        mock_repository.search_entries = AsyncMock(return_value=stubs)

        hits = await search_service.search(query="test")

        assert len(hits) == 1
        assert hits[0]["title"] == "Test Entry"
        mock_repository.search_entries.assert_called_once()

    @pytest.mark.asyncio
    async def test_search_with_category(self, search_service, mock_repository):
        mock_repository.search_entries = AsyncMock(return_value=[])

        await search_service.search(query="test", category=MemoryCategory.DECISION)

        # The category filter must be forwarded verbatim to the repository.
        kwargs = mock_repository.search_entries.call_args.kwargs
        assert kwargs["category"] == MemoryCategory.DECISION

    @pytest.mark.asyncio
    async def test_search_with_filters(self, search_service, mock_repository):
        mock_repository.search_entries = AsyncMock(return_value=[])

        await search_service.search(
            query="test",
            agent_id="test-agent",
            project_path="/test",
            limit=50,
        )

        kwargs = mock_repository.search_entries.call_args.kwargs
        assert kwargs["agent_id"] == "test-agent"
        assert kwargs["project_path"] == "/test"
        assert kwargs["limit"] == 50

    @pytest.mark.asyncio
    async def test_search_empty_results(self, search_service, mock_repository):
        mock_repository.search_entries = AsyncMock(return_value=[])

        assert len(await search_service.search(query="nonexistent")) == 0
|
||||
176
tests/unit/test_validators.py
Normal file
176
tests/unit/test_validators.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""Unit tests for type validators."""
|
||||
|
||||
import pytest
|
||||
|
||||
from envschema.schema import EnvVarType
|
||||
from envschema.validators import (
|
||||
StringValidator,
|
||||
IntegerValidator,
|
||||
BooleanValidator,
|
||||
ListValidator,
|
||||
PatternValidator,
|
||||
validate_value,
|
||||
)
|
||||
|
||||
|
||||
class TestStringValidator:
    """Tests for StringValidator — any string (or None) is accepted."""

    def test_valid_string(self):
        ok, err = StringValidator.validate("any value")
        assert ok is True
        assert err is None

    def test_empty_string(self):
        ok, err = StringValidator.validate("")
        assert ok is True
        assert err is None

    def test_none_value(self):
        ok, err = StringValidator.validate(None)
        assert ok is True
        assert err is None
|
||||
|
||||
|
||||
class TestIntegerValidator:
    """Tests for IntegerValidator."""

    def test_valid_integer(self):
        ok, err = IntegerValidator.validate("42")
        assert ok is True
        assert err is None

    def test_valid_negative_integer(self):
        ok, err = IntegerValidator.validate("-10")
        assert ok is True
        assert err is None

    def test_valid_zero(self):
        ok, err = IntegerValidator.validate("0")
        assert ok is True
        assert err is None

    def test_invalid_float(self):
        # Floats are not integers — "3.14" must be rejected.
        ok, err = IntegerValidator.validate("3.14")
        assert ok is False
        assert err is not None

    def test_invalid_string(self):
        ok, err = IntegerValidator.validate("abc")
        assert ok is False
        assert err is not None

    def test_none_value(self):
        # Absence is not a type error; requiredness is checked elsewhere.
        ok, err = IntegerValidator.validate(None)
        assert ok is True
        assert err is None
|
||||
|
||||
|
||||
class TestBooleanValidator:
    """Tests for BooleanValidator.

    A fixed vocabulary of truthy/falsy spellings (case-insensitive) is
    accepted; anything else — including near-misses like "truee" — fails.
    None is treated as acceptable (absent value).
    """

    @staticmethod
    def _check(value, expect_valid):
        # One assertion path for every parametrized case.
        ok, err = BooleanValidator.validate(value)
        if expect_valid:
            assert ok is True
            assert err is None
        else:
            assert ok is False
            assert err is not None

    @pytest.mark.parametrize("value", ["true", "True", "TRUE", "1", "yes", "Yes", "YES", "on", "ON"])
    def test_valid_true_values(self, value):
        self._check(value, expect_valid=True)

    @pytest.mark.parametrize("value", ["false", "False", "FALSE", "0", "no", "No", "NO", "off", "OFF"])
    def test_valid_false_values(self, value):
        self._check(value, expect_valid=True)

    @pytest.mark.parametrize("value", ["maybe", "2", "truee", "yess"])
    def test_invalid_boolean_values(self, value):
        self._check(value, expect_valid=False)

    def test_none_value(self):
        self._check(None, expect_valid=True)
class TestListValidator:
    """Tests for ListValidator.

    Comma-separated strings with at least two items validate; a single
    item and the empty string are rejected. None is accepted (absent
    value). ``parse`` splits on commas, trimming whitespace and dropping
    empty items.
    """

    def test_valid_list(self):
        is_valid, error = ListValidator.validate("item1,item2,item3")
        assert is_valid is True
        assert error is None

    def test_single_item_list(self):
        # No comma present — not considered a list.
        is_valid, error = ListValidator.validate("single")
        assert is_valid is False
        assert error is not None

    def test_empty_string(self):
        is_valid, error = ListValidator.validate("")
        assert is_valid is False
        # Fixed: every other rejection case in this file also checks that an
        # error message is produced; this one was missing the assertion.
        assert error is not None

    def test_none_value(self):
        is_valid, error = ListValidator.validate(None)
        assert is_valid is True
        assert error is None

    def test_parse_list(self):
        # Whitespace around items is stripped.
        result = ListValidator.parse("item1, item2 , item3")
        assert result == ["item1", "item2", "item3"]

    def test_parse_list_with_empty_items(self):
        # Consecutive commas yield no empty entries.
        result = ListValidator.parse("item1,,item2")
        assert result == ["item1", "item2"]
class TestPatternValidator:
    """Tests for PatternValidator.

    Values must match the supplied regex; a malformed regex is reported
    as a validation failure rather than raised. None is accepted.
    """

    @staticmethod
    def _validate(value, pattern):
        # Thin wrapper so each test reads as a single expectation.
        return PatternValidator.validate(value, pattern)

    def test_valid_pattern_match(self):
        ok, err = self._validate("ABC123", r"^[A-Z]+[0-9]+$")
        assert ok is True
        assert err is None

    def test_invalid_pattern_match(self):
        # Lowercase letters break the uppercase-only pattern.
        ok, err = self._validate("abc123", r"^[A-Z]+[0-9]+$")
        assert ok is False
        assert err is not None

    def test_invalid_regex_pattern(self):
        # Unclosed character class — the validator must not raise.
        ok, err = self._validate("test", r"[invalid")
        assert ok is False
        assert err is not None

    def test_none_value(self):
        ok, err = self._validate(None, r"^[A-Z]+$")
        assert ok is True
        assert err is None
class TestValidateValue:
    """Tests for the main validate_value function.

    validate_value dispatches on EnvVarType and optionally applies a
    regex pattern on top of the type check.
    """

    def test_validate_string(self):
        is_valid, error = validate_value("test", EnvVarType.STRING)
        assert is_valid is True
        # Fixed: error was unpacked but never checked in these tests.
        assert error is None

    def test_validate_integer(self):
        is_valid, error = validate_value("42", EnvVarType.INTEGER)
        assert is_valid is True
        assert error is None

    def test_validate_boolean(self):
        is_valid, error = validate_value("true", EnvVarType.BOOLEAN)
        assert is_valid is True
        assert error is None

    def test_validate_list(self):
        is_valid, error = validate_value("a,b,c", EnvVarType.LIST)
        assert is_valid is True
        assert error is None

    def test_validate_with_pattern(self):
        is_valid, error = validate_value("ABC123", EnvVarType.STRING, r"^[A-Z]+[0-9]+$")
        assert is_valid is True
        assert error is None

    def test_validate_with_invalid_pattern(self):
        is_valid, error = validate_value("abc123", EnvVarType.STRING, r"^[A-Z]+[0-9]+$")
        assert is_valid is False
        # Consistent with the validator-level tests: failures carry a message.
        assert error is not None
|
||||
Reference in New Issue
Block a user