Initial upload: gitignore-generator-cli v1.0.0 with CI/CD workflow

This commit is contained in:
2026-01-30 15:34:04 +00:00
parent 8951f27181
commit 99c43bcff9

View File

@@ -0,0 +1,181 @@
"""Caching system for gitignore patterns."""
import json
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
# Per-user cache location (XDG-style ~/.cache) for downloaded gitignore patterns.
CACHE_DIR = Path.home() / ".cache" / "gitignore-generator"
# JSON index mapping cache key -> {'cached_at': ISO-8601 timestamp, 'size': length}.
CACHE_METADATA_FILE = CACHE_DIR / "cache.json"
# Default number of days before a cached entry is considered stale.
CACHE_EXPIRY_DAYS = 7
class CacheManager:
    """Manages caching of gitignore patterns on the local filesystem.

    Content is stored one file per key (``<key>.txt``) under ``CACHE_DIR``;
    timestamps and sizes live in a single JSON metadata index
    (``CACHE_METADATA_FILE``). Malformed or unreadable metadata is treated
    as a cache miss rather than raising.
    """

    def __init__(self, expiry_days: int = CACHE_EXPIRY_DAYS):
        """Initialize the manager and ensure the cache directory exists.

        Args:
            expiry_days: Number of days before a cached entry expires.
        """
        self.expiry_days = expiry_days
        self._ensure_cache_dir()

    @staticmethod
    def _cache_file(key: str) -> Path:
        """Return the content file path for *key*."""
        return CACHE_DIR / f"{key}.txt"

    def _ensure_cache_dir(self) -> None:
        """Ensure cache directory exists."""
        CACHE_DIR.mkdir(parents=True, exist_ok=True)

    def _load_metadata(self) -> dict:
        """Load the cache metadata index.

        Returns:
            The metadata dict, or ``{}`` when the file is missing,
            unreadable, corrupt, or does not contain a JSON object.
        """
        if CACHE_METADATA_FILE.exists():
            try:
                with open(CACHE_METADATA_FILE, 'r', encoding='utf-8') as f:
                    data = json.load(f)
            except (json.JSONDecodeError, OSError):
                # Corrupt or unreadable index: treat the cache as empty.
                return {}
            # Guard against valid JSON that is not an object (e.g. a list),
            # which would break `key in metadata` / `metadata[key]` callers.
            return data if isinstance(data, dict) else {}
        return {}

    def _save_metadata(self, metadata: dict) -> None:
        """Persist the cache metadata index as pretty-printed JSON."""
        # The directory may have been removed since __init__; recreate it
        # so the write cannot fail with FileNotFoundError.
        self._ensure_cache_dir()
        with open(CACHE_METADATA_FILE, 'w', encoding='utf-8') as f:
            json.dump(metadata, f, indent=2)

    def _cached_at(self, metadata: dict, key: str) -> Optional[datetime]:
        """Parse the 'cached_at' timestamp for *key*.

        Returns:
            The parsed timestamp, or None when the entry is absent or
            malformed (non-dict entry, missing key, bad ISO string).
        """
        entry = metadata.get(key)
        if not isinstance(entry, dict):
            return None
        try:
            return datetime.fromisoformat(entry['cached_at'])
        except (KeyError, TypeError, ValueError):
            # Hand-edited or truncated metadata: treat as not cached.
            return None

    def is_valid(self, key: str) -> bool:
        """Check if a cached item is still valid.

        Args:
            key: Cache key (technology name)

        Returns:
            True if cache is valid and not expired
        """
        cached_at = self._cached_at(self._load_metadata(), key)
        if cached_at is None:
            return False
        return datetime.now() < cached_at + timedelta(days=self.expiry_days)

    def get_age(self, key: str) -> Optional[int]:
        """Get age of cached item in days.

        Args:
            key: Cache key (technology name)

        Returns:
            Age in whole days, or None if not cached (or metadata malformed)
        """
        cached_at = self._cached_at(self._load_metadata(), key)
        if cached_at is None:
            return None
        return (datetime.now() - cached_at).days

    def set(self, key: str, content: str) -> None:
        """Cache an item.

        Args:
            key: Cache key (technology name)
            content: Content to cache
        """
        self._ensure_cache_dir()
        # Pin UTF-8 so reads/writes are consistent across platforms.
        self._cache_file(key).write_text(content, encoding='utf-8')
        metadata = self._load_metadata()
        metadata[key] = {
            'cached_at': datetime.now().isoformat(),
            'size': len(content)
        }
        self._save_metadata(metadata)

    def get(self, key: str) -> Optional[str]:
        """Retrieve a cached item.

        Args:
            key: Cache key (technology name)

        Returns:
            Cached content or None if not found or expired
        """
        if not self.is_valid(key):
            return None
        cache_file = self._cache_file(key)
        if cache_file.exists():
            return cache_file.read_text(encoding='utf-8')
        return None

    def invalidate(self, key: str) -> bool:
        """Invalidate a cached item.

        Args:
            key: Cache key (technology name)

        Returns:
            True if item was invalidated
        """
        cache_file = self._cache_file(key)
        metadata = self._load_metadata()
        if key in metadata:
            del metadata[key]
            self._save_metadata(metadata)
            if cache_file.exists():
                cache_file.unlink()
            return True
        return False

    def clear(self) -> int:
        """Clear all cached items.

        Returns:
            Number of items cleared
        """
        metadata = self._load_metadata()
        count = len(metadata)
        for key in metadata:
            cache_file = self._cache_file(key)
            if cache_file.exists():
                cache_file.unlink()
        if CACHE_METADATA_FILE.exists():
            CACHE_METADATA_FILE.unlink()
        return count

    def get_stats(self) -> dict:
        """Get cache statistics.

        Returns:
            Dictionary with total/valid/expired item counts and the
            cache directory path.
        """
        metadata = self._load_metadata()
        valid_count = sum(1 for key in metadata if self.is_valid(key))
        expired_count = len(metadata) - valid_count
        return {
            'total_items': len(metadata),
            'valid_items': valid_count,
            'expired_items': expired_count,
            'cache_dir': str(CACHE_DIR)
        }

    def cleanup_expired(self) -> int:
        """Remove expired cache entries.

        Returns:
            Number of entries cleaned up
        """
        metadata = self._load_metadata()
        # Materialize first: we mutate `metadata` while removing entries.
        to_remove = [key for key in metadata if not self.is_valid(key)]
        for key in to_remove:
            cache_file = self._cache_file(key)
            if cache_file.exists():
                cache_file.unlink()
            del metadata[key]
        self._save_metadata(metadata)
        return len(to_remove)