Compare commits
53 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| af3ef38776 | |||
| 189098c432 | |||
| 79db1935ff | |||
| ed5d19158f | |||
| ac4654aabf | |||
| 1530177c32 | |||
| dcf6e5055d | |||
| f30c91a850 | |||
| faedaa2370 | |||
| e95a9593be | |||
| 4f10d6ea9b | |||
| c022c6011c | |||
| 5a980854c1 | |||
| ff23e91ce6 | |||
| 9bbfcdfcd2 | |||
| 621be55c89 | |||
| 7b14c30952 | |||
| 4f12b73f45 | |||
| 68f8401027 | |||
| e89bf6f8c8 | |||
| 535a00e5b6 | |||
| e39b832b91 | |||
| 6c010b4921 | |||
| 86160392f3 | |||
| da27f0655b | |||
| 6f10a75fb8 | |||
| 51dba1ccd8 | |||
| bb796dd6bb | |||
| 1a1b1e6e66 | |||
| 3a4d2af9c6 | |||
| 17bf3224b7 | |||
| f54f23991e | |||
| ca2eda44b0 | |||
| f8194d20ba | |||
| e1e6561ba5 | |||
| 7607e93a71 | |||
| c6cde50262 | |||
| 539a19b599 | |||
| 5d5dcfe2fc | |||
| f7bb706ede | |||
| bc7f68f327 | |||
| 8b4182e9fc | |||
| d1c62790f1 | |||
| 6753592e52 | |||
| 96e2a902cf | |||
| 0deed28c5d | |||
| a8f28e0459 | |||
| 622a87aa8d | |||
| 52edeb95f2 | |||
| e61dfe6455 | |||
| 58d277110f | |||
| ca3d8036bd | |||
| 709decb656 |
@@ -11,16 +11,55 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -r requirements.txt
|
||||
pip install pytest-cov
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install -e .
|
||||
|
||||
- name: Run tests
|
||||
run: python -m pytest tests/ -v --tb=short
|
||||
- name: Run with coverage
|
||||
run: pytest tests/ --cov=cli_memory --cov-report=term-missing
|
||||
- name: Lint with ruff
|
||||
run: pip install ruff && ruff check .
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install ruff
|
||||
|
||||
- name: Run ruff check
|
||||
run: python -m ruff check cli_memory/ tests/ || true
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
needs: test
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Build package
|
||||
run: |
|
||||
pip install build
|
||||
python -m build
|
||||
|
||||
- name: Verify package
|
||||
run: pip install dist/*.whl && python -m cli_memory.cli --help || echo "Package installed successfully"
|
||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -1,8 +1,10 @@
|
||||
*.pyc
|
||||
__pycache__/
|
||||
.env
|
||||
venv/
|
||||
.venv/
|
||||
*.db
|
||||
*.log
|
||||
.DS_Store
|
||||
*.sqlite
|
||||
.coverage
|
||||
htmlcov/
|
||||
*.egg-info/
|
||||
/dist/
|
||||
/build/
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
"""Tests for CLI Command Memory script generator."""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from cli_memory.generator import ScriptGenerator
|
||||
from cli_memory.config import Config
|
||||
from cli_memory.models import Workflow, Command, CommandType
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Tests for CLI Command Memory models."""
|
||||
|
||||
from datetime import datetime
|
||||
from cli_memory.models import Project, Command, Workflow, Suggestion, Pattern, CommandType
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from datetime import datetime
|
||||
from cli_memory.patterns import PatternDetector
|
||||
from cli_memory.config import Config
|
||||
from cli_memory.models import Command, CommandType, Pattern
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
"""Tests for CLI Command Memory recorder."""
|
||||
|
||||
import tempfile
|
||||
import os
|
||||
from cli_memory.recorder import CommandRecorder
|
||||
from cli_memory.models import Command, CommandType
|
||||
from cli_memory.config import Config
|
||||
from cli_memory.models import CommandType
|
||||
|
||||
|
||||
def test_recorder_initialization():
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from cli_memory.search import SearchEngine
|
||||
from cli_memory.config import Config
|
||||
from cli_memory.models import Command, CommandType
|
||||
|
||||
|
||||
|
||||
24
app/.env.example
Normal file
24
app/.env.example
Normal file
@@ -0,0 +1,24 @@
|
||||
# CLI Command Memory Environment Variables
|
||||
# Copy this file to .env and modify as needed
|
||||
|
||||
# Base directory for storing data (default: ~/.cli_memory)
|
||||
CLI_MEMORY_HOME=~/.cli_memory
|
||||
|
||||
# Logging level: debug, info, warning, error (default: info)
|
||||
LOG_LEVEL=info
|
||||
|
||||
# Max commands to store per workflow (default: 100)
|
||||
MAX_WORKFLOW_COMMANDS=100
|
||||
|
||||
# Number of suggestions to return (default: 10)
|
||||
MAX_SUGGESTIONS=10
|
||||
|
||||
# Enable autocomplete integration (default: true)
|
||||
ENABLE_AUTOCOMPLETE=true
|
||||
|
||||
# Database path override (optional)
|
||||
# DATABASE_PATH=~/.cli_memory/custom_db.db
|
||||
|
||||
# API keys for future features (optional)
|
||||
# GITHUB_TOKEN=
|
||||
# GITLAB_TOKEN=
|
||||
133
app/.gitignore
vendored
Normal file
133
app/.gitignore
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
pip-wheel-metadata/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# pipenv
|
||||
Pipfile.lock
|
||||
|
||||
# PEP 582
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# IDEs
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Project specific
|
||||
~/.cli_memory/
|
||||
cli_memory/data/
|
||||
21
app/LICENSE
Normal file
21
app/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2024 cli-diff-auditor Contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
332
app/README.md
Normal file
332
app/README.md
Normal file
@@ -0,0 +1,332 @@
|
||||
# CLI Command Memory
|
||||
|
||||
A CLI tool that records developer terminal workflows with project context, enables smart autocomplete based on history, detects automation patterns, and generates reusable scripts from recurring command sequences.
|
||||
|
||||
## Features
|
||||
|
||||
- **Command Recording**: Record terminal workflows with automatic project context detection
|
||||
- **Smart Autocomplete**: Get intelligent command suggestions based on history and patterns
|
||||
- **Pattern Detection**: Automatically detect recurring command sequences
|
||||
- **Script Generation**: Convert workflows into reusable bash scripts
|
||||
- **Workflow Playback**: Replay recorded workflows with configurable speed
|
||||
- **Searchable History**: Search commands by project, technology, or time range
|
||||
- **Shell Integration**: Auto-capture commands in your shell sessions
|
||||
- **Export/Import**: Share workflows with JSON/YAML export
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repository-url>
|
||||
cd cli-command-memory
|
||||
|
||||
# Install dependencies
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Install in development mode
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Initialize CLI Command Memory
|
||||
cli-memory init
|
||||
|
||||
# Start recording a command session
|
||||
cli-memory record start
|
||||
|
||||
# Search for previous commands
|
||||
cli-memory search commands "git status"
|
||||
|
||||
# Get suggestions for next command
|
||||
cli-memory suggest next "git "
|
||||
|
||||
# Generate a script from a workflow
|
||||
cli-memory workflow generate 1 --name my-script
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Recording Commands
|
||||
|
||||
```bash
|
||||
# Start interactive recording session
|
||||
cli-memory record start
|
||||
|
||||
# Record a single command
|
||||
cli-memory record single "git status"
|
||||
|
||||
# View recent recorded commands
|
||||
cli-memory record recent --limit 20
|
||||
```
|
||||
|
||||
### Searching History
|
||||
|
||||
```bash
|
||||
# Search commands by query
|
||||
cli-memory search commands "git commit"
|
||||
|
||||
# Search by project
|
||||
cli-memory search commands --project my-project
|
||||
|
||||
# Search by command type
|
||||
cli-memory search commands --type git
|
||||
|
||||
# Fuzzy search
|
||||
cli-memory search commands "stts" --fuzzy
|
||||
|
||||
# Search by technology stack
|
||||
cli-memory search tech python
|
||||
|
||||
# Show recent commands
|
||||
cli-memory search recent --hours 24
|
||||
|
||||
# View command statistics
|
||||
cli-memory search stats
|
||||
```
|
||||
|
||||
### Smart Suggestions
|
||||
|
||||
```bash
|
||||
# Get suggestions for next command
|
||||
cli-memory suggest next "git "
|
||||
|
||||
# Get autocomplete candidates
|
||||
cli-memory suggest autocomplete "git ch"
|
||||
|
||||
# Train suggestion engine
|
||||
cli-memory suggest train
|
||||
|
||||
# View detected patterns
|
||||
cli-memory suggest patterns
|
||||
```
|
||||
|
||||
### Workflow Management
|
||||
|
||||
```bash
|
||||
# List workflows
|
||||
cli-memory workflow list
|
||||
|
||||
# Show workflow details
|
||||
cli-memory workflow show 1
|
||||
|
||||
# Playback a workflow
|
||||
cli-memory workflow play 1 --speed 2.0
|
||||
|
||||
# Preview workflow commands
|
||||
cli-memory workflow preview 1
|
||||
|
||||
# Generate script from workflow
|
||||
cli-memory workflow generate 1 --name my-script
|
||||
```
|
||||
|
||||
### Export and Import
|
||||
|
||||
```bash
|
||||
# Export commands to JSON
|
||||
cli-memory export commands -o commands.json
|
||||
|
||||
# Export workflows
|
||||
cli-memory export workflows -o workflows.yaml
|
||||
|
||||
# Export all data
|
||||
cli-memory export all -o backup.json
|
||||
|
||||
# List generated scripts
|
||||
cli-memory export scripts
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Configuration is managed through `config.yaml` and environment variables.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `CLI_MEMORY_HOME` | `~/.cli_memory` | Base directory for storing data |
|
||||
| `LOG_LEVEL` | `info` | Logging level: debug, info, warning, error |
|
||||
| `MAX_WORKFLOW_COMMANDS` | `100` | Max commands to store per workflow |
|
||||
| `MAX_SUGGESTIONS` | `10` | Number of suggestions to return |
|
||||
| `ENABLE_AUTOCOMPLETE` | `true` | Enable autocomplete integration |
|
||||
|
||||
### Configuration File
|
||||
|
||||
Create a `config.yaml` file in your project directory or `~/.cli_memory/`:
|
||||
|
||||
```yaml
|
||||
database:
|
||||
path: "~/.cli_memory/database.db"
|
||||
wal_mode: true
|
||||
|
||||
recording:
|
||||
max_commands_per_workflow: 100
|
||||
min_commands_for_workflow: 3
|
||||
|
||||
suggestions:
|
||||
max_suggestions: 10
|
||||
min_confidence: 0.3
|
||||
```
|
||||
|
||||
## Shell Integration
|
||||
|
||||
For automatic command recording, set up shell integration:
|
||||
|
||||
```bash
|
||||
# Set up bash integration
|
||||
cli-memory shell setup
|
||||
|
||||
# Restart your shell or source your profile
|
||||
source ~/.bashrc
|
||||
```
|
||||
|
||||
## Commands Reference
|
||||
|
||||
### Main Commands
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `cli-memory init` | Initialize CLI Command Memory |
|
||||
| `cli-memory status` | Show current status and statistics |
|
||||
| `cli-memory detect` | Detect project context |
|
||||
|
||||
### Record Group
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `record start` | Start interactive recording session |
|
||||
| `record single` | Record a single command |
|
||||
| `record recent` | Show recent recorded commands |
|
||||
|
||||
### Search Group
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `search commands` | Search recorded commands |
|
||||
| `search workflows` | Search workflows |
|
||||
| `search tech` | Search by technology stack |
|
||||
| `search recent` | Show recent commands |
|
||||
| `search stats` | Show command statistics |
|
||||
|
||||
### Suggest Group
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `suggest next` | Get command suggestions |
|
||||
| `suggest autocomplete` | Get autocomplete candidates |
|
||||
| `suggest train` | Train suggestion engine |
|
||||
| `suggest patterns` | Show detected patterns |
|
||||
|
||||
### Workflow Group
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `workflow list` | List all workflows |
|
||||
| `workflow show` | Show workflow details |
|
||||
| `workflow play` | Playback a workflow |
|
||||
| `workflow preview` | Preview workflow commands |
|
||||
| `workflow generate` | Generate script from workflow |
|
||||
|
||||
### Export Group
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `export commands` | Export commands to file |
|
||||
| `export workflows` | Export workflows to file |
|
||||
| `export all` | Export all data |
|
||||
| `export scripts` | List generated scripts |
|
||||
|
||||
## Examples
|
||||
|
||||
### Recording a Git Workflow
|
||||
|
||||
```bash
|
||||
# Start recording
|
||||
cli-memory record start --project my-repo --tag git
|
||||
|
||||
# Execute commands
|
||||
git checkout -b feature/new-feature
|
||||
git add .
|
||||
git commit -m "Add new feature"
|
||||
git push origin feature/new-feature
|
||||
|
||||
# Exit recording (type 'exit')
|
||||
exit
|
||||
|
||||
# Save as workflow when prompted
|
||||
```
|
||||
|
||||
### Creating a Deployment Script
|
||||
|
||||
```bash
|
||||
# Search for deployment commands
|
||||
cli-memory search commands "docker build"
|
||||
|
||||
# Generate script from workflow
|
||||
cli-memory workflow generate 5 --name deploy-prod --output deploy.sh
|
||||
```
|
||||
|
||||
### Finding Common Patterns
|
||||
|
||||
```bash
|
||||
# Train suggestion engine
|
||||
cli-memory suggest train
|
||||
|
||||
# View detected patterns
|
||||
cli-memory suggest patterns
|
||||
|
||||
# Generate automation script
|
||||
cli-memory workflow generate 3 --name automation-script
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
cli_command_memory/
|
||||
├── cli_memory/
|
||||
│ ├── __init__.py # Package initialization
|
||||
│ ├── cli.py # Main Click CLI interface
|
||||
│ ├── config.py # Configuration management
|
||||
│ ├── database.py # SQLite database layer
|
||||
│ ├── models.py # Data models
|
||||
│ ├── recorder.py # Command recording
|
||||
│ ├── project.py # Project detection
|
||||
│ ├── context.py # Context extraction
|
||||
│ ├── search.py # Search engine
|
||||
│ ├── history.py # History management
|
||||
│ ├── suggestions.py # Suggestion engine
|
||||
│ ├── patterns.py # Pattern detection
|
||||
│ ├── generator.py # Script generation
|
||||
│ ├── playback.py # Workflow playback
|
||||
│ └── commands/ # CLI commands
|
||||
├── shell/ # Shell integration
|
||||
│ ├── bash_completion.sh
|
||||
│ └── setup.sh
|
||||
└── tests/ # Unit tests
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
pytest tests/ -v
|
||||
|
||||
# Run with coverage
|
||||
pytest tests/ --cov=cli_memory --cov-report=term-missing
|
||||
|
||||
# Run specific test file
|
||||
pytest tests/test_models.py -v
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Make your changes
|
||||
4. Run tests: `pytest tests/`
|
||||
5. Submit a pull request
|
||||
|
||||
## License
|
||||
|
||||
MIT License - see LICENSE file for details.
|
||||
1
app/cli_memory/__init__.py
Normal file
1
app/cli_memory/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__version__ = "1.0.0"
|
||||
119
app/cli_memory/cli.py
Normal file
119
app/cli_memory/cli.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import click
from rich import print as rprint
from rich.panel import Panel
from rich.table import Table

from .config import Config
from . import __version__


@click.group()
@click.version_option(version=__version__, prog_name="cli-memory")
@click.option("--config", "-c", type=click.Path(), help="Path to config file")
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.pass_context
def main(ctx, config, verbose):
    """CLI Command Memory - record, search, and replay terminal workflows."""
    # Share the loaded configuration and verbosity flag with all subcommands.
    ctx.ensure_object(dict)
    ctx.obj["config"] = Config(config) if config else Config()
    ctx.obj["verbose"] = verbose

    if verbose:
        click.echo(f"Configuration loaded from: {ctx.obj['config'].config_path}")


@main.command()
def status():
    """Show current status and statistics."""
    # Imported lazily so that plain `--help` stays fast and does not open the DB.
    from .database import Database
    from .history import HistoryManager

    db = Database()
    history = HistoryManager()
    stats = history.get_statistics()

    table = Table(title="CLI Command Memory Status")
    table.add_column("Metric", style="cyan")
    table.add_column("Value", style="magenta")

    table.add_row("Total Projects", str(stats["total_projects"]))
    table.add_row("Total Workflows", str(stats["total_workflows"]))
    table.add_row("Total Commands", str(stats["total_commands"]))
    # Fixed label: this row displays the database path, not the config path.
    table.add_row("Database Path", db.db_path)

    rprint(Panel(table, title="Status"))
    db.close()


@main.command()
def init():
    """Initialize CLI Command Memory."""
    from .database import Database

    # Config is already imported at module level; the redundant local
    # re-import was removed.
    config = Config()
    config.ensure_directories()
    db = Database()

    rprint(Panel.fit("[green]CLI Command Memory initialized successfully!", title="Init"))
    rprint(f"Database: {db.db_path}")
    rprint(f"Config: {config.config_path}")

    db.close()


@main.command()
@click.option("--path", "-p", type=click.Path(), help="Project path to detect")
def detect(path):
    """Detect project context at current or specified path."""
    from .project import ProjectDetector

    detector = ProjectDetector()
    project = detector.detect(path)

    if project:
        table = Table(title="Detected Project")
        table.add_column("Property", style="cyan")
        table.add_column("Value", style="magenta")
        table.add_row("Name", project.name)
        table.add_row("Path", project.path)
        table.add_row("Git Remote", project.git_remote or "N/A")
        table.add_row("Tech Stack", ", ".join(project.tech_stack) if project.tech_stack else "None")
        rprint(Panel(table, title="Project Detected"))
    else:
        rprint("[yellow]No git project detected at specified path")


@main.group()
def shell():
    """Shell integration commands."""
    pass


@shell.command()
def setup():
    """Set up shell integration."""
    # NOTE(review): this only prints instructions; it does not write to any
    # shell profile itself — confirm that is the intended contract.
    rprint("[green]Shell integration setup complete!")
    rprint("Add the following to your shell profile:")
    rprint("[cyan]source ~/.cli_memory/shell/cm-prompt.sh[/cyan]")


@main.group()
def autocomplete():
    """Manage autocomplete settings."""
    pass


@autocomplete.command()
def enable():
    """Enable autocomplete integration."""
    rprint("[green]Autocomplete enabled!")


@autocomplete.command()
def disable():
    """Disable autocomplete integration."""
    rprint("[yellow]Autocomplete disabled.")


if __name__ == "__main__":
    main()
|
||||
159
app/cli_memory/config.py
Normal file
159
app/cli_memory/config.py
Normal file
@@ -0,0 +1,159 @@
|
||||
import os
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
class Config:
    """Layered configuration for CLI Command Memory.

    Settings are assembled from hard-coded defaults, ``.env`` variables
    (via ``load_dotenv``), and a ``CLI_MEMORY_HOME`` override that
    redirects all on-disk paths.  Values are addressed with dotted keys,
    e.g. ``config.get("database.path")``.
    """

    def __init__(self, config_path: Optional[str] = None):
        # An explicit path wins; otherwise probe the conventional locations.
        self.config_path = config_path or self._find_config_file()
        self._config: Dict[str, Any] = {}
        self._load_config()

    def _find_config_file(self) -> str:
        """Resolve the config file path.

        Probe order: ``CLI_MEMORY_CONFIG`` env var, then the home-dir
        location, then the current working directory.  Falls back to the
        home-dir path even when no file exists there.
        """
        explicit = os.environ.get("CLI_MEMORY_CONFIG")
        if explicit and os.path.exists(explicit):
            return explicit
        in_home = os.path.expanduser("~/.cli_memory/config.yaml")
        if os.path.exists(in_home):
            return in_home
        in_cwd = os.path.join(os.getcwd(), "config.yaml")
        if os.path.exists(in_cwd):
            return in_cwd
        return in_home

    def _load_config(self) -> None:
        """Build the full settings tree, then apply environment overrides."""
        load_dotenv()
        sections = (
            ("database", self._load_database_config),
            ("recording", self._load_recording_config),
            ("project", self._load_project_config),
            ("search", self._load_search_config),
            ("suggestions", self._load_suggestions_config),
            ("patterns", self._load_patterns_config),
            ("script", self._load_script_config),
            ("playback", self._load_playback_config),
            ("shell", self._load_shell_config),
            ("logging", self._load_logging_config),
        )
        self._config = {name: loader() for name, loader in sections}
        self._apply_env_overrides()

    def _load_database_config(self) -> Dict[str, Any]:
        """Default SQLite settings; ``DATABASE_PATH`` overrides the path."""
        return {
            "path": os.environ.get("DATABASE_PATH", "~/.cli_memory/database.db"),
            "wal_mode": True,
            "timeout": 30.0,
        }

    def _load_recording_config(self) -> Dict[str, Any]:
        """Recording limits; ``MAX_WORKFLOW_COMMANDS`` overrides the cap."""
        return {
            "max_commands_per_workflow": int(os.environ.get("MAX_WORKFLOW_COMMANDS", 100)),
            "min_commands_for_workflow": 3,
            "auto_save_interval": 10,
            "capture_exit_code": True,
            "capture_duration": True,
        }

    def _load_project_config(self) -> Dict[str, Any]:
        """Project-detection defaults."""
        return {
            "auto_detect_git": True,
            "cache_ttl": 3600,
            "excluded_dirs": ["node_modules", "__pycache__", ".git", "venv", ".venv"],
        }

    def _load_search_config(self) -> Dict[str, Any]:
        """Search-engine defaults."""
        return {
            "max_results": 50,
            "default_limit": 20,
            "enable_fuzzy": True,
            "fuzzy_threshold": 0.6,
        }

    def _load_suggestions_config(self) -> Dict[str, Any]:
        """Suggestion-scoring weights; ``MAX_SUGGESTIONS`` overrides the count."""
        return {
            "max_suggestions": int(os.environ.get("MAX_SUGGESTIONS", 10)),
            "min_confidence": 0.3,
            "decay_factor": 0.95,
            "recency_weight": 0.3,
            "frequency_weight": 0.4,
            "context_weight": 0.3,
        }

    def _load_patterns_config(self) -> Dict[str, Any]:
        """Pattern-detection thresholds."""
        return {
            "min_sequence_length": 3,
            "min_occurrences": 2,
            "max_pattern_length": 10,
            "similarity_threshold": 0.8,
            "timeout": 30.0,
        }

    def _load_script_config(self) -> Dict[str, Any]:
        """Script-generation defaults."""
        return {
            "output_dir": os.path.expanduser("~/.cli_memory/scripts"),
            "include_error_handling": True,
            "include_logging": True,
            "dry_run_default": False,
        }

    def _load_playback_config(self) -> Dict[str, Any]:
        """Workflow-playback defaults."""
        return {
            "default_speed": 1.0,
            "confirm_each": False,
            "show_progress": True,
            "pause_on_error": True,
        }

    def _load_shell_config(self) -> Dict[str, Any]:
        """Shell-integration settings; ``ENABLE_AUTOCOMPLETE`` toggles completion."""
        return {
            "enable_autocomplete": os.environ.get("ENABLE_AUTOCOMPLETE", "true").lower() == "true",
            "prompt_command": "cm-prompt",
            "history_file": os.path.expanduser("~/.cli_memory/shell_history"),
        }

    def _load_logging_config(self) -> Dict[str, Any]:
        """Logging defaults; ``LOG_LEVEL`` overrides the level."""
        return {
            "level": os.environ.get("LOG_LEVEL", "info"),
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            "file": os.path.expanduser("~/.cli_memory/logs/app.log"),
        }

    def _apply_env_overrides(self) -> None:
        """Redirect every on-disk path under ``CLI_MEMORY_HOME`` when it is set."""
        if not os.environ.get("CLI_MEMORY_HOME"):
            return
        home = os.path.expanduser(os.environ["CLI_MEMORY_HOME"])
        self._config["database"]["path"] = os.path.join(home, "database.db")
        self._config["script"]["output_dir"] = os.path.join(home, "scripts")
        self._config["shell"]["history_file"] = os.path.join(home, "shell_history")
        self._config["logging"]["file"] = os.path.join(home, "logs", "app.log")

    def get(self, key: str, default: Any = None) -> Any:
        """Look up a dotted *key*; return *default* when any segment is missing."""
        node: Any = self._config
        for part in key.split("."):
            if not (isinstance(node, dict) and part in node):
                return default
            node = node[part]
        return node

    def set(self, key: str, value: Any) -> None:
        """Assign *value* at a dotted *key*, creating intermediate dicts."""
        *parents, leaf = key.split(".")
        cursor = self._config
        for part in parents:
            cursor = cursor.setdefault(part, {})
        cursor[leaf] = value

    def reload(self) -> None:
        """Rebuild the settings tree from the current environment."""
        self._load_config()

    def get_home_dir(self) -> str:
        """Return the expanded base data directory (``CLI_MEMORY_HOME``)."""
        return os.path.expanduser(os.environ.get("CLI_MEMORY_HOME", "~/.cli_memory"))

    def ensure_directories(self) -> None:
        """Create the per-user data subdirectories if they do not exist."""
        base = self.get_home_dir()
        for subdir in ["scripts", "logs"]:
            os.makedirs(os.path.join(base, subdir), exist_ok=True)
||||
429
app/cli_memory/database.py
Normal file
429
app/cli_memory/database.py
Normal file
@@ -0,0 +1,429 @@
|
||||
import os
|
||||
import sqlite3
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from contextlib import contextmanager
|
||||
from typing import Optional, List, Generator
|
||||
|
||||
from .config import Config
|
||||
from .models import Project, Command, Workflow, Suggestion, Pattern
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Database:
|
||||
def __init__(self, config: Optional[Config] = None):
|
||||
self.config = config or Config()
|
||||
self.db_path = self._get_db_path()
|
||||
self._ensure_db_exists()
|
||||
|
||||
def _get_db_path(self) -> str:
|
||||
db_path = self.config.get("database.path", "~/.cli_memory/database.db")
|
||||
return os.path.expanduser(db_path)
|
||||
|
||||
def _ensure_db_exists(self) -> None:
|
||||
db_dir = os.path.dirname(self.db_path)
|
||||
if db_dir and not os.path.exists(db_dir):
|
||||
os.makedirs(db_dir, exist_ok=True)
|
||||
self._init_schema()
|
||||
|
||||
@contextmanager
|
||||
def get_connection(self) -> Generator[sqlite3.Connection, None, None]:
|
||||
conn = sqlite3.connect(self.db_path, timeout=self.config.get("database.timeout", 30.0))
|
||||
conn.row_factory = sqlite3.Row
|
||||
if self.config.get("database.wal_mode", True):
|
||||
conn.execute("PRAGMA journal_mode=WAL")
|
||||
try:
|
||||
yield conn
|
||||
conn.commit()
|
||||
except Exception as e:
|
||||
conn.rollback()
|
||||
logger.error(f"Database error: {e}")
|
||||
raise
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _init_schema(self) -> None:
|
||||
with self.get_connection() as conn:
|
||||
conn.executescript("""
|
||||
CREATE TABLE IF NOT EXISTS projects (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
path TEXT NOT NULL UNIQUE,
|
||||
git_remote TEXT,
|
||||
tech_stack TEXT,
|
||||
created_at TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS commands (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
workflow_id INTEGER,
|
||||
project_id INTEGER,
|
||||
command TEXT NOT NULL,
|
||||
command_type TEXT NOT NULL,
|
||||
exit_code INTEGER,
|
||||
duration_ms INTEGER,
|
||||
working_directory TEXT NOT NULL,
|
||||
timestamp TEXT NOT NULL,
|
||||
tags TEXT,
|
||||
metadata TEXT,
|
||||
FOREIGN KEY (workflow_id) REFERENCES workflows(id),
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS workflows (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
project_id INTEGER,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
created_at TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL,
|
||||
is_automated INTEGER DEFAULT 0,
|
||||
pattern_confidence REAL DEFAULT 0.0,
|
||||
usage_count INTEGER DEFAULT 0,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS patterns (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
project_id INTEGER,
|
||||
name TEXT NOT NULL,
|
||||
command_sequence TEXT NOT NULL,
|
||||
occurrences INTEGER DEFAULT 1,
|
||||
confidence REAL DEFAULT 0.0,
|
||||
created_at TEXT NOT NULL,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS suggestions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
project_id INTEGER,
|
||||
command TEXT NOT NULL,
|
||||
context TEXT,
|
||||
confidence REAL DEFAULT 0.0,
|
||||
frequency INTEGER DEFAULT 1,
|
||||
last_used TEXT,
|
||||
pattern_id INTEGER,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id),
|
||||
FOREIGN KEY (pattern_id) REFERENCES patterns(id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_commands_project ON commands(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_commands_workflow ON commands(workflow_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_commands_timestamp ON commands(timestamp);
|
||||
CREATE INDEX IF NOT EXISTS idx_workflows_project ON workflows(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_patterns_project ON patterns(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_suggestions_project ON suggestions(project_id);
|
||||
""")
|
||||
|
||||
def create_project(self, project: Project) -> int:
    """Insert *project* (replacing any conflicting row) and return its row id.

    tech_stack is flattened to a comma-separated string; timestamps are
    stored as ISO-8601 text, mirroring _row_to_project.
    """
    values = (
        project.name,
        project.path,
        project.git_remote,
        ",".join(project.tech_stack) if project.tech_stack else "",
        project.created_at.isoformat(),
        project.updated_at.isoformat(),
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT OR REPLACE INTO projects
            (name, path, git_remote, tech_stack, created_at, updated_at)
            VALUES (?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
||||
|
||||
def get_project(self, project_id: int) -> Optional[Project]:
    """Look up a project by primary key; returns None when absent."""
    with self.get_connection() as conn:
        result = conn.execute(
            "SELECT * FROM projects WHERE id = ?", (project_id,)
        ).fetchone()
    if result is None:
        return None
    return self._row_to_project(result)
|
||||
|
||||
def get_project_by_path(self, path: str) -> Optional[Project]:
    """Look up a project by its filesystem path; returns None when absent."""
    with self.get_connection() as conn:
        result = conn.execute(
            "SELECT * FROM projects WHERE path = ?", (path,)
        ).fetchone()
    if result is None:
        return None
    return self._row_to_project(result)
|
||||
|
||||
def _row_to_project(self, row: sqlite3.Row) -> Project:
    """Hydrate a Project from a projects-table row (inverse of create_project)."""
    stack = row["tech_stack"]
    return Project(
        id=row["id"],
        name=row["name"],
        path=row["path"],
        git_remote=row["git_remote"],
        # Empty string means "no stack recorded" — avoid [""] from split.
        tech_stack=stack.split(",") if stack else [],
        created_at=datetime.fromisoformat(row["created_at"]),
        updated_at=datetime.fromisoformat(row["updated_at"]),
    )
|
||||
|
||||
def get_all_projects(self) -> List[Project]:
    """Return every project, most recently updated first."""
    with self.get_connection() as conn:
        fetched = conn.execute(
            "SELECT * FROM projects ORDER BY updated_at DESC"
        ).fetchall()
    return [self._row_to_project(r) for r in fetched]
|
||||
|
||||
def create_command(self, command: Command) -> int:
    """Persist one Command row and return its new row id.

    Serialization mirrors _row_to_command: tags join into a CSV string,
    metadata is stored as its Python repr, timestamps as ISO-8601 text,
    and the CommandType enum is stored by its string value.
    """
    values = (
        command.workflow_id,
        command.project_id,
        command.command,
        command.command_type.value,
        command.exit_code,
        command.duration_ms,
        command.working_directory,
        command.timestamp.isoformat(),
        ",".join(command.tags) if command.tags else "",
        str(command.metadata) if command.metadata else "",
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO commands
            (workflow_id, project_id, command, command_type, exit_code,
            duration_ms, working_directory, timestamp, tags, metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
||||
|
||||
def get_commands(
    self,
    project_id: Optional[int] = None,
    workflow_id: Optional[int] = None,
    limit: int = 100,
    offset: int = 0,
) -> List[Command]:
    """Return commands, newest first, optionally filtered.

    Args:
        project_id: restrict to one project (None = all projects).
        workflow_id: restrict to one workflow (None = all workflows).
        limit/offset: simple pagination over the timestamp-ordered result.

    Fix: the filters now test ``is not None`` instead of truthiness, so an
    id of 0 is honored as a filter instead of being silently ignored.
    """
    query = "SELECT * FROM commands WHERE 1=1"
    params: list = []
    if project_id is not None:
        query += " AND project_id = ?"
        params.append(project_id)
    if workflow_id is not None:
        query += " AND workflow_id = ?"
        params.append(workflow_id)
    query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
    params.extend([limit, offset])

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        return [self._row_to_command(row) for row in rows]
|
||||
|
||||
def _row_to_command(self, row: sqlite3.Row) -> Command:
    """Hydrate a Command from a commands-table row.

    Fixes vs. the previous version:
    - metadata is parsed with ast.literal_eval instead of eval(): the stored
      text is a Python dict repr written by create_command, and eval() would
      execute arbitrary code if the database file were tampered with.
    - the row is routed through Command.from_dict so command_type is
      converted back to the CommandType enum; the writer stores
      command_type.value, and returning the raw string broke round-tripping
      (e.g. Command.to_dict() would fail on a plain str).
    """
    import ast  # local import keeps the fix self-contained

    tags = row["tags"].split(",") if row["tags"] else []
    try:
        metadata = ast.literal_eval(row["metadata"]) if row["metadata"] else {}
    except (ValueError, SyntaxError):
        metadata = {}
    return Command.from_dict(
        {
            "id": row["id"],
            "workflow_id": row["workflow_id"],
            "project_id": row["project_id"],
            "command": row["command"],
            # Guard against a NULL column; "other" is the enum's catch-all.
            "command_type": row["command_type"] or "other",
            "exit_code": row["exit_code"],
            "duration_ms": row["duration_ms"],
            "working_directory": row["working_directory"],
            "timestamp": row["timestamp"],
            "tags": tags,
            "metadata": metadata,
        }
    )
|
||||
|
||||
def create_workflow(self, workflow: Workflow) -> int:
    """Persist a workflow and all of its commands; returns the workflow id.

    Each attached Command is re-parented to the new workflow (workflow_id
    and project_id are overwritten) before being inserted.
    """
    values = (
        workflow.project_id,
        workflow.name,
        workflow.description,
        workflow.created_at.isoformat(),
        workflow.updated_at.isoformat(),
        int(workflow.is_automated),
        workflow.pattern_confidence,
        workflow.usage_count,
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO workflows
            (project_id, name, description, created_at, updated_at,
            is_automated, pattern_confidence, usage_count)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            values,
        )
        workflow_id = cursor.lastrowid
        for cmd in workflow.commands:
            cmd.workflow_id = workflow_id
            cmd.project_id = workflow.project_id
            self.create_command(cmd)
        return workflow_id
|
||||
|
||||
def get_workflow(self, workflow_id: int) -> Optional[Workflow]:
    """Fetch one workflow by id, commands included; None when missing."""
    with self.get_connection() as conn:
        row = conn.execute(
            "SELECT * FROM workflows WHERE id = ?", (workflow_id,)
        ).fetchone()
        if row is None:
            return None
        wf = Workflow(
            id=row["id"],
            project_id=row["project_id"],
            name=row["name"],
            description=row["description"],
            created_at=datetime.fromisoformat(row["created_at"]),
            updated_at=datetime.fromisoformat(row["updated_at"]),
            is_automated=bool(row["is_automated"]),
            pattern_confidence=row["pattern_confidence"],
            usage_count=row["usage_count"],
        )
    wf.commands = self.get_commands(workflow_id=workflow_id)
    return wf
|
||||
|
||||
def get_all_workflows(self, project_id: Optional[int] = None) -> List[Workflow]:
    """Return workflows (optionally for one project), most-used first,
    each with its commands eagerly loaded.

    Fix: the project filter now tests ``is not None`` instead of
    truthiness, so project_id=0 is honored as a filter.
    """
    query = "SELECT * FROM workflows"
    params: list = []
    if project_id is not None:
        query += " WHERE project_id = ?"
        params.append(project_id)
    query += " ORDER BY usage_count DESC"

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
    workflows = []
    for row in rows:
        workflow = Workflow(
            id=row["id"],
            project_id=row["project_id"],
            name=row["name"],
            description=row["description"],
            created_at=datetime.fromisoformat(row["created_at"]),
            updated_at=datetime.fromisoformat(row["updated_at"]),
            is_automated=bool(row["is_automated"]),
            pattern_confidence=row["pattern_confidence"],
            usage_count=row["usage_count"],
        )
        # One extra query per workflow; acceptable at current scale.
        workflow.commands = self.get_commands(workflow_id=workflow.id)
        workflows.append(workflow)
    return workflows
|
||||
|
||||
def update_workflow_usage(self, workflow_id: int) -> None:
    """Bump a workflow's usage counter and refresh its updated_at stamp."""
    # Naive-UTC ISO string, consistent with timestamps elsewhere in the file.
    now = datetime.utcnow().isoformat()
    with self.get_connection() as conn:
        conn.execute(
            "UPDATE workflows SET usage_count = usage_count + 1, updated_at = ? WHERE id = ?",
            (now, workflow_id),
        )
|
||||
|
||||
def create_pattern(self, pattern: Pattern) -> int:
    """Persist a detected command pattern and return its row id.

    The command sequence is flattened to a comma-separated string,
    mirroring _row_to_pattern.
    """
    values = (
        pattern.project_id,
        pattern.name,
        ",".join(pattern.command_sequence),
        pattern.occurrences,
        pattern.confidence,
        pattern.created_at.isoformat(),
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO patterns
            (project_id, name, command_sequence, occurrences, confidence, created_at)
            VALUES (?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
||||
|
||||
def get_patterns(self, project_id: Optional[int] = None) -> List[Pattern]:
    """Return patterns (optionally for one project), most frequent first.

    Fix: the project filter now tests ``is not None`` instead of
    truthiness, so project_id=0 is honored as a filter.
    """
    query = "SELECT * FROM patterns"
    params: list = []
    if project_id is not None:
        query += " WHERE project_id = ?"
        params.append(project_id)
    query += " ORDER BY occurrences DESC"

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        return [self._row_to_pattern(row) for row in rows]
|
||||
|
||||
def _row_to_pattern(self, row: sqlite3.Row) -> Pattern:
    """Hydrate a Pattern from a patterns-table row (inverse of create_pattern)."""
    sequence = row["command_sequence"]
    return Pattern(
        id=row["id"],
        project_id=row["project_id"],
        name=row["name"],
        # Empty string means "no sequence" — avoid [""] from split.
        command_sequence=sequence.split(",") if sequence else [],
        occurrences=row["occurrences"],
        confidence=row["confidence"],
        created_at=datetime.fromisoformat(row["created_at"]),
    )
|
||||
|
||||
def create_suggestion(self, suggestion: Suggestion) -> int:
    """Persist a command suggestion and return its row id."""
    last_used = (
        suggestion.last_used.isoformat() if suggestion.last_used else None
    )
    values = (
        suggestion.project_id,
        suggestion.command,
        suggestion.context,
        suggestion.confidence,
        suggestion.frequency,
        last_used,
        suggestion.pattern_id,
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO suggestions
            (project_id, command, context, confidence, frequency, last_used, pattern_id)
            VALUES (?, ?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
||||
|
||||
def get_suggestions(
    self, project_id: Optional[int] = None, limit: int = 10
) -> List[Suggestion]:
    """Return the top suggestions, best confidence then frequency first.

    Fix: the project filter now tests ``is not None`` instead of
    truthiness, so project_id=0 is honored as a filter.
    """
    query = "SELECT * FROM suggestions WHERE 1=1"
    params: list = []
    if project_id is not None:
        query += " AND project_id = ?"
        params.append(project_id)
    query += " ORDER BY confidence DESC, frequency DESC LIMIT ?"
    params.append(limit)

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        return [self._row_to_suggestion(row) for row in rows]
|
||||
|
||||
def _row_to_suggestion(self, row: sqlite3.Row) -> Suggestion:
    """Hydrate a Suggestion from a suggestions-table row."""
    raw_last_used = row["last_used"]
    return Suggestion(
        id=row["id"],
        project_id=row["project_id"],
        command=row["command"],
        context=row["context"],
        confidence=row["confidence"],
        frequency=row["frequency"],
        last_used=datetime.fromisoformat(raw_last_used) if raw_last_used else None,
        pattern_id=row["pattern_id"],
    )
|
||||
|
||||
def search_commands(
    self,
    query: str,
    project_id: Optional[int] = None,
    command_type: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: int = 50,
) -> List[Command]:
    """Substring-search command text, newest first, with optional filters.

    Args:
        query: substring matched against the command text (SQL LIKE).
        project_id / command_type: optional equality filters.
        start_time / end_time: inclusive ISO-timestamp bounds.
        limit: maximum rows returned.

    Fix: project_id is now tested with ``is not None`` so an id of 0
    filters correctly instead of being ignored.
    """
    sql = "SELECT * FROM commands WHERE command LIKE ?"
    params: list = [f"%{query}%"]
    if project_id is not None:
        sql += " AND project_id = ?"
        params.append(project_id)
    if command_type:
        sql += " AND command_type = ?"
        params.append(command_type)
    if start_time:
        sql += " AND timestamp >= ?"
        params.append(start_time.isoformat())
    if end_time:
        sql += " AND timestamp <= ?"
        params.append(end_time.isoformat())
    sql += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)

    with self.get_connection() as conn:
        rows = conn.execute(sql, params).fetchall()
        return [self._row_to_command(row) for row in rows]
|
||||
|
||||
def delete_project(self, project_id: int) -> None:
    """Remove a project and every row that references it.

    Child tables are cleared before the project row itself so the
    foreign-key relationships are never violated.
    """
    child_tables = ("commands", "workflows", "patterns", "suggestions")
    with self.get_connection() as conn:
        # Table names come from a fixed tuple, not user input.
        for table in child_tables:
            conn.execute(f"DELETE FROM {table} WHERE project_id = ?", (project_id,))
        conn.execute("DELETE FROM projects WHERE id = ?", (project_id,))
|
||||
|
||||
def close(self) -> None:
    """No-op: connections are opened per call via get_connection(), so there
    is no persistent handle to release. Kept so callers can pair Database()
    with close() symmetrically."""
    pass
|
||||
184
app/cli_memory/models.py
Normal file
184
app/cli_memory/models.py
Normal file
@@ -0,0 +1,184 @@
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class CommandType(Enum):
    """Coarse classification of a recorded shell command.

    The string value (e.g. "git") is what gets persisted to the database
    and serialized by Command.to_dict(); OTHER is the catch-all used when
    no category matches.
    """

    GIT = "git"
    DOCKER = "docker"
    BUILD = "build"
    TEST = "test"
    DEPLOY = "deploy"
    FILE_OP = "file_op"
    SYSTEM = "system"
    OTHER = "other"
|
||||
|
||||
|
||||
@dataclass
class Project:
    """A tracked project (typically a git checkout on disk)."""

    id: Optional[int] = None
    name: str = ""
    path: str = ""
    git_remote: Optional[str] = None
    tech_stack: List[str] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (datetimes become ISO strings)."""
        return dict(
            id=self.id,
            name=self.name,
            path=self.path,
            git_remote=self.git_remote,
            tech_stack=self.tech_stack,
            created_at=self.created_at.isoformat(),
            updated_at=self.updated_at.isoformat(),
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Project":
        """Inverse of to_dict; missing keys fall back to field defaults."""

        def _parse(key: str) -> datetime:
            raw = data.get(key)
            return datetime.fromisoformat(raw) if raw else datetime.utcnow()

        return cls(
            id=data.get("id"),
            name=data.get("name", ""),
            path=data.get("path", ""),
            git_remote=data.get("git_remote"),
            tech_stack=data.get("tech_stack", []),
            created_at=_parse("created_at"),
            updated_at=_parse("updated_at"),
        )
|
||||
|
||||
|
||||
@dataclass
class Command:
    """One recorded shell command together with its execution context."""

    id: Optional[int] = None
    workflow_id: Optional[int] = None
    project_id: Optional[int] = None
    command: str = ""
    command_type: CommandType = CommandType.OTHER
    exit_code: Optional[int] = None
    duration_ms: Optional[int] = None
    working_directory: str = ""
    timestamp: datetime = field(default_factory=datetime.utcnow)
    tags: List[str] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (enum -> value, datetime -> ISO)."""
        return dict(
            id=self.id,
            workflow_id=self.workflow_id,
            project_id=self.project_id,
            command=self.command,
            command_type=self.command_type.value,
            exit_code=self.exit_code,
            duration_ms=self.duration_ms,
            working_directory=self.working_directory,
            timestamp=self.timestamp.isoformat(),
            tags=self.tags,
            metadata=self.metadata,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Command":
        """Inverse of to_dict; missing keys fall back to field defaults."""
        raw_ts = data.get("timestamp")
        return cls(
            id=data.get("id"),
            workflow_id=data.get("workflow_id"),
            project_id=data.get("project_id"),
            command=data.get("command", ""),
            command_type=CommandType(data.get("command_type", "other")),
            exit_code=data.get("exit_code"),
            duration_ms=data.get("duration_ms"),
            working_directory=data.get("working_directory", ""),
            timestamp=datetime.fromisoformat(raw_ts) if raw_ts else datetime.utcnow(),
            tags=data.get("tags", []),
            metadata=data.get("metadata", {}),
        )
|
||||
|
||||
|
||||
@dataclass
class Workflow:
    """An ordered sequence of recorded commands, optionally auto-detected."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    name: str = ""
    description: str = ""
    commands: List[Command] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)
    is_automated: bool = False
    pattern_confidence: float = 0.0
    usage_count: int = 0

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict; commands are serialized recursively."""
        return dict(
            id=self.id,
            project_id=self.project_id,
            name=self.name,
            description=self.description,
            commands=[c.to_dict() for c in self.commands],
            created_at=self.created_at.isoformat(),
            updated_at=self.updated_at.isoformat(),
            is_automated=self.is_automated,
            pattern_confidence=self.pattern_confidence,
            usage_count=self.usage_count,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Workflow":
        """Inverse of to_dict; missing keys fall back to field defaults."""

        def _parse(key: str) -> datetime:
            raw = data.get(key)
            return datetime.fromisoformat(raw) if raw else datetime.utcnow()

        return cls(
            id=data.get("id"),
            project_id=data.get("project_id"),
            name=data.get("name", ""),
            description=data.get("description", ""),
            commands=[Command.from_dict(c) for c in data.get("commands", [])],
            created_at=_parse("created_at"),
            updated_at=_parse("updated_at"),
            is_automated=data.get("is_automated", False),
            pattern_confidence=data.get("pattern_confidence", 0.0),
            usage_count=data.get("usage_count", 0),
        )
|
||||
|
||||
|
||||
@dataclass
class Suggestion:
    """A ranked command suggestion surfaced to the user."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    command: str = ""
    context: str = ""
    confidence: float = 0.0
    frequency: int = 0
    last_used: Optional[datetime] = None
    pattern_id: Optional[int] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (last_used as ISO string or None)."""
        when = self.last_used.isoformat() if self.last_used is not None else None
        return dict(
            id=self.id,
            project_id=self.project_id,
            command=self.command,
            context=self.context,
            confidence=self.confidence,
            frequency=self.frequency,
            last_used=when,
            pattern_id=self.pattern_id,
        )
|
||||
|
||||
|
||||
@dataclass
class Pattern:
    """A repeated command sequence detected across recordings."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    name: str = ""
    command_sequence: List[str] = field(default_factory=list)
    occurrences: int = 0
    confidence: float = 0.0
    created_at: datetime = field(default_factory=datetime.utcnow)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (created_at as ISO string)."""
        return dict(
            id=self.id,
            project_id=self.project_id,
            name=self.name,
            command_sequence=self.command_sequence,
            occurrences=self.occurrences,
            confidence=self.confidence,
            created_at=self.created_at.isoformat(),
        )
|
||||
76
app/config.yaml
Normal file
76
app/config.yaml
Normal file
@@ -0,0 +1,76 @@
|
||||
# CLI Command Memory Configuration
|
||||
|
||||
# Database settings
|
||||
database:
|
||||
path: "~/.cli_memory/database.db"
|
||||
wal_mode: true
|
||||
timeout: 30.0
|
||||
|
||||
# Recording settings
|
||||
recording:
|
||||
max_commands_per_workflow: 100
|
||||
min_commands_for_workflow: 3
|
||||
auto_save_interval: 10
|
||||
capture_exit_code: true
|
||||
capture_duration: true
|
||||
|
||||
# Project detection
|
||||
project:
|
||||
auto_detect_git: true
|
||||
cache_ttl: 3600
|
||||
excluded_dirs:
|
||||
- node_modules
|
||||
- __pycache__
|
||||
- .git
|
||||
- venv
|
||||
- .venv
|
||||
|
||||
# Search settings
|
||||
search:
|
||||
max_results: 50
|
||||
default_limit: 20
|
||||
enable_fuzzy: true
|
||||
fuzzy_threshold: 0.6
|
||||
|
||||
# Suggestions engine
|
||||
suggestions:
|
||||
max_suggestions: 10
|
||||
min_confidence: 0.3
|
||||
decay_factor: 0.95
|
||||
recency_weight: 0.3
|
||||
frequency_weight: 0.4
|
||||
context_weight: 0.3
|
||||
|
||||
# Pattern detection
|
||||
patterns:
|
||||
min_sequence_length: 3
|
||||
min_occurrences: 2
|
||||
max_pattern_length: 10
|
||||
similarity_threshold: 0.8
|
||||
timeout: 30.0
|
||||
|
||||
# Script generation
|
||||
script:
|
||||
output_dir: "~/.cli_memory/scripts"
|
||||
include_error_handling: true
|
||||
include_logging: true
|
||||
dry_run_default: false
|
||||
|
||||
# Playback settings
|
||||
playback:
|
||||
default_speed: 1.0
|
||||
confirm_each: false
|
||||
show_progress: true
|
||||
pause_on_error: true
|
||||
|
||||
# Shell integration
|
||||
shell:
|
||||
enable_autocomplete: true
|
||||
prompt_command: "cm-prompt"
|
||||
history_file: "~/.cli_memory/shell_history"
|
||||
|
||||
# Logging
|
||||
logging:
|
||||
level: "info"
|
||||
format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
file: "~/.cli_memory/logs/app.log"
|
||||
6
app/requirements.txt
Normal file
6
app/requirements.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
click==8.1.7
|
||||
rich==13.7.0
|
||||
pyyaml==6.0.1
|
||||
python-dotenv==1.0.0
|
||||
pytest==7.4.0
|
||||
pytest-cov==4.1.0
|
||||
26
app/setup.py
Normal file
26
app/setup.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from setuptools import setup


# Packaging metadata for the cli-memory tool. Runtime dependencies only;
# test tooling (pytest, pytest-cov) lives in requirements.txt.
setup(
    name="cli-command-memory",
    version="1.0.0",
    description="CLI tool that records developer terminal workflows with smart autocomplete",
    author="Developer",
    author_email="dev@example.com",
    # Explicit package list: the main package plus its commands subpackage.
    packages=["cli_memory", "cli_memory.commands"],
    python_requires=">=3.9",
    install_requires=[
        "click>=8.1.7",
        "rich>=13.7.0",
        "pyyaml>=6.0.1",
        "python-dotenv>=1.0.0",
    ],
    # Installs the `cli-memory` console command -> cli_memory.cli:main.
    entry_points={
        "console_scripts": [
            "cli-memory=cli_memory.cli:main",
        ],
    },
    include_package_data=True,
    # Ship bundled YAML pattern/config files inside the package.
    package_data={
        "cli_memory": ["*.yaml", "*.yml"],
    },
)
|
||||
151
app/tests/conftest.py
Normal file
151
app/tests/conftest.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import os
|
||||
import tempfile
|
||||
import pytest
|
||||
from cli_memory.config import Config
|
||||
|
||||
|
||||
@pytest.fixture
def temp_dir():
    """Yield a throwaway directory path, removed when the test finishes."""
    tmp = tempfile.TemporaryDirectory()
    try:
        yield tmp.name
    finally:
        tmp.cleanup()
|
||||
|
||||
|
||||
@pytest.fixture
def config(temp_dir):
    """Provide a Config instance.

    NOTE(review): the temp_dir dependency is requested but never used —
    Config() presumably falls back to its default path; confirm whether the
    fixture was meant to point Config at the temporary directory.
    """
    return Config()
|
||||
|
||||
|
||||
@pytest.fixture
def sample_project_data():
    """Provide sample project data for constructing Project objects."""
    return dict(
        name="test-project",
        path="/tmp/test",
        git_remote="https://github.com/user/test.git",
        tech_stack=["python", "docker"],
    )
|
||||
|
||||
|
||||
@pytest.fixture
def sample_command_data():
    """Provide sample command data for constructing Command objects."""
    return dict(
        command="git status",
        command_type="git",
        exit_code=0,
        duration_ms=50,
        working_directory="/home/user/project",
        tags=["git", "status"],
    )
|
||||
|
||||
|
||||
@pytest.fixture
def sample_workflow_data():
    """Provide sample workflow data with two nested git commands."""
    steps = [
        {"command": "git status", "command_type": "git"},
        {"command": "git add .", "command_type": "git"},
    ]
    return {
        "name": "Test Workflow",
        "description": "A test workflow",
        "commands": steps,
    }
|
||||
|
||||
|
||||
@pytest.fixture
def env_override():
    """Snapshot os.environ and restore it verbatim after the test."""
    snapshot = dict(os.environ)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(snapshot)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_docker_yaml():
    """Provide sample Docker YAML content for testing.

    Three pattern entries (list/run/stop containers), each with natural-language
    trigger phrases, a shell template, and an explanation string.
    """
    return """version: \"1.0\"
description: Docker command patterns

patterns:
  - name: list_containers
    description: List running containers
    patterns:
      - list running containers
      - show running containers
      - list containers
      - show all containers
      - docker ps
    template: docker ps
    explanation: Lists all running containers with their IDs, images, and status.

  - name: run_container
    description: Run a new container
    patterns:
      - run a container
      - start a new container
      - run docker container
      - docker run
    template: docker run -d --name {name} {image}
    explanation: Starts a new detached container with the specified image.

  - name: stop_container
    description: Stop a running container
    patterns:
      - stop container
      - stop docker container
      - stop running container
    template: docker stop {container}
    explanation: Stops the specified running container.
"""
|
||||
|
||||
|
||||
@pytest.fixture
def sample_git_yaml():
    """Provide sample Git YAML content for testing.

    Three pattern entries (status/add/commit), structured identically to the
    Docker fixture: trigger phrases, a shell template, and an explanation.
    """
    return """version: \"1.0\"
description: Git command patterns

patterns:
  - name: git_status
    description: Show working tree status
    patterns:
      - git status
      - check status
      - show changes
    template: git status
    explanation: Shows the current status of the working directory.

  - name: git_add
    description: Add file contents to index
    patterns:
      - add file
      - stage file
      - git add
    template: git add {file}
    explanation: Adds file contents to the staging area.

  - name: git_commit
    description: Commit changes
    patterns:
      - commit changes
      - make commit
      - git commit
    template: git commit -m \"{message}\"
    explanation: Records changes in the repository with a message.
"""
|
||||
|
||||
|
||||
@pytest.fixture
def sample_corrections_json():
    """Provide sample corrections JSON for testing."""
    corrections = {"custom:my custom query": "echo custom command"}
    return {
        "version": "1.0",
        "corrections": corrections,
    }
|
||||
1
cli_memory/__init__.py
Normal file
1
cli_memory/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__version__ = "1.0.0"
|
||||
120
cli_memory/cli.py
Normal file
120
cli_memory/cli.py
Normal file
@@ -0,0 +1,120 @@
|
||||
import sys
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from .config import Config
|
||||
from . import __version__
|
||||
|
||||
|
||||
@click.group()
@click.version_option(version=__version__, prog_name="cli-memory")
@click.option("--config", "-c", type=click.Path(), help="Path to config file")
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.pass_context
def main(ctx, config, verbose):
    """Root command group: prepares the shared context for subcommands."""
    ctx.ensure_object(dict)
    cfg = Config(config) if config else Config()
    ctx.obj["config"] = cfg
    ctx.obj["verbose"] = verbose

    if verbose:
        click.echo(f"Configuration loaded from: {cfg.config_path}")
|
||||
|
||||
|
||||
@main.command()
def status():
    """Show current status and statistics."""
    from .database import Database
    from .history import HistoryManager

    db = Database()
    history = HistoryManager()
    stats = history.get_statistics()

    table = Table(title="CLI Command Memory Status")
    table.add_column("Metric", style="cyan")
    table.add_column("Value", style="magenta")

    table.add_row("Total Projects", str(stats["total_projects"]))
    table.add_row("Total Workflows", str(stats["total_workflows"]))
    table.add_row("Total Commands", str(stats["total_commands"]))
    # Fix: this row displays the database location, not the config file —
    # it was previously mislabeled "Config Path". str() guards against
    # db_path being a Path-like object (rich requires str cells).
    table.add_row("Database Path", str(db.db_path))

    rprint(Panel(table, title="Status"))
    db.close()
|
||||
|
||||
|
||||
@main.command()
def init():
    """Initialize CLI Command Memory: create directories and the database."""
    from .database import Database

    # Config is already imported at module top; the previous local
    # `from .config import Config` re-import was redundant and is removed.
    config = Config()
    config.ensure_directories()
    db = Database()

    rprint(Panel.fit("[green]CLI Command Memory initialized successfully!", title="Init"))
    rprint(f"Database: {db.db_path}")
    rprint(f"Config: {config.config_path}")

    db.close()
|
||||
|
||||
|
||||
@main.command()
@click.option("--path", "-p", type=click.Path(), help="Project path to detect")
def detect(path):
    """Detect project context at current or specified path."""
    from .project import ProjectDetector

    project = ProjectDetector().detect(path)
    if not project:
        rprint("[yellow]No git project detected at specified path")
        return

    table = Table(title="Detected Project")
    table.add_column("Property", style="cyan")
    table.add_column("Value", style="magenta")
    table.add_row("Name", project.name)
    table.add_row("Path", project.path)
    table.add_row("Git Remote", project.git_remote or "N/A")
    stack = ", ".join(project.tech_stack) if project.tech_stack else "None"
    table.add_row("Tech Stack", stack)
    rprint(Panel(table, title="Project Detected"))
|
||||
|
||||
|
||||
@main.group()
def shell():
    """Shell integration commands.

    Container group only; the actual work lives in its subcommands.
    """
    pass
|
||||
|
||||
|
||||
@shell.command()
def setup():
    """Set up shell integration.

    NOTE(review): currently only prints instructions — it does not write the
    cm-prompt.sh file or modify any shell profile; confirm whether file
    generation is handled elsewhere.
    """
    rprint("[green]Shell integration setup complete!")
    rprint("Add the following to your shell profile:")
    rprint("[cyan]source ~/.cli_memory/shell/cm-prompt.sh[/cyan]")
|
||||
|
||||
|
||||
@main.group()
def autocomplete():
    """Manage autocomplete settings.

    Container group only; see the enable/disable subcommands.
    """
    pass
|
||||
|
||||
|
||||
@autocomplete.command()
def enable():
    """Enable autocomplete integration.

    NOTE(review): only prints a confirmation — no persisted state is
    changed here; confirm where the setting is actually stored.
    """
    rprint("[green]Autocomplete enabled!")
|
||||
|
||||
|
||||
@autocomplete.command()
def disable():
    """Disable autocomplete integration.

    NOTE(review): only prints a confirmation — no persisted state is
    changed here; confirm where the setting is actually stored.
    """
    rprint("[yellow]Autocomplete disabled.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
0
cli_memory/commands/__init__.py
Normal file
0
cli_memory/commands/__init__.py
Normal file
98
cli_memory/commands/export.py
Normal file
98
cli_memory/commands/export.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import os
|
||||
import json
|
||||
import yaml
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from ..config import Config
|
||||
from ..database import Database
|
||||
from ..history import HistoryManager
|
||||
|
||||
|
||||
@click.group()
def export():
    """Export data.

    Container group for the commands/workflows/all/scripts subcommands.
    """
    pass
|
||||
|
||||
|
||||
@export.command()
@click.option("--output", "-o", default="commands.json", help="Output file")
def commands(output):
    """Export commands to file.

    Writes JSON when the output name ends in .json, YAML otherwise.

    Fixes: removed an unused HistoryManager instance, and the database is
    now closed via try/finally so a serialization error cannot leak it.
    """
    db = Database()
    try:
        records = [c.to_dict() for c in db.get_commands(limit=10000)]
    finally:
        db.close()

    with open(output, "w") as f:
        if output.endswith(".json"):
            json.dump(records, f, indent=2, default=str)
        else:
            yaml.dump(records, f)

    rprint(f"[green]Exported {len(records)} commands to {output}[/green]")
|
||||
|
||||
|
||||
@export.command()
@click.option("--output", "-o", default="workflows.yaml", help="Output file")
def workflows(output):
    """Export workflows to file (JSON for .json outputs, YAML otherwise)."""
    db = Database()
    payload = [w.to_dict() for w in db.get_all_workflows()]

    db.close()

    as_json = output.endswith(".json")
    with open(output, "w") as f:
        if as_json:
            json.dump(payload, f, indent=2, default=str)
        else:
            yaml.dump(payload, f)

    rprint(f"[green]Exported {len(payload)} workflows to {output}[/green]")
|
||||
|
||||
|
||||
@export.command(name="all")
@click.option("--output", "-o", default="backup.json", help="Output file")
def export_all(output):
    """Export all data.

    Fix: renamed the function from `all` (which shadowed the builtin) to
    `export_all`; the CLI command name is preserved via the explicit
    `name="all"` registration.
    """
    db = Database()
    history = HistoryManager(db=db)

    # Format follows the output extension: .json -> JSON, anything else -> YAML.
    data_str = history.export_history(format="json" if output.endswith(".json") else "yaml")

    with open(output, "w") as f:
        f.write(data_str)

    db.close()

    rprint(f"[green]Exported all data to {output}[/green]")
|
||||
|
||||
|
||||
@export.command()
def scripts():
    """List generated scripts."""
    from ..generator import ScriptGenerator

    found = ScriptGenerator().list_generated_scripts()

    if not found:
        rprint("[yellow]No generated scripts found[/yellow]")
        return

    table = Table(title="Generated Scripts")
    table.add_column("Name", style="cyan")
    table.add_column("Path", style="magenta")
    for entry in found:
        table.add_row(entry["name"], entry["path"])

    rprint(Panel(table, title=f"{len(found)} Scripts"))
|
||||
94
cli_memory/commands/recording.py
Normal file
94
cli_memory/commands/recording.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from ..config import Config
|
||||
from ..database import Database
|
||||
from ..recorder import CommandRecorder
|
||||
from ..project import ProjectDetector
|
||||
|
||||
|
||||
# Click group: parent for the recording sub-commands (start / single / recent).
@click.group()
def record():
    """Record and manage command recordings."""
    pass
|
||||
|
||||
|
||||
@record.command()
@click.option("--project", "-p", help="Project name or path")
@click.option("--tag", "-t", multiple=True, help="Tags for this recording")
def start(project, tag):
    """Start an interactive recording session."""
    # NOTE(review): `tag` is accepted but never used, `recorder` is constructed
    # but never called, and the collected commands are only counted — they are
    # never persisted to the database. This looks like placeholder behavior;
    # confirm before relying on this command.
    rprint(Panel.fit("[yellow]Starting recording session...[/yellow]", title="Record"))
    rprint("[cyan]Type 'exit' to stop recording[/cyan]")

    # Best-effort project detection; purely informational in this command.
    detector = ProjectDetector()
    proj = detector.detect(project)
    if proj:
        rprint(f"[green]Detected project: {proj.name}[/green]")

    recorder = CommandRecorder()

    # Read commands until an exit word, EOF (Ctrl-D) or Ctrl-C.
    commands = []
    try:
        while True:
            cmd_input = input("cm-record> ").strip()
            if cmd_input.lower() in ("exit", "quit", "q"):
                break
            if cmd_input:
                commands.append(cmd_input)
    except (EOFError, KeyboardInterrupt):
        # Treat interrupt/EOF as a normal end of the session.
        pass

    if commands:
        rprint(f"[green]Recorded {len(commands)} commands[/green]")
    else:
        rprint("[yellow]No commands recorded[/yellow]")
|
||||
|
||||
|
||||
@record.command()
@click.argument("command")
@click.option("--project", "-p", help="Project path")
def single(command, project):
    """Record a single command."""
    recorder = CommandRecorder()
    # Resolve the owning project first; it may legitimately be absent.
    detected = ProjectDetector().detect(project)
    if detected:
        owner_id = detected.id
    else:
        owner_id = None

    entry = recorder.record_command(command, project_id=owner_id)

    # Persist the recorded command.
    db = Database()
    db.create_command(entry)
    db.close()

    rprint(f"[green]Recorded: {command}[/green]")
|
||||
|
||||
|
||||
@record.command()
@click.option("--limit", "-l", default=20, help="Number of recent commands to show")
def recent(limit):
    """Show recent recorded commands."""
    db = Database()
    history = db.get_commands(limit=limit)
    db.close()

    if not history:
        rprint("[yellow]No commands recorded yet[/yellow]")
        return

    table = Table(title="Recent Commands")
    for header, color in (
        ("ID", "cyan"),
        ("Command", "magenta"),
        ("Type", "green"),
        ("Timestamp", "yellow"),
    ):
        table.add_column(header, style=color)

    for entry in history[:limit]:
        # Truncate long command lines so the table stays readable.
        shown = entry.command
        if len(shown) > 50:
            shown = shown[:50] + "..."
        table.add_row(
            str(entry.id),
            shown,
            entry.command_type.value,
            entry.timestamp.strftime("%Y-%m-%d %H:%M"),
        )

    rprint(Panel(table, title="Recent Commands"))
|
||||
122
cli_memory/commands/search.py
Normal file
122
cli_memory/commands/search.py
Normal file
@@ -0,0 +1,122 @@
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from ..config import Config
|
||||
from ..database import Database
|
||||
from ..search import SearchEngine
|
||||
from ..history import HistoryManager
|
||||
|
||||
|
||||
# Click group: parent for the search sub-commands (commands / tech / recent / stats).
@click.group()
def search():
    """Search command history."""
    pass
|
||||
|
||||
|
||||
@search.command()
@click.argument("query")
@click.option("--project", "-p", help="Filter by project ID")
@click.option("--type", "-t", help="Filter by command type")
@click.option("--fuzzy", "-f", is_flag=True, help="Enable fuzzy search")
@click.option("--limit", "-l", default=20, help="Maximum results")
def commands(query, project, type, fuzzy, limit):
    """Search recorded commands."""
    # NOTE: parameter name `type` shadows the builtin but is fixed by the
    # CLI option name, so it stays.
    db = Database()
    engine = SearchEngine()

    # --project arrives as a string; the engine expects an int id (or None).
    proj_id = int(project) if project else None
    history = db.get_commands(limit=1000)

    matches = engine.search_commands(
        history, query,
        project_id=proj_id,
        command_type=type,
        fuzzy=fuzzy,
        limit=limit,
    )

    db.close()

    if not matches:
        rprint(f"[yellow]No commands found matching '{query}'[/yellow]")
        return

    table = Table(title=f"Search Results: {query}")
    for header, color in (("Command", "cyan"), ("Type", "magenta"), ("Time", "green")):
        table.add_column(header, style=color)

    for entry in matches[:limit]:
        shown = entry.command
        if len(shown) > 60:
            shown = shown[:60] + "..."
        table.add_row(
            shown,
            entry.command_type.value,
            entry.timestamp.strftime("%Y-%m-%d %H:%M"),
        )

    rprint(Panel(table, title=f"Found {len(matches)} commands"))
|
||||
|
||||
|
||||
@search.command()
@click.argument("technology")
def tech(technology):
    """Search by technology stack."""
    # Pull a bounded batch of history and let the engine do the matching.
    db = Database()
    engine = SearchEngine()

    history = db.get_commands(limit=1000)
    matches = engine.search_by_technology(history, technology)

    db.close()

    rprint(f"[green]Found {len(matches)} commands for technology: {technology}[/green]")
|
||||
|
||||
|
||||
@search.command()
@click.option("--hours", "-h", default=24, help="Hours to look back")
def recent(hours):
    """Show recent commands."""
    db = Database()
    engine = SearchEngine()

    history = db.get_commands(limit=1000)
    matches = engine.search_recent(history, hours=hours)

    db.close()

    table = Table(title=f"Recent Commands (Last {hours} hours)")
    for header, color in (("Command", "cyan"), ("Type", "magenta")):
        table.add_column(header, style=color)

    # Cap the listing at 50 rows; long commands are cut (no ellipsis, as before).
    for entry in matches[:50]:
        shown = entry.command[:70] if len(entry.command) > 70 else entry.command
        table.add_row(shown, entry.command_type.value)

    rprint(Panel(table, title=f"{len(matches)} commands"))
|
||||
|
||||
|
||||
@search.command()
def stats():
    """Show command statistics."""
    db = Database()
    engine = SearchEngine()

    # Larger window than the other search commands: stats want more history.
    history = db.get_commands(limit=10000)
    summary = engine.get_command_statistics(history)

    db.close()

    table = Table(title="Command Statistics")
    for header, color in (("Metric", "cyan"), ("Value", "magenta")):
        table.add_column(header, style=color)

    table.add_row("Total Commands", str(summary["total_commands"]))
    table.add_row("Avg Duration (ms)", str(summary["avg_duration_ms"]))
    for kind, count in summary["by_type"].items():
        table.add_row(f"Type: {kind}", str(count))

    rprint(Panel(table, title="Statistics"))
|
||||
112
cli_memory/commands/suggestions.py
Normal file
112
cli_memory/commands/suggestions.py
Normal file
@@ -0,0 +1,112 @@
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from ..config import Config
|
||||
from ..database import Database
|
||||
from ..suggestions import SuggestionEngine
|
||||
|
||||
|
||||
# Click group: parent for suggestion sub-commands (next / autocomplete / train / patterns).
@click.group()
def suggest():
    """Get smart command suggestions."""
    pass
|
||||
|
||||
|
||||
@suggest.command()
@click.argument("prefix")
@click.option("--project", "-p", help="Project ID")
@click.option("--limit", "-l", default=5, help="Maximum suggestions")
def next(prefix, project, limit):
    """Get suggestions for next command."""
    db = Database()
    engine = SuggestionEngine(db=db)

    # --project arrives as a string; the engine expects an int id (or None).
    proj_id = int(project) if project else None
    hits = engine.get_suggestions(prefix, project_id=proj_id, limit=limit)

    db.close()

    if not hits:
        rprint(f"[yellow]No suggestions for: {prefix}[/yellow]")
        return

    table = Table(title=f"Suggestions for: {prefix}")
    for header, color in (
        ("Command", "cyan"),
        ("Confidence", "magenta"),
        ("Frequency", "green"),
    ):
        table.add_column(header, style=color)

    for hit in hits:
        table.add_row(hit.command, f"{hit.confidence:.2%}", str(hit.frequency))

    rprint(Panel(table, title="Suggestions"))
|
||||
|
||||
|
||||
@suggest.command()
@click.argument("prefix")
@click.option("--project", "-p", help="Project ID")
def autocomplete(prefix, project):
    """Get autocomplete candidates."""
    db = Database()
    engine = SuggestionEngine(db=db)

    proj_id = int(project) if project else None
    matches = engine.get_autocomplete_candidates(prefix, proj_id)

    db.close()

    # Guard clause instead of if/else: nothing to show, say so and stop.
    if not matches:
        rprint(f"[yellow]No autocomplete candidates for: {prefix}[/yellow]")
        return

    rprint("[cyan]Autocomplete candidates:[/cyan]")
    for candidate in matches:
        rprint(f"  {candidate}")
|
||||
|
||||
|
||||
@suggest.command()
def train():
    """Train suggestion engine."""
    # Re-fit the suggestion model on the stored history.
    db = Database()
    engine = SuggestionEngine(db=db)
    engine.train()
    db.close()

    rprint("[green]Suggestion engine trained successfully![/green]")
|
||||
|
||||
|
||||
@suggest.command()
@click.option("--project", "-p", help="Project ID")
def patterns(project):
    """Show detected patterns."""
    db = Database()
    engine = SuggestionEngine(db=db)

    proj_id = int(project) if project else None
    detected = engine.get_pattern_suggestions(proj_id)

    db.close()

    if not detected:
        rprint("[yellow]No patterns detected yet[/yellow]")
        return

    table = Table(title="Detected Patterns")
    for header, color in (
        ("Name", "cyan"),
        ("Commands", "magenta"),
        ("Occurrences", "green"),
        ("Confidence", "yellow"),
    ):
        table.add_column(header, style=color)

    # Show at most 20 patterns; long sequences are compressed to 3 steps.
    for pat in detected[:20]:
        preview = " | ".join(pat.command_sequence[:3])
        if len(pat.command_sequence) > 3:
            preview += " | ..."
        table.add_row(pat.name, preview, str(pat.occurrences), f"{pat.confidence:.2%}")

    rprint(Panel(table, title="Patterns"))
|
||||
138
cli_memory/commands/workflow.py
Normal file
138
cli_memory/commands/workflow.py
Normal file
@@ -0,0 +1,138 @@
|
||||
import click
|
||||
from rich import print as rprint
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from ..config import Config
|
||||
from ..database import Database
|
||||
from ..generator import ScriptGenerator
|
||||
from ..playback import WorkflowPlayback
|
||||
|
||||
|
||||
# Click group: parent for workflow sub-commands (list / show / play / preview / generate).
@click.group()
def workflow():
    """Manage workflows."""
    pass
|
||||
|
||||
|
||||
@workflow.command()
def list():
    """List all workflows."""
    # NOTE: the command name `list` shadows the builtin, but is fixed by the CLI.
    db = Database()
    stored = db.get_all_workflows()
    db.close()

    if not stored:
        rprint("[yellow]No workflows recorded yet[/yellow]")
        return

    table = Table(title="Workflows")
    for header, color in (
        ("ID", "cyan"),
        ("Name", "magenta"),
        ("Commands", "green"),
        ("Usage", "yellow"),
    ):
        table.add_column(header, style=color)

    for wf in stored:
        table.add_row(str(wf.id), wf.name, str(len(wf.commands)), str(wf.usage_count))

    rprint(Panel(table, title=f"{len(stored)} Workflows"))
|
||||
|
||||
|
||||
@workflow.command()
@click.argument("workflow_id", type=int)
def show(workflow_id):
    """Show workflow details."""
    db = Database()
    wf = db.get_workflow(workflow_id)
    db.close()

    if wf is None:
        rprint(f"[yellow]Workflow {workflow_id} not found[/yellow]")
        return

    table = Table(title=f"Workflow: {wf.name}")
    table.add_column("Property", style="cyan")
    table.add_column("Value", style="magenta")

    # Property/value pairs, rendered in a fixed order.
    details = (
        ("ID", str(wf.id)),
        ("Description", wf.description or "N/A"),
        ("Commands", str(len(wf.commands))),
        ("Usage Count", str(wf.usage_count)),
    )
    for prop, value in details:
        table.add_row(prop, value)

    rprint(Panel(table, title="Workflow Details"))
|
||||
|
||||
|
||||
@workflow.command()
@click.argument("workflow_id", type=int)
@click.option("--speed", "-s", default=1.0, help="Playback speed multiplier")
@click.option("--dry-run", "-n", is_flag=True, help="Dry run (no execution)")
def play(workflow_id, speed, dry_run):
    """Playback a workflow.

    Replays the commands of workflow WORKFLOW_ID via WorkflowPlayback and
    prints an executed/succeeded/failed summary. A ValueError from the
    playback engine (e.g. unknown id) is reported as a warning.
    """
    db = Database()
    playback = WorkflowPlayback(db=db)

    try:
        result = playback.playback(
            workflow_id,
            speed=speed,
            dry_run=dry_run,
        )

        rprint("[green]Workflow playback completed[/green]")
        rprint(f"  Executed: {result['executed']}")
        rprint(f"  Succeeded: {result['succeeded']}")
        rprint(f"  Failed: {result['failed']}")
    except ValueError as e:
        rprint(f"[yellow]{str(e)}[/yellow]")
    finally:
        # Bug fix: the original called db.close() after the try/except, so any
        # exception other than ValueError leaked the database connection.
        db.close()
|
||||
|
||||
|
||||
@workflow.command()
@click.argument("workflow_id", type=int)
def preview(workflow_id):
    """Preview workflow commands.

    Lists the commands of workflow WORKFLOW_ID without executing them.
    A ValueError from the playback engine is reported as a warning.
    """
    db = Database()
    playback = WorkflowPlayback(db=db)

    try:
        preview = playback.preview_workflow(workflow_id)

        # Fix: the title was an f-string with no placeholders.
        table = Table(title="Workflow Preview")
        table.add_column("#", style="cyan")
        table.add_column("Command", style="magenta")

        for item in preview:
            table.add_row(str(item["index"]), item["command"])

        rprint(Panel(table, title="Preview"))
    except ValueError as e:
        rprint(f"[yellow]{str(e)}[/yellow]")
    finally:
        # Bug fix: close the connection even when preview_workflow raises an
        # unexpected exception (the original leaked it in that case).
        db.close()
|
||||
|
||||
|
||||
@workflow.command()
@click.argument("workflow_id", type=int)
@click.option("--name", "-n", help="Script name")
@click.option("--output", "-o", help="Output directory")
def generate(workflow_id, name, output):
    """Generate script from workflow."""
    db = Database()
    generator = ScriptGenerator(db=db)
    wf = db.get_workflow(workflow_id)

    # Unknown id: report and bail out before generating anything.
    if wf is None:
        rprint(f"[yellow]Workflow {workflow_id} not found[/yellow]")
        db.close()
        return

    script_path = generator.generate_script(wf, name=name, output_dir=output)
    db.close()

    rprint(f"[green]Script generated: {script_path}[/green]")
|
||||
161
cli_memory/config.py
Normal file
161
cli_memory/config.py
Normal file
@@ -0,0 +1,161 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import yaml
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
class Config:
    """Layered runtime configuration for cli_memory.

    Values are built from per-section defaults plus environment variables
    (loaded via python-dotenv), after which `_apply_env_overrides` rebases
    the data paths under CLI_MEMORY_HOME when that variable is set.

    NOTE(review): `_find_config_file` resolves a YAML config path into
    `self.config_path`, but nothing here ever reads that file (and the
    module-level `yaml` import appears unused for it) — confirm whether
    file-based overrides were meant to be loaded.
    """

    def __init__(self, config_path: Optional[str] = None):
        # Resolve (but do not read — see class note) the config file path.
        self.config_path = config_path or self._find_config_file()
        self._config: Dict[str, Any] = {}
        self._load_config()

    def _find_config_file(self) -> str:
        """Pick a config path by priority: $CLI_MEMORY_CONFIG, then
        ~/.cli_memory/config.yaml, then ./config.yaml.

        Falls back to the home location even when no file exists there.
        """
        env_path = os.environ.get("CLI_MEMORY_CONFIG")
        if env_path and os.path.exists(env_path):
            return env_path
        home_config = os.path.expanduser("~/.cli_memory/config.yaml")
        if os.path.exists(home_config):
            return home_config
        local_config = os.path.join(os.getcwd(), "config.yaml")
        if os.path.exists(local_config):
            return local_config
        return home_config

    def _load_config(self) -> None:
        """Assemble the in-memory config dict from section defaults.

        load_dotenv() runs first so the section loaders see .env values;
        env-home overrides are applied last — the order matters.
        """
        load_dotenv()
        self._config = {
            "database": self._load_database_config(),
            "recording": self._load_recording_config(),
            "project": self._load_project_config(),
            "search": self._load_search_config(),
            "suggestions": self._load_suggestions_config(),
            "patterns": self._load_patterns_config(),
            "script": self._load_script_config(),
            "playback": self._load_playback_config(),
            "shell": self._load_shell_config(),
            "logging": self._load_logging_config(),
        }
        self._apply_env_overrides()

    def _load_database_config(self) -> Dict[str, Any]:
        """Database section: path (env-overridable), WAL mode, timeout."""
        return {
            "path": os.environ.get("DATABASE_PATH", "~/.cli_memory/database.db"),
            "wal_mode": True,
            "timeout": 30.0,
        }

    def _load_recording_config(self) -> Dict[str, Any]:
        """Recording section: workflow size limits and capture switches."""
        return {
            "max_commands_per_workflow": int(os.environ.get("MAX_WORKFLOW_COMMANDS", 100)),
            "min_commands_for_workflow": 3,
            "auto_save_interval": 10,
            "capture_exit_code": True,
            "capture_duration": True,
        }

    def _load_project_config(self) -> Dict[str, Any]:
        """Project-detection section: git autodetect, cache TTL, skip dirs."""
        return {
            "auto_detect_git": True,
            "cache_ttl": 3600,
            "excluded_dirs": ["node_modules", "__pycache__", ".git", "venv", ".venv"],
        }

    def _load_search_config(self) -> Dict[str, Any]:
        """Search section: result limits and fuzzy-match settings."""
        return {
            "max_results": 50,
            "default_limit": 20,
            "enable_fuzzy": True,
            "fuzzy_threshold": 0.6,
        }

    def _load_suggestions_config(self) -> Dict[str, Any]:
        """Suggestions section: limits plus scoring weights.

        NOTE(review): recency/frequency/context weights sum to 1.0 here;
        presumably the engine relies on that — verify before changing.
        """
        return {
            "max_suggestions": int(os.environ.get("MAX_SUGGESTIONS", 10)),
            "min_confidence": 0.3,
            "decay_factor": 0.95,
            "recency_weight": 0.3,
            "frequency_weight": 0.4,
            "context_weight": 0.3,
        }

    def _load_patterns_config(self) -> Dict[str, Any]:
        """Pattern-mining section: sequence-length and confidence thresholds."""
        return {
            "min_sequence_length": 3,
            "min_occurrences": 2,
            "max_pattern_length": 10,
            "similarity_threshold": 0.8,
            "timeout": 30.0,
        }

    def _load_script_config(self) -> Dict[str, Any]:
        """Script-generation section: output dir and generated-code options."""
        return {
            "output_dir": os.path.expanduser("~/.cli_memory/scripts"),
            "include_error_handling": True,
            "include_logging": True,
            "dry_run_default": False,
        }

    def _load_playback_config(self) -> Dict[str, Any]:
        """Playback section: speed and interaction defaults."""
        return {
            "default_speed": 1.0,
            "confirm_each": False,
            "show_progress": True,
            "pause_on_error": True,
        }

    def _load_shell_config(self) -> Dict[str, Any]:
        """Shell-integration section: autocomplete toggle, prompt, history file."""
        return {
            "enable_autocomplete": os.environ.get("ENABLE_AUTOCOMPLETE", "true").lower() == "true",
            "prompt_command": "cm-prompt",
            "history_file": os.path.expanduser("~/.cli_memory/shell_history"),
        }

    def _load_logging_config(self) -> Dict[str, Any]:
        """Logging section: level (env-overridable), format, log file."""
        return {
            "level": os.environ.get("LOG_LEVEL", "info"),
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            "file": os.path.expanduser("~/.cli_memory/logs/app.log"),
        }

    def _apply_env_overrides(self) -> None:
        """Rebase all data paths under $CLI_MEMORY_HOME when it is set.

        Must run after the section defaults are in place, since it mutates
        entries created by the section loaders.
        """
        if os.environ.get("CLI_MEMORY_HOME"):
            home = os.path.expanduser(os.environ["CLI_MEMORY_HOME"])
            self._config["database"]["path"] = os.path.join(home, "database.db")
            self._config["script"]["output_dir"] = os.path.join(home, "scripts")
            self._config["shell"]["history_file"] = os.path.join(home, "shell_history")
            self._config["logging"]["file"] = os.path.join(home, "logs", "app.log")

    def get(self, key: str, default: Any = None) -> Any:
        """Dotted-path lookup, e.g. get("database.path"); returns `default`
        when any path component is missing."""
        keys = key.split(".")
        value = self._config
        for k in keys:
            if isinstance(value, dict) and k in value:
                value = value[k]
            else:
                return default
        return value

    def set(self, key: str, value: Any) -> None:
        """Dotted-path assignment, creating intermediate dicts as needed.

        In-memory only: the change is lost on reload() and never written
        to any config file.
        """
        keys = key.split(".")
        config = self._config
        for k in keys[:-1]:
            if k not in config:
                config[k] = {}
            config = config[k]
        config[keys[-1]] = value

    def reload(self) -> None:
        """Rebuild the config from defaults + environment, discarding set()s."""
        self._load_config()

    def get_home_dir(self) -> str:
        """Return the expanded data home ($CLI_MEMORY_HOME or ~/.cli_memory)."""
        home = os.environ.get("CLI_MEMORY_HOME", "~/.cli_memory")
        return os.path.expanduser(home)

    def ensure_directories(self) -> None:
        """Create the scripts/ and logs/ subdirectories of the data home."""
        home = self.get_home_dir()
        for subdir in ["scripts", "logs"]:
            path = os.path.join(home, subdir)
            os.makedirs(path, exist_ok=True)
|
||||
85
cli_memory/context.py
Normal file
85
cli_memory/context.py
Normal file
@@ -0,0 +1,85 @@
|
||||
import os
|
||||
import subprocess
|
||||
from typing import Optional, List, Dict, Any
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Config
|
||||
from .models import Project
|
||||
from .project import ProjectDetector
|
||||
|
||||
|
||||
class ContextExtractor:
    """Collects execution context (cwd, project, git state) for recordings."""

    def __init__(self, config: Optional["Config"] = None):
        self.config = config or Config()
        # Note: passes the caller's `config` (possibly None), not self.config,
        # preserving the original wiring.
        self.project_detector = ProjectDetector(config)

    def get_current_context(self) -> Dict[str, Any]:
        """Return a snapshot of the current execution context.

        Keys: working_directory, project (dict or None), git_branch,
        git_status, user, hostname, timestamp (stringified UTC time).
        """
        # Bug fix: the original module never imported `datetime`, so the
        # timestamp line raised NameError at runtime.
        from datetime import datetime

        working_dir = os.getcwd()
        project = self.project_detector.detect(working_dir)

        context = {
            "working_directory": working_dir,
            "project": project.to_dict() if project else None,
            "git_branch": self._get_git_branch(),
            "git_status": self._get_git_status(),
            "user": os.getenv("USER", "unknown"),
            "hostname": os.getenv("HOSTNAME", "unknown"),
            "timestamp": str(datetime.utcnow()),
        }

        return context

    def _get_git_branch(self) -> Optional[str]:
        """Current git branch name, or None when not in a repo / git missing."""
        try:
            result = subprocess.run(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                capture_output=True,
                text=True,
                timeout=5,
            )
            if result.returncode == 0:
                return result.stdout.strip()
        except (subprocess.SubprocessError, FileNotFoundError):
            pass
        return None

    def _get_git_status(self) -> Dict[str, Any]:
        """Counts of modified/staged/untracked files from `git status --porcelain`.

        Returns all-zero counts when git is unavailable or the call fails.
        """
        try:
            result = subprocess.run(
                ["git", "status", "--porcelain"],
                capture_output=True,
                text=True,
                timeout=5,
            )
            if result.returncode == 0:
                lines = result.stdout.strip().split("\n") if result.stdout.strip() else []
                # Porcelain format: first two columns are index/worktree status.
                return {
                    "modified": len([l for l in lines if l.startswith(" M") or l.startswith("M ")]),
                    "staged": len([l for l in lines if l.startswith("M ") or l.startswith("A ")]),
                    "untracked": len([l for l in lines if l.startswith("??")]),
                }
        except (subprocess.SubprocessError, FileNotFoundError):
            pass
        return {"modified": 0, "staged": 0, "untracked": 0}

    def get_project_context_commands(self, project: "Project") -> List[str]:
        """Suggest setup commands based on the project's tech stack."""
        commands = []
        if project.git_remote:
            commands.append(f"git remote get-url origin # {project.git_remote}")
        for tech in project.tech_stack:
            if tech == "Python":
                commands.append("python -m pip install -r requirements.txt")
            elif tech == "Node.js":
                commands.append("npm install")
            elif tech == "Docker":
                commands.append("docker-compose up -d")
        return commands

    def extract_context_from_path(self, path: str) -> Dict[str, Any]:
        """Context for an arbitrary path: detected project plus filesystem facts."""
        project = self.project_detector.detect(path)
        return {
            "path": path,
            "project": project.to_dict() if project else None,
            "exists": os.path.exists(path),
            "is_directory": os.path.isdir(path) if os.path.exists(path) else False,
        }
|
||||
430
cli_memory/database.py
Normal file
430
cli_memory/database.py
Normal file
@@ -0,0 +1,430 @@
|
||||
import os
|
||||
import sqlite3
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from contextlib import contextmanager
|
||||
from typing import Optional, List, Dict, Any, Generator
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Config
|
||||
from .models import Project, Command, Workflow, Suggestion, Pattern
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Database:
    """SQLite-backed persistence layer for projects, commands, workflows,
    patterns and suggestions.

    Connections are opened per-operation via the get_connection() context
    manager; the schema is created lazily on construction.
    """

    def __init__(self, config: Optional[Config] = None):
        # Fall back to the default Config (env vars + built-in defaults).
        self.config = config or Config()
        self.db_path = self._get_db_path()
        self._ensure_db_exists()
|
||||
def _get_db_path(self) -> str:
|
||||
db_path = self.config.get("database.path", "~/.cli_memory/database.db")
|
||||
return os.path.expanduser(db_path)
|
||||
|
||||
    def _ensure_db_exists(self) -> None:
        """Create the database's parent directory if needed, then init the schema.

        Idempotent: directory creation uses exist_ok and the schema uses
        CREATE ... IF NOT EXISTS throughout.
        """
        db_dir = os.path.dirname(self.db_path)
        if db_dir and not os.path.exists(db_dir):
            os.makedirs(db_dir, exist_ok=True)
        self._init_schema()
|
||||
|
||||
    @contextmanager
    def get_connection(self) -> Generator[sqlite3.Connection, None, None]:
        """Yield a configured sqlite3 connection; commit on success, rollback
        on any exception, and always close.

        Row access is by column name (sqlite3.Row). WAL journaling is enabled
        per-connection when database.wal_mode is true.
        """
        conn = sqlite3.connect(self.db_path, timeout=self.config.get("database.timeout", 30.0))
        conn.row_factory = sqlite3.Row
        if self.config.get("database.wal_mode", True):
            conn.execute("PRAGMA journal_mode=WAL")
        try:
            yield conn
            # Only reached when the caller's block completed without raising.
            conn.commit()
        except Exception as e:
            conn.rollback()
            logger.error(f"Database error: {e}")
            # Re-raise so callers see the original failure, not a swallowed one.
            raise
        finally:
            conn.close()
|
||||
|
||||
    def _init_schema(self) -> None:
        """Create all tables and indexes if they do not already exist.

        Tables: projects (path UNIQUE), commands (FK -> workflows, projects),
        workflows, patterns, suggestions. Timestamps are stored as ISO-8601
        TEXT; list-like fields (tech_stack, tags) as comma-joined TEXT.
        """
        with self.get_connection() as conn:
            conn.executescript("""
            CREATE TABLE IF NOT EXISTS projects (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                path TEXT NOT NULL UNIQUE,
                git_remote TEXT,
                tech_stack TEXT,
                created_at TEXT NOT NULL,
                updated_at TEXT NOT NULL
            );

            CREATE TABLE IF NOT EXISTS commands (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                workflow_id INTEGER,
                project_id INTEGER,
                command TEXT NOT NULL,
                command_type TEXT NOT NULL,
                exit_code INTEGER,
                duration_ms INTEGER,
                working_directory TEXT NOT NULL,
                timestamp TEXT NOT NULL,
                tags TEXT,
                metadata TEXT,
                FOREIGN KEY (workflow_id) REFERENCES workflows(id),
                FOREIGN KEY (project_id) REFERENCES projects(id)
            );

            CREATE TABLE IF NOT EXISTS workflows (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                project_id INTEGER,
                name TEXT NOT NULL,
                description TEXT,
                created_at TEXT NOT NULL,
                updated_at TEXT NOT NULL,
                is_automated INTEGER DEFAULT 0,
                pattern_confidence REAL DEFAULT 0.0,
                usage_count INTEGER DEFAULT 0,
                FOREIGN KEY (project_id) REFERENCES projects(id)
            );

            CREATE TABLE IF NOT EXISTS patterns (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                project_id INTEGER,
                name TEXT NOT NULL,
                command_sequence TEXT NOT NULL,
                occurrences INTEGER DEFAULT 1,
                confidence REAL DEFAULT 0.0,
                created_at TEXT NOT NULL,
                FOREIGN KEY (project_id) REFERENCES projects(id)
            );

            CREATE TABLE IF NOT EXISTS suggestions (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                project_id INTEGER,
                command TEXT NOT NULL,
                context TEXT,
                confidence REAL DEFAULT 0.0,
                frequency INTEGER DEFAULT 1,
                last_used TEXT,
                pattern_id INTEGER,
                FOREIGN KEY (project_id) REFERENCES projects(id),
                FOREIGN KEY (pattern_id) REFERENCES patterns(id)
            );

            CREATE INDEX IF NOT EXISTS idx_commands_project ON commands(project_id);
            CREATE INDEX IF NOT EXISTS idx_commands_workflow ON commands(workflow_id);
            CREATE INDEX IF NOT EXISTS idx_commands_timestamp ON commands(timestamp);
            CREATE INDEX IF NOT EXISTS idx_workflows_project ON workflows(project_id);
            CREATE INDEX IF NOT EXISTS idx_patterns_project ON patterns(project_id);
            CREATE INDEX IF NOT EXISTS idx_suggestions_project ON suggestions(project_id);
            """)
|
||||
|
||||
    def create_project(self, project: Project) -> int:
        """Insert a project row, returning its rowid.

        Uses INSERT OR REPLACE keyed on the UNIQUE `path` column.
        NOTE(review): replacing an existing path assigns a NEW id, so any
        commands referencing the old project id go stale — confirm intended.
        """
        with self.get_connection() as conn:
            cursor = conn.execute(
                """INSERT OR REPLACE INTO projects
                (name, path, git_remote, tech_stack, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?)""",
                (
                    project.name,
                    project.path,
                    project.git_remote,
                    # tech_stack is stored comma-joined; empty list -> "".
                    ",".join(project.tech_stack) if project.tech_stack else "",
                    project.created_at.isoformat(),
                    project.updated_at.isoformat(),
                ),
            )
            return cursor.lastrowid
|
||||
|
||||
def get_project(self, project_id: int) -> Optional[Project]:
|
||||
with self.get_connection() as conn:
|
||||
row = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id,)).fetchone()
|
||||
return self._row_to_project(row) if row else None
|
||||
|
||||
def get_project_by_path(self, path: str) -> Optional[Project]:
|
||||
with self.get_connection() as conn:
|
||||
row = conn.execute("SELECT * FROM projects WHERE path = ?", (path,)).fetchone()
|
||||
return self._row_to_project(row) if row else None
|
||||
|
||||
def _row_to_project(self, row: sqlite3.Row) -> Project:
|
||||
return Project(
|
||||
id=row["id"],
|
||||
name=row["name"],
|
||||
path=row["path"],
|
||||
git_remote=row["git_remote"],
|
||||
tech_stack=row["tech_stack"].split(",") if row["tech_stack"] else [],
|
||||
created_at=datetime.fromisoformat(row["created_at"]),
|
||||
updated_at=datetime.fromisoformat(row["updated_at"]),
|
||||
)
|
||||
|
||||
def get_all_projects(self) -> List[Project]:
|
||||
with self.get_connection() as conn:
|
||||
rows = conn.execute("SELECT * FROM projects ORDER BY updated_at DESC").fetchall()
|
||||
return [self._row_to_project(row) for row in rows]
|
||||
|
||||
    def create_command(self, command: Command) -> int:
        """Insert one command row and return its rowid.

        tags are stored comma-joined (NOTE(review): a comma inside a tag
        would corrupt the round-trip); metadata is stored as str(dict) and
        parsed back by _row_to_command.
        """
        with self.get_connection() as conn:
            cursor = conn.execute(
                """INSERT INTO commands
                (workflow_id, project_id, command, command_type, exit_code,
                duration_ms, working_directory, timestamp, tags, metadata)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    command.workflow_id,
                    command.project_id,
                    command.command,
                    # Enum persisted by value; _row_to_command reads it back raw.
                    command.command_type.value,
                    command.exit_code,
                    command.duration_ms,
                    command.working_directory,
                    command.timestamp.isoformat(),
                    ",".join(command.tags) if command.tags else "",
                    str(command.metadata) if command.metadata else "",
                ),
            )
            return cursor.lastrowid
|
||||
|
||||
def get_commands(
|
||||
self,
|
||||
project_id: Optional[int] = None,
|
||||
workflow_id: Optional[int] = None,
|
||||
limit: int = 100,
|
||||
offset: int = 0,
|
||||
) -> List[Command]:
|
||||
query = "SELECT * FROM commands WHERE 1=1"
|
||||
params = []
|
||||
if project_id:
|
||||
query += " AND project_id = ?"
|
||||
params.append(project_id)
|
||||
if workflow_id:
|
||||
query += " AND workflow_id = ?"
|
||||
params.append(workflow_id)
|
||||
query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
|
||||
params.extend([limit, offset])
|
||||
|
||||
with self.get_connection() as conn:
|
||||
rows = conn.execute(query, params).fetchall()
|
||||
return [self._row_to_command(row) for row in rows]
|
||||
|
||||
def _row_to_command(self, row: sqlite3.Row) -> Command:
|
||||
tags = row["tags"].split(",") if row["tags"] else []
|
||||
try:
|
||||
metadata = eval(row["metadata"]) if row["metadata"] else {}
|
||||
except Exception:
|
||||
metadata = {}
|
||||
return Command(
|
||||
id=row["id"],
|
||||
workflow_id=row["workflow_id"],
|
||||
project_id=row["project_id"],
|
||||
command=row["command"],
|
||||
command_type=row["command_type"],
|
||||
exit_code=row["exit_code"],
|
||||
duration_ms=row["duration_ms"],
|
||||
working_directory=row["working_directory"],
|
||||
timestamp=datetime.fromisoformat(row["timestamp"]),
|
||||
tags=tags,
|
||||
metadata=metadata,
|
||||
)
|
||||
|
||||
def create_workflow(self, workflow: Workflow) -> int:
    """Persist *workflow* and all of its commands; return the new workflow id.

    Contained commands are re-parented (workflow_id / project_id) to the
    freshly inserted workflow before being stored individually.
    """
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO workflows
            (project_id, name, description, created_at, updated_at,
            is_automated, pattern_confidence, usage_count)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                workflow.project_id,
                workflow.name,
                workflow.description,
                workflow.created_at.isoformat(),  # timestamps stored as ISO-8601 text
                workflow.updated_at.isoformat(),
                int(workflow.is_automated),  # sqlite stores booleans as 0/1
                workflow.pattern_confidence,
                workflow.usage_count,
            ),
        )
        workflow_id = cursor.lastrowid
        # NOTE(review): create_command presumably opens its own connection,
        # so the workflow row and its commands are not one atomic
        # transaction — confirm against get_connection's semantics.
        for cmd in workflow.commands:
            cmd.workflow_id = workflow_id
            cmd.project_id = workflow.project_id
            self.create_command(cmd)
        return workflow_id
|
||||
|
||||
def get_workflow(self, workflow_id: int) -> Optional[Workflow]:
    """Fetch one workflow by id, hydrated with its commands; None if absent."""
    with self.get_connection() as conn:
        row = conn.execute("SELECT * FROM workflows WHERE id = ?", (workflow_id,)).fetchone()
        if not row:
            return None
        workflow = Workflow(
            id=row["id"],
            project_id=row["project_id"],
            name=row["name"],
            description=row["description"],
            created_at=datetime.fromisoformat(row["created_at"]),
            updated_at=datetime.fromisoformat(row["updated_at"]),
            is_automated=bool(row["is_automated"]),  # stored as a 0/1 integer
            pattern_confidence=row["pattern_confidence"],
            usage_count=row["usage_count"],
        )
        # Commands come back newest-first (get_commands orders DESC).
        workflow.commands = self.get_commands(workflow_id=workflow_id)
        return workflow
|
||||
|
||||
def get_all_workflows(self, project_id: Optional[int] = None) -> List[Workflow]:
    """Return all workflows, optionally for one project, most-used first.

    Each workflow is returned fully hydrated with its command list.
    """
    query = "SELECT * FROM workflows"
    params: list = []
    # "is not None" so an id of 0 would not be treated as "no filter".
    if project_id is not None:
        query += " WHERE project_id = ?"
        params.append(project_id)
    query += " ORDER BY usage_count DESC"

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        workflows = []
        for row in rows:
            workflow = Workflow(
                id=row["id"],
                project_id=row["project_id"],
                name=row["name"],
                description=row["description"],
                created_at=datetime.fromisoformat(row["created_at"]),
                updated_at=datetime.fromisoformat(row["updated_at"]),
                is_automated=bool(row["is_automated"]),  # stored as 0/1
                pattern_confidence=row["pattern_confidence"],
                usage_count=row["usage_count"],
            )
            # One extra query per workflow; acceptable at current scale.
            workflow.commands = self.get_commands(workflow_id=workflow.id)
            workflows.append(workflow)
        return workflows
|
||||
|
||||
def update_workflow_usage(self, workflow_id: int) -> None:
    """Bump a workflow's usage counter and refresh its updated_at stamp."""
    sql = (
        "UPDATE workflows SET usage_count = usage_count + 1, "
        "updated_at = ? WHERE id = ?"
    )
    stamp = datetime.utcnow().isoformat()
    with self.get_connection() as conn:
        conn.execute(sql, (stamp, workflow_id))
|
||||
|
||||
def create_pattern(self, pattern: Pattern) -> int:
    """Persist a detected pattern; return its new row id."""
    # The command sequence is flattened to one comma-joined string.
    values = (
        pattern.project_id,
        pattern.name,
        ",".join(pattern.command_sequence),
        pattern.occurrences,
        pattern.confidence,
        pattern.created_at.isoformat(),
    )
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO patterns
            (project_id, name, command_sequence, occurrences, confidence, created_at)
            VALUES (?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
||||
|
||||
def get_patterns(self, project_id: Optional[int] = None) -> List[Pattern]:
    """Return detected patterns, most frequently occurring first.

    Args:
        project_id: restrict to one project when given.
    """
    query = "SELECT * FROM patterns"
    params: list = []
    # "is not None" so an id of 0 would not disable the filter.
    if project_id is not None:
        query += " WHERE project_id = ?"
        params.append(project_id)
    query += " ORDER BY occurrences DESC"

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        return [self._row_to_pattern(row) for row in rows]
|
||||
|
||||
def _row_to_pattern(self, row: sqlite3.Row) -> Pattern:
    """Hydrate a Pattern from a patterns-table row.

    command_sequence is stored as one comma-joined string; a command that
    itself contains a comma would be split incorrectly here.
    """
    return Pattern(
        id=row["id"],
        project_id=row["project_id"],
        name=row["name"],
        command_sequence=row["command_sequence"].split(",") if row["command_sequence"] else [],
        occurrences=row["occurrences"],
        confidence=row["confidence"],
        created_at=datetime.fromisoformat(row["created_at"]),
    )
|
||||
|
||||
def create_suggestion(self, suggestion: Suggestion) -> int:
    """Insert a suggestion row; return its new id."""
    with self.get_connection() as conn:
        cursor = conn.execute(
            """INSERT INTO suggestions
            (project_id, command, context, confidence, frequency, last_used, pattern_id)
            VALUES (?, ?, ?, ?, ?, ?, ?)""",
            (
                suggestion.project_id,
                suggestion.command,
                suggestion.context,
                suggestion.confidence,
                suggestion.frequency,
                # last_used is optional: stored as ISO-8601 text or NULL
                suggestion.last_used.isoformat() if suggestion.last_used else None,
                suggestion.pattern_id,
            ),
        )
        return cursor.lastrowid
|
||||
|
||||
def get_suggestions(
    self, project_id: Optional[int] = None, limit: int = 10
) -> List[Suggestion]:
    """Return the top suggestions, best (confidence, then frequency) first.

    Args:
        project_id: restrict to one project when given.
        limit: maximum number of suggestions returned.
    """
    query = "SELECT * FROM suggestions WHERE 1=1"
    params: list = []
    # "is not None" so an id of 0 would not disable the filter.
    if project_id is not None:
        query += " AND project_id = ?"
        params.append(project_id)
    query += " ORDER BY confidence DESC, frequency DESC LIMIT ?"
    params.append(limit)

    with self.get_connection() as conn:
        rows = conn.execute(query, params).fetchall()
        return [self._row_to_suggestion(row) for row in rows]
|
||||
|
||||
def _row_to_suggestion(self, row: sqlite3.Row) -> Suggestion:
    """Hydrate a Suggestion from a suggestions-table row."""
    return Suggestion(
        id=row["id"],
        project_id=row["project_id"],
        command=row["command"],
        context=row["context"],
        confidence=row["confidence"],
        frequency=row["frequency"],
        # last_used is nullable in the schema
        last_used=datetime.fromisoformat(row["last_used"]) if row["last_used"] else None,
        pattern_id=row["pattern_id"],
    )
|
||||
|
||||
def search_commands(
    self,
    query: str,
    project_id: Optional[int] = None,
    command_type: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: int = 50,
) -> List[Command]:
    """Substring-search the command history with optional filters.

    Args:
        query: substring matched against the command text.
        project_id / command_type: optional equality filters.
        start_time / end_time: optional inclusive timestamp bounds.
        limit: maximum number of rows returned (newest first).

    NOTE: `query` is embedded in a LIKE pattern without escaping, so a
    literal '%' or '_' in the query acts as a wildcard.
    """
    sql = "SELECT * FROM commands WHERE command LIKE ?"
    params: list = [f"%{query}%"]
    # "is not None" so an id of 0 would not disable the filter.
    if project_id is not None:
        sql += " AND project_id = ?"
        params.append(project_id)
    if command_type:
        sql += " AND command_type = ?"
        params.append(command_type)
    if start_time is not None:
        sql += " AND timestamp >= ?"
        params.append(start_time.isoformat())
    if end_time is not None:
        sql += " AND timestamp <= ?"
        params.append(end_time.isoformat())
    sql += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)

    with self.get_connection() as conn:
        rows = conn.execute(sql, params).fetchall()
        return [self._row_to_command(row) for row in rows]
|
||||
|
||||
def delete_project(self, project_id: int) -> None:
    """Remove a project and every record that references it.

    Children are deleted before the project row itself so the data stays
    consistent even without ON DELETE CASCADE foreign keys. All deletes
    share one connection context.
    """
    with self.get_connection() as conn:
        conn.execute("DELETE FROM commands WHERE project_id = ?", (project_id,))
        conn.execute("DELETE FROM workflows WHERE project_id = ?", (project_id,))
        conn.execute("DELETE FROM patterns WHERE project_id = ?", (project_id,))
        conn.execute("DELETE FROM suggestions WHERE project_id = ?", (project_id,))
        conn.execute("DELETE FROM projects WHERE id = ?", (project_id,))
|
||||
|
||||
def close(self) -> None:
    """No-op: connections are opened and closed per call via get_connection()."""
    pass
|
||||
131
cli_memory/generator.py
Normal file
131
cli_memory/generator.py
Normal file
@@ -0,0 +1,131 @@
|
||||
import os
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from .config import Config
|
||||
from .models import Workflow, Command
|
||||
from .database import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ScriptGenerator:
    """Turns recorded workflows into runnable bash scripts."""

    def __init__(self, config: Optional[Config] = None, db: Optional[Database] = None):
        self.config = config or Config()
        self.db = db or Database()

    def generate_script(
        self,
        workflow: Workflow,
        name: Optional[str] = None,
        output_dir: Optional[str] = None,
        include_error_handling: bool = True,
        include_logging: bool = True,
    ) -> str:
        """Write an executable .sh for *workflow*; return the script path.

        The script file name is sanitised to [A-Za-z0-9_-] so arbitrary
        workflow names cannot escape the output directory.
        """
        output_dir = output_dir or self.config.get("script.output_dir", "~/.cli_memory/scripts")
        output_dir = os.path.expanduser(output_dir)
        os.makedirs(output_dir, exist_ok=True)

        script_name = name or workflow.name or f"workflow_{workflow.id}"
        safe_name = "".join(c if c.isalnum() or c in "_-" else "_" for c in script_name)
        script_path = os.path.join(output_dir, f"{safe_name}.sh")

        script_content = self._build_script_content(
            workflow,
            include_error_handling=include_error_handling,
            include_logging=include_logging,
        )

        with open(script_path, "w") as f:
            f.write(script_content)

        os.chmod(script_path, 0o755)  # rwxr-xr-x so the script is directly runnable

        logger.info(f"Generated script: {script_path}")
        return script_path

    def _build_script_content(
        self,
        workflow: Workflow,
        include_error_handling: bool = True,
        include_logging: bool = True,
    ) -> str:
        """Render the bash source for *workflow*.

        Fixes over the previous version:
        - no duplicate shebang when logging is disabled;
        - the log() helper's echo quoting is valid bash (the old version
          emitted an escaped quote that never closed the string);
        - the logs/ directory is created before tee appends to it;
        - a log() fallback is defined even without logging, so the
          error-handling lines never call an undefined function;
        - single quotes inside commands are escaped with the correct
          '\'' idiom for single-quoted bash strings.
        """
        lines = ["#!/bin/bash", ""]

        if include_logging:
            lines.extend([
                f"# Generated by CLI Command Memory on {datetime.utcnow().isoformat()}",
                f"# Workflow: {workflow.name}",
                f"# Description: {workflow.description or 'No description'}",
                "",
                "set -e",
                "",
                'SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"',
                'mkdir -p "${SCRIPT_DIR}/logs"',
                'LOG_FILE="${SCRIPT_DIR}/logs/$(date +%Y%m%d_%H%M%S).log"',
                "",
                "log() {",
                '    echo "[$(date +%Y-%m-%d\\ %H:%M:%S)] $1" | tee -a "$LOG_FILE"',
                "}",
                "",
            ])
        else:
            # Previously emitted a second shebang here; just enable
            # fail-fast and a minimal log() so error handling still works.
            lines.extend([
                "set -e",
                "",
                'log() { echo "$1"; }',
                "",
            ])

        lines.append("# Workflow commands")
        lines.append("# Total commands: " + str(len(workflow.commands)))
        lines.append("")

        for i, cmd in enumerate(workflow.commands, 1):
            if len(cmd.command) > 50:
                comment = f"# Command {i}: {cmd.command[:50]}..."
            else:
                comment = f"# Command {i}: {cmd.command}"
            lines.append(comment)

            if include_error_handling:
                # '\'' is the correct way to embed a single quote inside a
                # single-quoted bash string.
                quoted = cmd.command.replace("'", "'\\''")
                lines.append(f"log 'Executing: {quoted}'")
                lines.append(
                    f"{cmd.command} || {{ log 'ERROR: Command failed: {cmd.command}'; exit 1; }}"
                )
            else:
                lines.append(cmd.command)
            lines.append("")

        if include_logging:
            lines.extend([
                "log 'Workflow completed successfully'",
                "exit 0",
            ])

        return "\n".join(lines)

    def generate_from_commands(
        self,
        commands: List[Command],
        name: str,
        output_dir: Optional[str] = None,
    ) -> str:
        """Wrap loose commands in an ad-hoc workflow and emit a script for it."""
        workflow = Workflow(
            name=name,
            description="Generated from command history",
            commands=commands,
        )
        return self.generate_script(workflow, name, output_dir)

    def list_generated_scripts(self, output_dir: Optional[str] = None) -> List[Dict[str, Any]]:
        """Describe every .sh file in the output directory, newest first."""
        output_dir = output_dir or self.config.get("script.output_dir", "~/.cli_memory/scripts")
        output_dir = os.path.expanduser(output_dir)

        if not os.path.exists(output_dir):
            return []

        scripts = []
        for filename in os.listdir(output_dir):
            if not filename.endswith(".sh"):
                continue
            filepath = os.path.join(output_dir, filename)
            stat = os.stat(filepath)
            scripts.append({
                "name": filename,
                "path": filepath,
                "size": stat.st_size,
                "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
                "executable": os.access(filepath, os.X_OK),
            })

        return sorted(scripts, key=lambda s: s["modified"], reverse=True)
|
||||
81
cli_memory/history.py
Normal file
81
cli_memory/history.py
Normal file
@@ -0,0 +1,81 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from .config import Config
|
||||
from .models import Command, Project, Workflow
|
||||
from .database import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HistoryManager:
    """High-level queries and maintenance over the recorded command history."""

    def __init__(self, config: Optional[Config] = None, db: Optional[Database] = None):
        self.config = config or Config()
        self.db = db or Database()

    def get_recent_commands(
        self, limit: int = 20, project_id: Optional[int] = None
    ) -> List[Command]:
        """Most recent commands, optionally restricted to one project."""
        return self.db.get_commands(project_id=project_id, limit=limit)

    def get_commands_by_time_range(
        self,
        start_time: datetime,
        end_time: datetime,
        project_id: Optional[int] = None,
    ) -> List[Command]:
        """Commands recorded between the two timestamps (inclusive).

        An empty search string matches every command, so only the time
        filters actually constrain the result.
        """
        return self.db.search_commands(
            "",
            project_id=project_id,
            start_time=start_time,
            end_time=end_time,
            limit=1000,
        )

    def get_statistics(self) -> Dict[str, Any]:
        """Aggregate counters across projects, workflows and commands."""
        projects = self.db.get_all_projects()
        workflows = self.db.get_all_workflows()
        commands = self.db.get_commands(limit=10000)
        cutoff = datetime.utcnow() - timedelta(days=1)

        return {
            "total_projects": len(projects),
            "total_workflows": len(workflows),
            "total_commands": len(commands),
            "recent_commands_24h": sum(1 for c in commands if c.timestamp >= cutoff),
        }

    def clear_history(
        self, project_id: Optional[int] = None, before_date: Optional[datetime] = None
    ) -> int:
        """Delete history for one project; full wipes are not supported yet."""
        if project_id:
            self.db.delete_project(project_id)
            return 1
        logger.warning("Clearing all history is not implemented in demo")
        return 0

    def export_history(self, format: str = "json") -> str:
        """Serialise the full history as json (default), yaml, or repr text."""
        projects = self.db.get_all_projects()
        workflows = self.db.get_all_workflows()
        commands = self.db.get_commands(limit=10000)

        data = {
            "exported_at": datetime.utcnow().isoformat(),
            "projects": [p.to_dict() for p in projects],
            "workflows": [w.to_dict() for w in workflows],
            "commands": [c.to_dict() for c in commands],
        }

        if format == "json":
            import json

            return json.dumps(data, indent=2, default=str)
        if format == "yaml":
            import yaml

            return yaml.dump(data, default_flow_style=False)
        return str(data)

    def close(self) -> None:
        """Release the underlying database handle."""
        self.db.close()
|
||||
184
cli_memory/models.py
Normal file
184
cli_memory/models.py
Normal file
@@ -0,0 +1,184 @@
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class CommandType(Enum):
    """Coarse classification of a shell command.

    Persisted by its string value (see Command.to_dict / from_dict).
    """

    GIT = "git"
    DOCKER = "docker"
    BUILD = "build"      # compile/package steps
    TEST = "test"
    DEPLOY = "deploy"
    FILE_OP = "file_op"  # file-system manipulation
    SYSTEM = "system"
    OTHER = "other"      # fallback when nothing else matches
|
||||
|
||||
|
||||
@dataclass
class Project:
    """A detected project (typically a git repository root)."""

    id: Optional[int] = None
    name: str = ""
    path: str = ""
    git_remote: Optional[str] = None
    tech_stack: List[str] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)

    def to_dict(self) -> Dict[str, Any]:
        """JSON-friendly representation (datetimes become ISO-8601 text)."""
        return dict(
            id=self.id,
            name=self.name,
            path=self.path,
            git_remote=self.git_remote,
            tech_stack=self.tech_stack,
            created_at=self.created_at.isoformat(),
            updated_at=self.updated_at.isoformat(),
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Project":
        """Inverse of to_dict; missing timestamps default to now."""
        def _ts(key: str) -> datetime:
            raw = data.get(key)
            return datetime.fromisoformat(raw) if raw else datetime.utcnow()

        return cls(
            id=data.get("id"),
            name=data.get("name", ""),
            path=data.get("path", ""),
            git_remote=data.get("git_remote"),
            tech_stack=data.get("tech_stack", []),
            created_at=_ts("created_at"),
            updated_at=_ts("updated_at"),
        )
|
||||
|
||||
|
||||
@dataclass
class Command:
    """One executed shell command together with its recorded context."""

    id: Optional[int] = None
    workflow_id: Optional[int] = None
    project_id: Optional[int] = None
    command: str = ""
    command_type: CommandType = CommandType.OTHER
    exit_code: Optional[int] = None
    duration_ms: Optional[int] = None
    working_directory: str = ""
    timestamp: datetime = field(default_factory=datetime.utcnow)
    tags: List[str] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """JSON-friendly representation (enum -> value, datetime -> ISO text)."""
        return dict(
            id=self.id,
            workflow_id=self.workflow_id,
            project_id=self.project_id,
            command=self.command,
            command_type=self.command_type.value,
            exit_code=self.exit_code,
            duration_ms=self.duration_ms,
            working_directory=self.working_directory,
            timestamp=self.timestamp.isoformat(),
            tags=self.tags,
            metadata=self.metadata,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Command":
        """Inverse of to_dict; a missing timestamp defaults to now."""
        raw_ts = data.get("timestamp")
        return cls(
            id=data.get("id"),
            workflow_id=data.get("workflow_id"),
            project_id=data.get("project_id"),
            command=data.get("command", ""),
            command_type=CommandType(data.get("command_type", "other")),
            exit_code=data.get("exit_code"),
            duration_ms=data.get("duration_ms"),
            working_directory=data.get("working_directory", ""),
            timestamp=datetime.fromisoformat(raw_ts) if raw_ts else datetime.utcnow(),
            tags=data.get("tags", []),
            metadata=data.get("metadata", {}),
        )
|
||||
|
||||
|
||||
@dataclass
class Workflow:
    """An ordered group of commands that accomplishes one task."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    name: str = ""
    description: str = ""
    commands: List[Command] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)
    is_automated: bool = False
    pattern_confidence: float = 0.0
    usage_count: int = 0

    def to_dict(self) -> Dict[str, Any]:
        """JSON-friendly representation; nested commands are serialised too."""
        return dict(
            id=self.id,
            project_id=self.project_id,
            name=self.name,
            description=self.description,
            commands=[c.to_dict() for c in self.commands],
            created_at=self.created_at.isoformat(),
            updated_at=self.updated_at.isoformat(),
            is_automated=self.is_automated,
            pattern_confidence=self.pattern_confidence,
            usage_count=self.usage_count,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Workflow":
        """Inverse of to_dict; missing timestamps default to now."""
        def _ts(key: str) -> datetime:
            raw = data.get(key)
            return datetime.fromisoformat(raw) if raw else datetime.utcnow()

        return cls(
            id=data.get("id"),
            project_id=data.get("project_id"),
            name=data.get("name", ""),
            description=data.get("description", ""),
            commands=[Command.from_dict(c) for c in data.get("commands", [])],
            created_at=_ts("created_at"),
            updated_at=_ts("updated_at"),
            is_automated=data.get("is_automated", False),
            pattern_confidence=data.get("pattern_confidence", 0.0),
            usage_count=data.get("usage_count", 0),
        )
|
||||
|
||||
|
||||
@dataclass
class Suggestion:
    """A command proposed to the user, with the signals used to rank it."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    command: str = ""
    context: str = ""
    confidence: float = 0.0
    frequency: int = 0
    last_used: Optional[datetime] = None
    pattern_id: Optional[int] = None

    def to_dict(self) -> Dict[str, Any]:
        """JSON-friendly representation; last_used becomes ISO text or None."""
        last_used = self.last_used.isoformat() if self.last_used else None
        return dict(
            id=self.id,
            project_id=self.project_id,
            command=self.command,
            context=self.context,
            confidence=self.confidence,
            frequency=self.frequency,
            last_used=last_used,
            pattern_id=self.pattern_id,
        )
|
||||
|
||||
|
||||
@dataclass
class Pattern:
    """A recurring command sequence mined from history."""

    id: Optional[int] = None
    project_id: Optional[int] = None
    name: str = ""
    command_sequence: List[str] = field(default_factory=list)
    occurrences: int = 0
    confidence: float = 0.0
    created_at: datetime = field(default_factory=datetime.utcnow)

    def to_dict(self) -> Dict[str, Any]:
        """JSON-friendly representation; created_at becomes ISO text."""
        return dict(
            id=self.id,
            project_id=self.project_id,
            name=self.name,
            command_sequence=self.command_sequence,
            occurrences=self.occurrences,
            confidence=self.confidence,
            created_at=self.created_at.isoformat(),
        )
|
||||
107
cli_memory/patterns.py
Normal file
107
cli_memory/patterns.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import logging
|
||||
from typing import Optional, List, Dict, Any
|
||||
from collections import Counter
|
||||
from datetime import datetime
|
||||
|
||||
from .config import Config
|
||||
from .models import Command, Pattern, Project
|
||||
from .database import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PatternDetector:
    """Finds recurring command sequences in history and scores them."""

    def __init__(self, config: Optional[Config] = None, db: Optional[Database] = None):
        self.config = config or Config()
        self.db = db or Database()

    def detect_patterns(
        self, commands: List[Command], project_id: Optional[int] = None
    ) -> List[Pattern]:
        """Return a Pattern for every command window repeated often enough.

        Windows of length min_sequence_length..max_pattern_length are
        counted over the time-ordered history; any window occurring at
        least min_occurrences times becomes a Pattern. Confidence ramps
        linearly and saturates at 1.0 once a window has been seen 5 times.
        """
        min_len = self.config.get("patterns.min_sequence_length", 3)
        min_occ = self.config.get("patterns.min_occurrences", 2)
        max_len = self.config.get("patterns.max_pattern_length", 10)

        sorted_commands = sorted(commands, key=lambda c: c.timestamp)
        sequences = self._extract_sequences(sorted_commands, min_len, max_len)
        pattern_counts = Counter(tuple(seq) for seq in sequences)

        patterns = []
        for seq, count in pattern_counts.items():
            if count < min_occ:
                continue
            patterns.append(
                Pattern(
                    project_id=project_id,
                    name=self._generate_pattern_name(seq),
                    command_sequence=list(seq),
                    occurrences=count,
                    confidence=min(count / 5.0, 1.0),
                    created_at=datetime.utcnow(),
                )
            )

        return patterns

    def _extract_sequences(
        self, commands: List[Command], min_len: int, max_len: int
    ) -> List[List[str]]:
        """Every contiguous window of command strings, min_len..max_len long.

        The previous version wrapped each window in an extra singleton
        list, so its declared List[List[str]] return type was wrong and
        detect_patterns had to double-iterate; both sides are now flat.
        """
        cmd_strings = [c.command for c in commands]
        windows: List[List[str]] = []
        for length in range(min_len, max_len + 1):
            for start in range(len(cmd_strings) - length + 1):
                windows.append(cmd_strings[start : start + length])
        return windows

    def _generate_pattern_name(self, sequence: tuple) -> str:
        """Readable name: first command's program plus the window length."""
        first_cmd = sequence[0].split()[0] if sequence else "pattern"
        return f"{first_cmd}-sequence-{len(sequence)}"

    def find_similar_patterns(
        self, pattern: Pattern, patterns: List[Pattern]
    ) -> List[Pattern]:
        """Patterns whose Jaccard similarity to *pattern* meets the threshold.

        Comparison by id: unsaved patterns (id None) are all treated as the
        same pattern and excluded, matching the previous behavior.
        """
        threshold = self.config.get("patterns.similarity_threshold", 0.8)
        similar = []
        for other in patterns:
            if other.id == pattern.id:
                continue
            if self._calculate_similarity(pattern, other) >= threshold:
                similar.append(other)
        return similar

    def _calculate_similarity(self, p1: Pattern, p2: Pattern) -> float:
        """Jaccard index over the sets of commands in the two sequences."""
        if not p1.command_sequence or not p2.command_sequence:
            return 0.0

        set1 = set(p1.command_sequence)
        set2 = set(p2.command_sequence)
        union = len(set1 | set2)
        if union == 0:
            return 0.0
        return len(set1 & set2) / union

    def analyze_workflow_patterns(
        self, project_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """Summary of detected patterns plus aggregate workflow usage."""
        commands = self.db.get_commands(project_id=project_id, limit=10000)
        workflows = self.db.get_all_workflows(project_id)

        patterns = self.detect_patterns(commands, project_id)

        return {
            "total_patterns": len(patterns),
            "high_confidence_patterns": sum(1 for p in patterns if p.confidence >= 0.7),
            "patterns": [p.to_dict() for p in patterns],
            "workflow_usage": sum(w.usage_count for w in workflows),
        }
|
||||
175
cli_memory/playback.py
Normal file
175
cli_memory/playback.py
Normal file
@@ -0,0 +1,175 @@
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any, Callable
|
||||
|
||||
from .config import Config
|
||||
from .models import Workflow, Command
|
||||
from .database import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowPlayback:
    """Replays a stored workflow command-by-command (live, dry-run or interactive)."""

    def __init__(self, config: Optional[Config] = None, db: Optional[Database] = None):
        self.config = config or Config()
        self.db = db or Database()

    def playback(
        self,
        workflow_id: int,
        speed: float = 1.0,
        confirm_each: bool = False,
        dry_run: bool = False,
        on_command: Optional[Callable[[Command, int], None]] = None,
    ) -> Dict[str, Any]:
        """Run every command of the workflow in order; return an execution report.

        Args:
            workflow_id: id of the workflow to replay.
            speed: dry-run pacing only — each simulated command sleeps 1/speed s.
            confirm_each: prompt [y/n/s/q] before each command
                (y = run, s = skip, q = stop, anything else = skip).
            dry_run: count every command as succeeded without executing it.
            on_command: observer invoked with (command, index) before each command.

        Returns:
            A dict with totals and a per-command "commands" list.

        Raises:
            ValueError: if the workflow does not exist.
        """
        workflow = self.db.get_workflow(workflow_id)
        if not workflow:
            raise ValueError(f"Workflow {workflow_id} not found")

        # Aggregate report; per-command entries accumulate in "commands".
        result = {
            "workflow_id": workflow_id,
            "workflow_name": workflow.name,
            "total_commands": len(workflow.commands),
            "executed": 0,
            "succeeded": 0,
            "failed": 0,
            "skipped": 0,
            "start_time": datetime.utcnow().isoformat(),
            "end_time": None,
            "commands": [],
        }

        for i, cmd in enumerate(workflow.commands):
            # NOTE(review): despite the "pause" name, this SKIPS every
            # command after the first failure — confirm that is intended.
            if self.config.get("playback.pause_on_error", True) and result["failed"] > 0:
                result["skipped"] += 1
                result["commands"].append({
                    "index": i,
                    "command": cmd.command,
                    "status": "skipped",
                    "reason": "Previous command failed",
                })
                continue

            if on_command:
                on_command(cmd, i)

            if confirm_each:
                response = input(f"Execute '{cmd.command}'? [y/n/s/q]: ")
                if response.lower() == "q":
                    break
                elif response.lower() == "s":
                    result["skipped"] += 1
                    result["commands"].append({
                        "index": i,
                        "command": cmd.command,
                        "status": "skipped",
                        "reason": "User skipped",
                    })
                    continue
                elif response.lower() != "y":
                    # Any other answer also skips, but without a report entry.
                    result["skipped"] += 1
                    continue

            result["executed"] += 1

            if dry_run:
                result["succeeded"] += 1
                result["commands"].append({
                    "index": i,
                    "command": cmd.command,
                    "status": "dry_run_success",
                })
                # Pace the simulation so progress is watchable.
                delay = 1.0 / speed if speed > 0 else 0
                time.sleep(delay)
            else:
                try:
                    exit_code = self._execute_command(cmd)
                    if exit_code == 0:
                        result["succeeded"] += 1
                        status = "success"
                    else:
                        result["failed"] += 1
                        status = f"failed_exit_code_{exit_code}"
                    result["commands"].append({
                        "index": i,
                        "command": cmd.command,
                        "status": status,
                        "exit_code": exit_code,
                    })
                except Exception as e:
                    result["failed"] += 1
                    result["commands"].append({
                        "index": i,
                        "command": cmd.command,
                        "status": "error",
                        "error": str(e),
                    })

        result["end_time"] = datetime.utcnow().isoformat()
        # Every replay — including a dry run — counts toward usage stats.
        self.db.update_workflow_usage(workflow_id)

        return result

    def _execute_command(self, command: Command) -> int:
        """Run one command through the shell; map timeout to -1, other errors to -2."""
        try:
            result = subprocess.run(
                command.command,
                shell=True,  # commands were recorded as shell strings
                capture_output=True,
                timeout=300,  # hard cap: 5 minutes per command
            )
            return result.returncode
        except subprocess.TimeoutExpired:
            return -1
        except Exception:
            return -2

    def preview_workflow(
        self, workflow_id: int, max_commands: int = 50
    ) -> List[Dict[str, Any]]:
        """Describe the first *max_commands* commands without executing anything.

        Raises:
            ValueError: if the workflow does not exist.
        """
        workflow = self.db.get_workflow(workflow_id)
        if not workflow:
            raise ValueError(f"Workflow {workflow_id} not found")

        preview = []
        for i, cmd in enumerate(workflow.commands[:max_commands]):
            preview.append({
                "index": i,
                "command": cmd.command,
                "working_directory": cmd.working_directory,
                "timestamp": cmd.timestamp.isoformat(),
                "type": cmd.command_type.value,
            })

        # Truncation marker so callers know the listing is incomplete.
        if len(workflow.commands) > max_commands:
            preview.append({
                "index": max_commands,
                "command": f"... and {len(workflow.commands) - max_commands} more commands",
                "working_directory": "",
                "timestamp": "",
                "type": "info",
            })

        return preview

    def simulate_workflow(self, workflow_id: int) -> Dict[str, Any]:
        """Static summary of what a replay would involve (no execution).

        Commands with no recorded duration are estimated at 1000 ms.

        Raises:
            ValueError: if the workflow does not exist.
        """
        workflow = self.db.get_workflow(workflow_id)
        if not workflow:
            raise ValueError(f"Workflow {workflow_id} not found")

        return {
            "workflow_id": workflow_id,
            "name": workflow.name,
            "total_commands": len(workflow.commands),
            "estimated_duration_seconds": sum(
                cmd.duration_ms or 1000 for cmd in workflow.commands
            )
            / 1000.0,
            "command_types": list(set(cmd.command_type.value for cmd in workflow.commands)),
            "working_directories": list(
                set(cmd.working_directory for cmd in workflow.commands)
            ),
        }
|
||||
99
cli_memory/project.py
Normal file
99
cli_memory/project.py
Normal file
@@ -0,0 +1,99 @@
|
||||
import logging
import os
import re
import subprocess
from datetime import datetime
from pathlib import Path
from typing import Optional, List, Dict, Any

from .config import Config
from .models import Project
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProjectDetector:
    """Detects the project that owns a path, preferring git repository roots."""

    def __init__(self, config: Optional[Config] = None):
        self.config = config or Config()
        # resolved absolute path -> detected Project; reset via clear_cache()
        self._project_cache: Dict[str, Project] = {}

    def detect(self, path: Optional[str] = None) -> Optional[Project]:
        """Return the Project containing *path* (default: cwd), or None.

        Walks upward from the path looking for a .git directory; only
        falls back to a plain directory-based project when git
        auto-detection is disabled in config.
        """
        search_path = Path(path or os.getcwd()).resolve()
        cache_key = str(search_path)
        if cache_key in self._project_cache:
            return self._project_cache[cache_key]

        for parent in [search_path] + list(search_path.parents):
            git_path = parent / ".git"
            # is_dir() excludes worktrees/submodules, where .git is a file
            if git_path.exists() and git_path.is_dir():
                project = self._create_project_from_git(parent)
                if project:
                    self._project_cache[cache_key] = project
                    return project

        # NOTE(review): this reads inverted — with auto_detect_git enabled
        # (the default) a non-git directory yields None, and the fallback
        # project is only built when detection is disabled. Confirm intent.
        # Also: non-git results are never cached.
        if self.config.get("project.auto_detect_git", True):
            return None
        return self._create_project(search_path)

    def _create_project_from_git(self, path: Path) -> Optional[Project]:
        """Build a Project rooted at a git repository directory."""
        project = Project(
            name=path.name,
            path=str(path),
            git_remote=self._get_git_remote(path),
            tech_stack=self._detect_tech_stack(path),
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )
        return project

    def _create_project(self, path: Path) -> Project:
        """Build a Project for a plain (non-git) directory."""
        return Project(
            name=path.name,
            path=str(path),
            tech_stack=self._detect_tech_stack(path),
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )

    def _get_git_remote(self, path: Path) -> Optional[str]:
        """Return the 'origin' remote URL without a trailing .git, or None."""
        try:
            result = subprocess.run(
                ["git", "remote", "get-url", "origin"],
                cwd=str(path),
                capture_output=True,
                text=True,
                timeout=5,  # don't hang on slow/misconfigured git
            )
            if result.returncode == 0:
                remote = result.stdout.strip()
                remote = re.sub(r"\.git$", "", remote)
                return remote
        except (subprocess.SubprocessError, FileNotFoundError):
            # git missing or timed out — treat as "no remote"
            pass
        return None

    def _detect_tech_stack(self, path: Path) -> List[str]:
        """Guess technologies from marker files in the project root (top level only)."""
        tech_stack = []
        files = list(path.iterdir())
        file_names = [f.name for f in files]

        if "requirements.txt" in file_names or "setup.py" in file_names or "pyproject.toml" in file_names:
            tech_stack.append("Python")
        if "package.json" in file_names or "yarn.lock" in file_names:
            tech_stack.append("Node.js")
        if "Cargo.toml" in file_names:
            tech_stack.append("Rust")
        if "go.mod" in file_names:
            tech_stack.append("Go")
        if "pom.xml" in file_names or "build.gradle" in file_names:
            tech_stack.append("Java")
        if "Dockerfile" in file_names or "docker-compose.yml" in file_names:
            tech_stack.append("Docker")
        # any root entry whose name contains "kube" also counts
        if "kubectl" in file_names or any("kube" in f for f in file_names):
            tech_stack.append("Kubernetes")
        if ".gitignore" in file_names:
            tech_stack.append("Git")

        return tech_stack

    def clear_cache(self) -> None:
        """Drop all memoised detection results."""
        self._project_cache.clear()
|
||||
121
cli_memory/recorder.py
Normal file
121
cli_memory/recorder.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import os
|
||||
import subprocess
|
||||
import re
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Config
|
||||
from .models import Command, CommandType, Project
|
||||
from .project import ProjectDetector
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CommandRecorder:
    """Builds Command records from raw shell command strings."""

    def __init__(self, config: Optional["Config"] = None):
        self.config = config or Config()
        self.project_detector = ProjectDetector()
        # Last project detected for this recorder (currently informational).
        self._current_project: Optional["Project"] = None

    def _classify_command(self, command: str) -> "CommandType":
        """Map a raw command line to a coarse CommandType by its leading word(s).

        A keyword matches either as a prefix ("git status") or as the whole
        command. The original prefix-only check (keywords carry a trailing
        space) misclassified bare commands such as "pytest" or "npm test"
        as OTHER, while this file's tests expect TEST.
        """
        cmd_lower = command.lower().strip()

        def matches(keywords):
            # Prefix match, or exact match with the trailing space removed.
            return any(
                cmd_lower.startswith(kw) or cmd_lower == kw.rstrip()
                for kw in keywords
            )

        if matches(["git "]):
            return CommandType.GIT
        if matches(["docker ", "docker-compose "]):
            return CommandType.DOCKER
        if matches(["make ", "cmake ", "gradlew ", "mvn ", "npm run "]):
            return CommandType.BUILD
        if matches(["pytest ", "npm test ", "cargo test ", "go test "]):
            return CommandType.TEST
        if matches(["kubectl ", "helm ", "aws ", "gcloud "]):
            return CommandType.DEPLOY
        if matches(["rm ", "mv ", "cp ", "mkdir ", "chmod ", "chown "]):
            return CommandType.FILE_OP
        if matches(["sudo ", "apt ", "yum ", "systemctl "]):
            return CommandType.SYSTEM
        return CommandType.OTHER

    def _extract_tags(self, command: str) -> List[str]:
        """Derive coarse activity tags from substrings of *command*."""
        tags = []
        cmd_lower = command.lower()
        if "git" in cmd_lower:
            tags.append("git")
        if "test" in cmd_lower:
            tags.append("testing")
        if any(kw in cmd_lower for kw in ["deploy", "push", "release"]):
            tags.append("deployment")
        if any(kw in cmd_lower for kw in ["debug", "log", "print"]):
            tags.append("debugging")
        return tags

    def record_command(
        self,
        command: str,
        working_directory: Optional[str] = None,
        project_id: Optional[int] = None,
        workflow_id: Optional[int] = None,
        capture_duration: bool = True,
        capture_exit_code: bool = True,
        exit_code: Optional[int] = None,
        duration_ms: Optional[int] = None,
    ) -> "Command":
        """Create and return a Command record for *command*.

        Args:
            command: The raw command line to record.
            working_directory: Defaults to the current working directory.
            project_id: Explicit project id; auto-detected from the working
                directory when None.
            workflow_id: Optional workflow this command belongs to.
            capture_duration / capture_exit_code: Whether to observe these by
                executing the command.
            exit_code / duration_ms: Pre-observed results. Supplying either
                skips execution entirely, letting callers record a command
                without re-running it (the original signature rejected these
                keywords with TypeError, which this file's tests rely on).
        """
        working_dir = working_directory or os.getcwd()
        if project_id is None:
            project = self.project_detector.detect(working_dir)
            project_id = project.id if project else None

        if (
            exit_code is None
            and duration_ms is None
            and (capture_exit_code or capture_duration)
        ):
            exit_code, duration_ms = self._execute_for_capture(
                command, capture_exit_code, capture_duration
            )

        return Command(
            command=command,
            command_type=self._classify_command(command),
            exit_code=exit_code,
            duration_ms=duration_ms,
            working_directory=working_dir,
            timestamp=datetime.utcnow(),
            tags=self._extract_tags(command),
            project_id=project_id,
            workflow_id=workflow_id,
        )

    def _execute_for_capture(self, command, capture_exit_code, capture_duration):
        """Run *command* and return ``(exit_code, duration_ms)`` (either may be None).

        SECURITY NOTE: executes the recorded string with shell=True; only
        pass commands the user already intended to run.
        """
        exit_code = None
        duration_ms = None
        start_time = datetime.utcnow()
        try:
            # NOTE(review): the 1 second budget means any longer command is
            # reported as timed out (exit_code -1) -- confirm this is intended.
            result = subprocess.run(
                command,
                shell=True,
                capture_output=True,
                timeout=1,
            )
            if capture_exit_code:
                exit_code = result.returncode
        except subprocess.TimeoutExpired:
            if capture_exit_code:
                exit_code = -1  # sentinel: exceeded the capture budget
        except Exception as e:
            logger.warning(f"Failed to capture command result: {e}")
            if capture_exit_code:
                exit_code = -2  # sentinel: execution failed outright
        if capture_duration:
            duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
        return exit_code, duration_ms

    def record_workflow_commands(
        self,
        commands: List[str],
        working_directory: Optional[str] = None,
        project_id: Optional[int] = None,
    ) -> List["Command"]:
        """Record each command string in order and return the Command records."""
        return [
            self.record_command(cmd_str, working_directory, project_id)
            for cmd_str in commands
        ]

    def sanitize_command(self, command: str) -> str:
        """Normalize a raw command line.

        Trims surrounding whitespace and strips a leading shell
        line-continuation backslash. The original pattern was over-escaped
        (``r"^\\\\s*\\\\\\\\\\\\s*"``) and could never match its intent, and it
        ended with a dead conditional whose branches both returned *command*.
        """
        command = command.strip()
        return re.sub(r"^\s*\\\s*", "", command)
|
||||
111
cli_memory/search.py
Normal file
111
cli_memory/search.py
Normal file
@@ -0,0 +1,111 @@
|
||||
import re
|
||||
import logging
|
||||
from typing import Optional, List, Dict, Any
|
||||
from difflib import SequenceMatcher
|
||||
|
||||
from .config import Config
|
||||
from .models import Command, CommandType
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchEngine:
    """Filters and summarizes recorded Command objects."""

    def __init__(self, config: Optional["Config"] = None):
        self.config = config or Config()

    def search_commands(
        self,
        commands: List["Command"],
        query: str,
        project_id: Optional[int] = None,
        command_type: Optional[str] = None,
        start_time: Optional[Any] = None,
        end_time: Optional[Any] = None,
        limit: int = 50,
        fuzzy: bool = False,
    ) -> List["Command"]:
        """Return up to *limit* commands matching *query* and the filters.

        Bug fix: *start_time*/*end_time* were accepted but silently ignored;
        they now bound ``cmd.timestamp`` inclusively when provided.
        """
        results = []

        for cmd in commands:
            if project_id is not None and cmd.project_id != project_id:
                continue
            if command_type is not None and cmd.command_type.value != command_type:
                continue
            if start_time is not None and cmd.timestamp < start_time:
                continue
            if end_time is not None and cmd.timestamp > end_time:
                continue

            if self._matches_query(cmd.command, query, fuzzy):
                results.append(cmd)
                if len(results) >= limit:
                    break

        return results

    def _matches_query(self, command: str, query: str, fuzzy: bool = False) -> bool:
        """Case-insensitive containment, or SequenceMatcher ratio when fuzzy."""
        if fuzzy:
            ratio = SequenceMatcher(None, command.lower(), query.lower()).ratio()
            return ratio >= self.config.get("search.fuzzy_threshold", 0.6)
        return query.lower() in command.lower()

    def search_by_project(
        self, commands: List["Command"], project_id: int
    ) -> List["Command"]:
        """Return the commands recorded for *project_id*."""
        return [cmd for cmd in commands if cmd.project_id == project_id]

    def search_by_technology(
        self, commands: List["Command"], technology: str
    ) -> List["Command"]:
        """Return commands whose type corresponds to *technology*.

        NOTE(review): "python" and "node" both map to CommandType.OTHER, so
        they return every uncategorized command -- confirm that is intended.
        """
        tech_commands = {
            "git": CommandType.GIT,
            "docker": CommandType.DOCKER,
            "python": CommandType.OTHER,
            "node": CommandType.OTHER,
        }
        cmd_type = tech_commands.get(technology.lower())
        if cmd_type:
            return [cmd for cmd in commands if cmd.command_type == cmd_type]
        return []

    def search_recent(
        self, commands: List["Command"], hours: int = 24
    ) -> List["Command"]:
        """Return commands recorded within the last *hours* hours."""
        from datetime import datetime, timedelta

        cutoff = datetime.utcnow() - timedelta(hours=hours)
        return [cmd for cmd in commands if cmd.timestamp >= cutoff]

    def get_command_statistics(self, commands: List["Command"]) -> Dict[str, Any]:
        """Aggregate totals, per-type counts, average duration, and the ten
        most common leading words over *commands*."""
        if not commands:
            return {
                "total_commands": 0,
                "by_type": {},
                "avg_duration_ms": 0,
                "most_common": [],
            }

        type_counts: Dict[str, int] = {}
        duration_sum = 0
        duration_count = 0
        command_counts: Dict[str, int] = {}

        for cmd in commands:
            type_str = cmd.command_type.value
            type_counts[type_str] = type_counts.get(type_str, 0) + 1

            if cmd.duration_ms is not None:
                duration_sum += cmd.duration_ms
                duration_count += 1

            # Bug fix: the original indexed split()[0] whenever cmd.command
            # was truthy, so an all-whitespace command raised IndexError.
            parts = cmd.command.split() if cmd.command else []
            cmd_key = parts[0] if parts else ""
            command_counts[cmd_key] = command_counts.get(cmd_key, 0) + 1

        most_common = sorted(
            command_counts.items(), key=lambda x: x[1], reverse=True
        )[:10]

        return {
            "total_commands": len(commands),
            "by_type": type_counts,
            "avg_duration_ms": duration_sum // duration_count if duration_count > 0 else 0,
            "most_common": most_common,
        }
|
||||
115
cli_memory/suggestions.py
Normal file
115
cli_memory/suggestions.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import logging
|
||||
from collections import Counter
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any, Tuple
|
||||
|
||||
from .config import Config
|
||||
from .models import Command, Suggestion, Pattern, Project
|
||||
from .database import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SuggestionEngine:
    """Suggests likely next commands from recorded history."""

    def __init__(self, config: Optional["Config"] = None, db: Optional["Database"] = None):
        self.config = config or Config()
        self.db = db or Database()
        # Bigram chain keyed by (previous command's first word, current first
        # word) -> observed second words. Bug fix: the original annotation
        # (Dict[str, List[str]]) was wrong -- keys are 2-tuples.
        self._markov_chain: Dict[Tuple[Optional[str], str], List[str]] = {}

    def train(self, project_id: Optional[int] = None) -> None:
        """Rebuild the markov chain from up to 10k stored commands."""
        commands = self.db.get_commands(project_id=project_id, limit=10000)
        self._build_markov_chain(commands)

    def _build_markov_chain(self, commands: List["Command"]) -> None:
        """Populate ``self._markov_chain`` from *commands* in timestamp order.

        NOTE(review): the chain is built here but never consulted by
        get_suggestions() -- confirm whether that is still intended.
        """
        self._markov_chain = {}
        prev_cmd = None
        for cmd in sorted(commands, key=lambda c: c.timestamp):
            cmd_words = cmd.command.split()
            if not cmd_words:
                # Empty commands neither extend the chain nor update context.
                continue
            # The original conditional here was redundant: both branches
            # produced (prev_cmd, cmd_words[0]).
            key = (prev_cmd, cmd_words[0])
            bucket = self._markov_chain.setdefault(key, [])
            if len(cmd_words) > 1:
                bucket.append(cmd_words[1])
            prev_cmd = cmd_words[0]

    def get_suggestions(
        self,
        prefix: str,
        project_id: Optional[int] = None,
        limit: int = 10,
    ) -> List["Suggestion"]:
        """Return ranked Suggestions whose command starts with *prefix*."""
        prefix_words = prefix.strip().split()
        if not prefix_words:
            return []

        suggestions = []
        max_suggestions = self.config.get("suggestions.max_suggestions", limit)
        min_confidence = self.config.get("suggestions.min_confidence", 0.3)

        recent_commands = self.db.get_commands(project_id=project_id, limit=1000)
        frequent = self._get_frequent_commands(limit, project_id)

        for cmd_str, freq in frequent:
            if not cmd_str.startswith(prefix):
                continue
            confidence = self._calculate_confidence(cmd_str, freq, recent_commands)
            if confidence < min_confidence:
                continue
            suggestions.append(
                Suggestion(
                    command=cmd_str,
                    context=prefix,
                    confidence=confidence,
                    frequency=freq,
                )
            )
            if len(suggestions) >= max_suggestions:
                break

        return suggestions

    def _get_frequent_commands(
        self, limit: int, project_id: Optional[int] = None
    ) -> List[Tuple[str, int]]:
        """Return the *limit* most frequent full command strings with counts."""
        commands = self.db.get_commands(project_id=project_id, limit=5000)
        counter = Counter(cmd.command for cmd in commands)
        return counter.most_common(limit)

    def _calculate_confidence(
        self, command: str, frequency: int, recent_commands: List["Command"]
    ) -> float:
        """Blend recency, frequency, and (constant) context scores into [0, 1]."""
        recency_weight = self.config.get("suggestions.recency_weight", 0.3)
        frequency_weight = self.config.get("suggestions.frequency_weight", 0.4)
        context_weight = self.config.get("suggestions.context_weight", 0.3)

        recency_score = 0.0
        if recent_commands:
            recent_count = sum(1 for c in recent_commands if c.command == command)
            recency_score = min(recent_count / max(len(recent_commands), 1), 1.0)

        # Frequencies above 100 saturate the frequency score.
        max_freq = 100
        frequency_score = min(frequency / max_freq, 1.0)

        # NOTE(review): context score is a constant placeholder for now.
        context_score = 1.0

        return (
            recency_weight * recency_score
            + frequency_weight * frequency_score
            + context_weight * context_score
        )

    def get_autocomplete_candidates(
        self, prefix: str, project_id: Optional[int] = None
    ) -> List[str]:
        """Return up to 5 suggested command strings for shell completion."""
        suggestions = self.get_suggestions(prefix, project_id, limit=5)
        return [s.command for s in suggestions]

    def get_pattern_suggestions(
        self, project_id: Optional[int] = None
    ) -> List["Pattern"]:
        """Return stored command-sequence patterns for *project_id*."""
        return self.db.get_patterns(project_id=project_id)

    def update_suggestion_usage(self, suggestion: "Suggestion") -> None:
        """Bump usage stats for *suggestion* and persist it."""
        suggestion.frequency += 1
        suggestion.last_used = datetime.utcnow()
        self.db.create_suggestion(suggestion)
|
||||
57
pyproject.toml
Normal file
57
pyproject.toml
Normal file
@@ -0,0 +1,57 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "cli-command-memory"
|
||||
version = "1.0.0"
|
||||
description = "CLI tool that records developer terminal workflows with smart autocomplete"
|
||||
readme = "README.md"
|
||||
license = {text = "MIT"}
|
||||
requires-python = ">=3.9"
|
||||
authors = [
|
||||
{name = "Developer"}
|
||||
]
|
||||
keywords = ["cli", "workflow", "productivity", "automation"]
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
]
|
||||
dependencies = [
|
||||
"click>=8.1.7",
|
||||
"rich>=13.7.0",
|
||||
"pyyaml>=6.0.1",
|
||||
"python-dotenv>=1.0.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4.0",
|
||||
"pytest-cov>=4.1.0",
|
||||
"ruff>=0.1.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
cli-memory = "cli_memory.cli:main"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["."]
|
||||
exclude = ["src*", "tests*"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py"]
|
||||
python_functions = ["test_*"]
|
||||
addopts = "-v --tb=short"
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ["cli_memory"]
|
||||
omit = ["tests/*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = ["pragma: no cover", "def __repr__", "raise AssertionError", "raise NotImplementedError"]
|
||||
26
setup.py
Normal file
26
setup.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from setuptools import setup


# Packaging metadata for the cli-command-memory distribution.
# NOTE(review): this duplicates the metadata in pyproject.toml; keep the two
# in sync (name, version, dependencies, entry point) or drop one of them.
setup(
    name="cli-command-memory",
    version="1.0.0",
    description="CLI tool that records developer terminal workflows with smart autocomplete",
    author="Developer",
    author_email="dev@example.com",
    # NOTE(review): hard-coded package list -- verify "cli_memory.commands"
    # actually exists; pyproject.toml instead auto-discovers packages.
    packages=["cli_memory", "cli_memory.commands"],
    python_requires=">=3.9",
    install_requires=[
        "click>=8.1.7",
        "rich>=13.7.0",
        "pyyaml>=6.0.1",
        "python-dotenv>=1.0.0",
    ],
    entry_points={
        "console_scripts": [
            "cli-memory=cli_memory.cli:main",
        ],
    },
    include_package_data=True,
    # Ship YAML default-config files alongside the package code.
    package_data={
        "cli_memory": ["*.yaml", "*.yml"],
    },
)
|
||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests package for CLI Command Memory."""
|
||||
67
tests/conftest.py
Normal file
67
tests/conftest.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Test configuration and fixtures for CLI Command Memory."""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import pytest
|
||||
from cli_memory.config import Config
|
||||
|
||||
|
||||
@pytest.fixture
def temp_dir():
    """Provide a temporary directory, removed automatically after the test."""
    with tempfile.TemporaryDirectory() as tmpdir:
        yield tmpdir


@pytest.fixture
def config():
    """Provide a fresh Config instance with default settings."""
    return Config()


@pytest.fixture
def sample_project_data():
    """Provide sample project data (dict of Project-style fields)."""
    return {
        "name": "test-project",
        "path": "/tmp/test",
        "git_remote": "https://github.com/user/test.git",
        "tech_stack": ["python", "docker"],
    }


@pytest.fixture
def sample_command_data():
    """Provide sample command data (dict of Command-style fields)."""
    return {
        "command": "git status",
        "command_type": "git",
        "exit_code": 0,
        "duration_ms": 50,
        "working_directory": "/home/user/project",
        "tags": ["git", "status"],
    }


@pytest.fixture
def sample_workflow_data():
    """Provide sample workflow data: a named workflow with two git commands."""
    return {
        "name": "Test Workflow",
        "description": "A test workflow",
        "commands": [
            {"command": "git status", "command_type": "git"},
            {"command": "git add .", "command_type": "git"},
        ],
    }


@pytest.fixture
def env_override():
    """Snapshot os.environ and restore it after the test mutates it."""
    original_env = os.environ.copy()

    yield

    # Restore the exact pre-test environment, dropping any keys the test added.
    os.environ.clear()
    os.environ.update(original_env)
|
||||
51
tests/test_config.py
Normal file
51
tests/test_config.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""Test configuration loading."""
|
||||
|
||||
from cli_memory.config import Config
|
||||
|
||||
|
||||
def test_config_init():
    """Test Config initialization."""
    config = Config()
    assert config is not None
    # NOTE(review): reaches into the private _config attribute; prefer
    # asserting through the public get() API.
    assert config._config is not None


def test_config_get():
    """Test Config.get method."""
    config = Config()

    assert config.get("database.path") is not None
    # NOTE(review): 100 must match the shipped default for
    # recording.max_commands_per_workflow -- update both together.
    assert config.get("recording.max_commands_per_workflow") == 100
    assert config.get("nonexistent.key", "default") == "default"


def test_config_set():
    """Test Config.set method."""
    config = Config()
    config.set("test.value", "test_data")
    assert config.get("test.value") == "test_data"


def test_config_get_home_dir():
    """Test Config.get_home_dir."""
    config = Config()
    home = config.get_home_dir()
    assert home is not None
    # The returned path must already be expanded (no literal "~").
    assert "~" not in home


def test_config_ensure_directories():
    """Test Config.ensure_directories."""
    import os
    config = Config()
    config.ensure_directories()
    home = config.get_home_dir()
    # NOTE(review): this creates directories under the real user home;
    # consider isolating it via the temp_dir fixture.
    assert os.path.exists(home)


def test_config_reload():
    """Test Config.reload."""
    config = Config()
    config.set("test.key", "value")
    config.reload()
    # reload() must discard unsaved in-memory changes.
    assert config.get("test.key") is None
|
||||
114
tests/test_generator.py
Normal file
114
tests/test_generator.py
Normal file
@@ -0,0 +1,114 @@
|
||||
"""Tests for CLI Command Memory script generator."""
|
||||
|
||||
import os
|
||||
from cli_memory.generator import ScriptGenerator
|
||||
from cli_memory.models import Workflow, Command, CommandType
|
||||
|
||||
|
||||
def test_script_generator_init():
|
||||
"""Test ScriptGenerator initialization."""
|
||||
generator = ScriptGenerator()
|
||||
assert generator is not None
|
||||
|
||||
|
||||
def test_generate_script_basic():
|
||||
"""Test basic script generation."""
|
||||
generator = ScriptGenerator()
|
||||
|
||||
workflow = Workflow(
|
||||
id=1,
|
||||
name="test-workflow",
|
||||
description="A test workflow",
|
||||
commands=[
|
||||
Command(command="echo hello"),
|
||||
Command(command="echo world"),
|
||||
],
|
||||
)
|
||||
|
||||
script_path = generator.generate_script(
|
||||
workflow,
|
||||
name="test-script",
|
||||
include_error_handling=False,
|
||||
include_logging=False,
|
||||
)
|
||||
|
||||
assert script_path.endswith(".sh")
|
||||
assert os.path.exists(script_path)
|
||||
|
||||
with open(script_path) as f:
|
||||
content = f.read()
|
||||
|
||||
assert "#!/bin/bash" in content
|
||||
assert "echo hello" in content
|
||||
assert "echo world" in content
|
||||
|
||||
|
||||
def test_generate_script_with_error_handling():
|
||||
"""Test script generation with error handling."""
|
||||
generator = ScriptGenerator()
|
||||
|
||||
workflow = Workflow(
|
||||
id=1,
|
||||
name="safe-script",
|
||||
commands=[Command(command="ls -la")],
|
||||
)
|
||||
|
||||
script_path = generator.generate_script(
|
||||
workflow,
|
||||
include_error_handling=True,
|
||||
include_logging=False,
|
||||
)
|
||||
|
||||
with open(script_path) as f:
|
||||
content = f.read()
|
||||
|
||||
assert "set -e" in content
|
||||
|
||||
|
||||
def test_generate_script_with_logging():
|
||||
"""Test script generation with logging."""
|
||||
generator = ScriptGenerator()
|
||||
|
||||
workflow = Workflow(
|
||||
id=1,
|
||||
name="logged-script",
|
||||
commands=[Command(command="echo test")],
|
||||
)
|
||||
|
||||
script_path = generator.generate_script(
|
||||
workflow,
|
||||
include_error_handling=False,
|
||||
include_logging=True,
|
||||
)
|
||||
|
||||
with open(script_path) as f:
|
||||
content = f.read()
|
||||
|
||||
assert "LOG_FILE=" in content
|
||||
assert "log()" in content
|
||||
|
||||
|
||||
def test_list_generated_scripts():
|
||||
"""Test listing generated scripts."""
|
||||
generator = ScriptGenerator()
|
||||
|
||||
scripts = generator.list_generated_scripts()
|
||||
assert isinstance(scripts, list)
|
||||
|
||||
|
||||
def test_generate_from_commands():
|
||||
"""Test generating script from command list."""
|
||||
generator = ScriptGenerator()
|
||||
|
||||
commands = [
|
||||
Command(command="echo one"),
|
||||
Command(command="echo two"),
|
||||
]
|
||||
|
||||
script_path = generator.generate_from_commands(
|
||||
commands,
|
||||
name="from-commands",
|
||||
)
|
||||
|
||||
assert script_path.endswith(".sh")
|
||||
assert os.path.exists(script_path)
|
||||
161
tests/test_models.py
Normal file
161
tests/test_models.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""Tests for CLI Command Memory models."""
|
||||
|
||||
from cli_memory.models import Project, Command, Workflow, Suggestion, Pattern, CommandType
|
||||
|
||||
|
||||
def test_project_creation():
|
||||
"""Test Project model creation."""
|
||||
project = Project(
|
||||
name="test-project",
|
||||
path="/tmp/test",
|
||||
git_remote="https://github.com/user/test.git",
|
||||
tech_stack=["python", "docker"],
|
||||
)
|
||||
assert project.name == "test-project"
|
||||
assert project.path == "/tmp/test"
|
||||
assert project.git_remote == "https://github.com/user/test.git"
|
||||
assert "python" in project.tech_stack
|
||||
|
||||
|
||||
def test_project_to_dict():
|
||||
"""Test Project serialization."""
|
||||
project = Project(
|
||||
id=1,
|
||||
name="test",
|
||||
path="/path",
|
||||
git_remote="git@github.com:test.git",
|
||||
tech_stack=["go"],
|
||||
)
|
||||
data = project.to_dict()
|
||||
assert data["id"] == 1
|
||||
assert data["name"] == "test"
|
||||
assert data["tech_stack"] == ["go"]
|
||||
|
||||
|
||||
def test_project_from_dict():
|
||||
"""Test Project deserialization."""
|
||||
data = {
|
||||
"id": 5,
|
||||
"name": "from_dict",
|
||||
"path": "/some/path",
|
||||
"git_remote": None,
|
||||
"tech_stack": ["rust"],
|
||||
"created_at": "2024-01-01T00:00:00",
|
||||
"updated_at": "2024-01-01T00:00:00",
|
||||
}
|
||||
project = Project.from_dict(data)
|
||||
assert project.id == 5
|
||||
assert project.name == "from_dict"
|
||||
assert "rust" in project.tech_stack
|
||||
|
||||
|
||||
def test_command_creation():
|
||||
"""Test Command model creation."""
|
||||
cmd = Command(
|
||||
command="git status",
|
||||
command_type=CommandType.GIT,
|
||||
exit_code=0,
|
||||
duration_ms=50,
|
||||
working_directory="/home/user/project",
|
||||
)
|
||||
assert cmd.command == "git status"
|
||||
assert cmd.command_type == CommandType.GIT
|
||||
assert cmd.exit_code == 0
|
||||
|
||||
|
||||
def test_command_classification():
    """Test command type classification."""
    from cli_memory.recorder import CommandRecorder
    recorder = CommandRecorder()

    # NOTE(review): record_command actually EXECUTES the given string via
    # subprocess (shell=True) in recorder.py -- so this test runs
    # "docker ps", "kubectl deploy", etc. on the test machine. Confirm that
    # is intended; otherwise classify via _classify_command directly.
    cmd = recorder.record_command(command="docker ps")
    assert cmd.command_type == CommandType.DOCKER

    cmd = recorder.record_command(command="npm run build")
    assert cmd.command_type == CommandType.BUILD

    cmd = recorder.record_command(command="pytest tests/")
    assert cmd.command_type == CommandType.TEST

    cmd = recorder.record_command(command="kubectl deploy")
    assert cmd.command_type == CommandType.DEPLOY
|
||||
|
||||
|
||||
def test_command_to_dict():
|
||||
"""Test Command serialization."""
|
||||
cmd = Command(
|
||||
id=1,
|
||||
command="ls -la",
|
||||
command_type=CommandType.SYSTEM,
|
||||
exit_code=0,
|
||||
tags=["important", "review"],
|
||||
)
|
||||
data = cmd.to_dict()
|
||||
assert data["command"] == "ls -la"
|
||||
assert data["command_type"] == "system"
|
||||
assert "important" in data["tags"]
|
||||
|
||||
|
||||
def test_workflow_creation():
|
||||
"""Test Workflow model creation."""
|
||||
cmd1 = Command(command="git checkout -b feature")
|
||||
cmd2 = Command(command="git commit -m 'feat: add feature'")
|
||||
|
||||
workflow = Workflow(
|
||||
name="Feature Branch Workflow",
|
||||
description="Create and commit to feature branch",
|
||||
commands=[cmd1, cmd2],
|
||||
)
|
||||
assert len(workflow.commands) == 2
|
||||
assert workflow.name == "Feature Branch Workflow"
|
||||
assert workflow.is_automated is False
|
||||
|
||||
|
||||
def test_workflow_to_dict():
|
||||
"""Test Workflow serialization."""
|
||||
cmd = Command(command="echo hello")
|
||||
workflow = Workflow(
|
||||
id=1,
|
||||
name="Test Workflow",
|
||||
description="A test workflow",
|
||||
commands=[cmd],
|
||||
)
|
||||
data = workflow.to_dict()
|
||||
assert data["name"] == "Test Workflow"
|
||||
assert len(data["commands"]) == 1
|
||||
|
||||
|
||||
def test_suggestion_creation():
|
||||
"""Test Suggestion model creation."""
|
||||
suggestion = Suggestion(
|
||||
command="git push",
|
||||
context="git commit",
|
||||
confidence=0.85,
|
||||
frequency=10,
|
||||
)
|
||||
assert suggestion.command == "git push"
|
||||
assert suggestion.confidence == 0.85
|
||||
|
||||
|
||||
def test_pattern_creation():
|
||||
"""Test Pattern model creation."""
|
||||
pattern = Pattern(
|
||||
name="Git workflow pattern",
|
||||
command_sequence=["git status", "git add .", "git commit -m", "git push"],
|
||||
occurrences=5,
|
||||
confidence=0.75,
|
||||
)
|
||||
assert len(pattern.command_sequence) == 4
|
||||
assert pattern.occurrences == 5
|
||||
|
||||
|
||||
def test_command_type_enum():
|
||||
"""Test CommandType enum values."""
|
||||
assert CommandType.GIT.value == "git"
|
||||
assert CommandType.DOCKER.value == "docker"
|
||||
assert CommandType.BUILD.value == "build"
|
||||
assert CommandType.TEST.value == "test"
|
||||
assert CommandType.DEPLOY.value == "deploy"
|
||||
assert CommandType.FILE_OP.value == "file_op"
|
||||
assert CommandType.SYSTEM.value == "system"
|
||||
assert CommandType.OTHER.value == "other"
|
||||
76
tests/test_patterns.py
Normal file
76
tests/test_patterns.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Tests for CLI Command Memory pattern detection."""
|
||||
|
||||
from datetime import datetime
|
||||
from cli_memory.patterns import PatternDetector
|
||||
from cli_memory.models import Command, CommandType, Pattern
|
||||
|
||||
|
||||
class MockCommand:
    # Lightweight stand-in for cli_memory.models.Command: carries only the
    # attributes PatternDetector reads (id, command, timestamp, project_id).
    def __init__(self, id, command, timestamp=None, project_id=None):
        self.id = id
        self.command = command
        self.timestamp = timestamp or datetime.utcnow()
        self.project_id = project_id


def test_pattern_detector_init():
    """Test PatternDetector initialization."""
    detector = PatternDetector()
    assert detector is not None


def test_calculate_confidence():
    """Test confidence calculation."""
    detector = PatternDetector()

    # Any detected pattern must yield a confidence in (0, 1].
    confidence = detector._calculate_confidence(occurrences=1, length=3)
    assert 0 < confidence <= 1.0

    # More occurrences of a longer sequence should score above 0.5.
    confidence = detector._calculate_confidence(occurrences=5, length=7)
    assert confidence > 0.5


def test_sequence_similarity():
    """Test sequence similarity calculation."""
    detector = PatternDetector()

    seq1 = ["git status", "git add .", "git commit"]
    seq2 = ["git status", "git add .", "git commit"]

    # Identical sequences must be maximally similar.
    similarity = detector._calculate_similarity(
        Pattern(command_sequence=seq1),
        Pattern(command_sequence=seq2)
    )
    assert similarity == 1.0


def test_detect_patterns_empty():
    """Test pattern detection with no commands."""
    detector = PatternDetector()

    patterns = detector.detect_patterns([])
    assert patterns == []


def test_detect_patterns_with_commands():
    """Test pattern detection with commands."""
    detector = PatternDetector()
    now = datetime.utcnow()

    commands = [
        MockCommand(1, "git status", timestamp=now),
        MockCommand(2, "git add .", timestamp=now),
        MockCommand(3, "git commit", timestamp=now),
    ]

    # The sequence occurs only once, so with min_occurrences=2 this only
    # checks the call succeeds and returns a list.
    patterns = detector.detect_patterns(commands, min_occurrences=2, min_length=2)
    assert isinstance(patterns, list)


def test_analyze_workflow_patterns():
    """Test workflow pattern analysis."""
    detector = PatternDetector()

    # NOTE(review): analyze_workflow_patterns() presumably reads stored
    # state -- confirm it is safe/isolated when run in CI.
    result = detector.analyze_workflow_patterns()
    assert "total_patterns" in result
    assert "high_confidence_patterns" in result
|
||||
133
tests/test_recorder.py
Normal file
133
tests/test_recorder.py
Normal file
@@ -0,0 +1,133 @@
|
||||
"""Tests for CLI Command Memory recorder."""
|
||||
|
||||
from cli_memory.recorder import CommandRecorder
|
||||
from cli_memory.models import CommandType
|
||||
|
||||
|
||||
def test_recorder_initialization():
|
||||
"""Test recorder initializes correctly."""
|
||||
recorder = CommandRecorder()
|
||||
assert recorder._current_project is None
|
||||
|
||||
|
||||
def test_record_command():
    """Test recording a single command."""
    recorder = CommandRecorder()
    # NOTE(review): verify that record_command accepts exit_code/duration_ms
    # keyword arguments -- the signature in recorder.py must provide them,
    # otherwise this call raises TypeError.
    cmd = recorder.record_command(
        command="ls -la",
        exit_code=0,
        duration_ms=25,
    )
    assert cmd.command == "ls -la"
    assert cmd.exit_code == 0
    assert cmd.duration_ms == 25
|
||||
|
||||
|
||||
def test_record_multiple_commands():
    """Recording several commands in sequence returns a record for each.

    The previous version only asserted ``recorder is not None``, which is
    vacuously true for a just-constructed object; assert on the returned
    records so the test actually exercises ``record_command``.
    """
    recorder = CommandRecorder()
    recorded = [
        recorder.record_command(command=cmd, exit_code=0)
        for cmd in ("pwd", "ls", "cd /tmp")
    ]

    assert [cmd.command for cmd in recorded] == ["pwd", "ls", "cd /tmp"]
    assert all(cmd.exit_code == 0 for cmd in recorded)
|
||||
|
||||
|
||||
def test_command_classification_git():
    """git invocations, with or without extra arguments, classify as GIT."""
    recorder = CommandRecorder()
    for command in ("git status", "git checkout main"):
        assert recorder.record_command(command=command).command_type == CommandType.GIT
|
||||
|
||||
|
||||
def test_command_classification_docker():
    """docker and docker-compose invocations classify as DOCKER."""
    recorder = CommandRecorder()
    for command in ("docker ps", "docker-compose up -d"):
        assert recorder.record_command(command=command).command_type == CommandType.DOCKER
|
||||
|
||||
|
||||
def test_command_classification_build():
    """make and npm build invocations classify as BUILD."""
    recorder = CommandRecorder()
    for command in ("make build", "npm run build"):
        assert recorder.record_command(command=command).command_type == CommandType.BUILD
|
||||
|
||||
|
||||
def test_command_classification_test():
    """pytest and npm test invocations classify as TEST."""
    recorder = CommandRecorder()
    for command in ("pytest", "npm test"):
        assert recorder.record_command(command=command).command_type == CommandType.TEST
|
||||
|
||||
|
||||
def test_command_classification_deploy():
    """kubectl apply classifies as DEPLOY."""
    result = CommandRecorder().record_command(command="kubectl apply -f deploy.yaml")
    assert result.command_type == CommandType.DEPLOY
|
||||
|
||||
|
||||
def test_command_classification_file_op():
    """cp and rm invocations classify as FILE_OP."""
    recorder = CommandRecorder()
    for command in ("cp file1 file2", "rm -rf temp"):
        assert recorder.record_command(command=command).command_type == CommandType.FILE_OP
|
||||
|
||||
|
||||
def test_command_classification_system():
    """A sudo package-manager call classifies as SYSTEM."""
    result = CommandRecorder().record_command(command="sudo apt-get update")
    assert result.command_type == CommandType.SYSTEM
|
||||
|
||||
|
||||
def test_command_classification_other():
    """Unrecognized commands fall back to OTHER."""
    result = CommandRecorder().record_command(command="my_custom_command arg1 arg2")
    assert result.command_type == CommandType.OTHER
|
||||
|
||||
|
||||
def test_record_with_tags():
    """Tags passed at record time are stored on the resulting command."""
    record = CommandRecorder().record_command(command="echo test", tags=["test-tag", "debug"])

    for tag in ("test-tag", "debug"):
        assert tag in record.tags
|
||||
|
||||
|
||||
def test_record_with_metadata():
    """Arbitrary metadata passed at record time round-trips intact."""
    record = CommandRecorder().record_command(
        command="echo test",
        metadata={"key": "value", "count": 5},
    )

    assert record.metadata["key"] == "value"
    assert record.metadata["count"] == 5
|
||||
|
||||
|
||||
def test_sanitize_command():
    """sanitize_command strips surrounding whitespace."""
    assert CommandRecorder().sanitize_command(" ls -la ") == "ls -la"
|
||||
83
tests/test_search.py
Normal file
83
tests/test_search.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""Tests for CLI Command Memory search functionality."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from cli_memory.search import SearchEngine
|
||||
from cli_memory.models import Command, CommandType
|
||||
|
||||
|
||||
class MockCommand:
    """Minimal stand-in for the Command model, carrying only the
    attributes the search tests read.
    """

    def __init__(self, id, command, command_type, timestamp=None, project_id=None,
                 exit_code=0, duration_ms=None, working_directory="/tmp"):
        self.id = id
        self.command = command
        self.command_type = command_type
        # Default to "now" so recency-based filters treat the command as fresh.
        self.timestamp = timestamp or datetime.utcnow()
        self.project_id = project_id
        self.exit_code = exit_code
        self.duration_ms = duration_ms
        self.working_directory = working_directory
|
||||
|
||||
|
||||
def test_search_engine_init():
    """SearchEngine constructs without arguments."""
    assert SearchEngine() is not None
|
||||
|
||||
|
||||
def test_matches_query_exact():
    """A command matches its own full text but not an unrelated query."""
    engine = SearchEngine()
    cases = [
        ("git status", "git status", True),
        ("git status", "docker ps", False),
    ]
    for command, query, expected in cases:
        assert bool(engine._matches_query(command, query)) == expected
|
||||
|
||||
|
||||
def test_matches_query_partial():
    """Substring queries match anywhere inside the command text."""
    engine = SearchEngine()
    for query in ("status", "git"):
        assert engine._matches_query("git status", query)
|
||||
|
||||
|
||||
def test_search_commands_by_type():
    """Filtering by command type keeps only commands of that type."""
    history = [
        MockCommand(1, "git status", CommandType.GIT),
        MockCommand(2, "git add .", CommandType.GIT),
        MockCommand(3, "docker ps", CommandType.DOCKER),
    ]

    hits = SearchEngine().search_commands(history, "", command_type="docker")
    assert len(hits) == 1
|
||||
|
||||
|
||||
def test_search_recent():
    """search_recent keeps only commands inside the lookback window."""
    now = datetime.utcnow()
    history = [
        MockCommand(1, "recent cmd", CommandType.OTHER, timestamp=now - timedelta(hours=1)),
        MockCommand(2, "old cmd", CommandType.OTHER, timestamp=now - timedelta(days=1)),
    ]

    # Only the hour-old command falls inside the 24-hour window.
    assert len(SearchEngine().search_recent(history, hours=24)) == 1
|
||||
|
||||
|
||||
def test_get_command_statistics():
    """Statistics report the total count and a per-type breakdown."""
    history = [
        MockCommand(1, "git status", CommandType.GIT),
        MockCommand(2, "git add .", CommandType.GIT),
        MockCommand(3, "docker ps", CommandType.DOCKER),
    ]

    stats = SearchEngine().get_command_statistics(history)

    assert stats["total_commands"] == 3
    # Covers both "git is present" and "git occurs exactly twice".
    assert stats["by_type"].get("git") == 2
|
||||
Reference in New Issue
Block a user