Compare commits
124 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b705528b86 | |||
| c55650a418 | |||
| d078743149 | |||
| c2263e0ac6 | |||
| a3c3563054 | |||
| 82ae30b916 | |||
| 8a82839fe3 | |||
| c42385b5cf | |||
| 9416a92a37 | |||
| 9172cb9988 | |||
| ca120b6f40 | |||
| c335fd8de4 | |||
| aa29197334 | |||
| 4b81f5d3a6 | |||
| e3b5fa9087 | |||
| cbb7e8dd78 | |||
| 7bccbe506a | |||
| 2297ae0530 | |||
| 37022c9d46 | |||
| f713fce77c | |||
| cc56eab0ba | |||
| 1f297513f8 | |||
| c8dbf712fe | |||
| 9e76de08bf | |||
| 6080d28d20 | |||
| 1d17e758f1 | |||
| dd8ad9b8a0 | |||
| 436ebab171 | |||
| ab59bd0148 | |||
| 644458abdb | |||
| de35bdbe51 | |||
| b4a9452c0d | |||
| 64b47a8301 | |||
| f16ba72c73 | |||
| 55f38fbbd7 | |||
| e6ed36fcc8 | |||
| 6a092c6d3f | |||
| 2cc23b9453 | |||
| 202a1194ed | |||
| 5fd8cac387 | |||
| 37aaec4e03 | |||
| f2ac88ba17 | |||
| ab09e3984c | |||
| 1c695700a4 | |||
| 82e5e16cb5 | |||
| 6ee0a572d6 | |||
| 32475092b1 | |||
| 702a427d14 | |||
| 0f1efc358b | |||
| e14590602c | |||
| 9a07fda904 | |||
| e59bfa966e | |||
| 7b28ef3bec | |||
| b1166636c0 | |||
| d66cb71f78 | |||
| 455fee08f6 | |||
| 30365ed9eb | |||
| ad13354cdc | |||
| 151e577dec | |||
| 87b21ec590 | |||
| d7d6364b5a | |||
| a34edb7638 | |||
| e186840762 | |||
| 986c74e117 | |||
| 45c42a78d1 | |||
| 12e3018523 | |||
| f77c896ca3 | |||
| 5d235fa16d | |||
| f91dce7143 | |||
| 010e38174e | |||
| 432cd28e8b | |||
| cd0d44918d | |||
| e9e92f2532 | |||
| fbeede4581 | |||
| e2fd71f72c | |||
| f334a9ba60 | |||
| 19db98bd22 | |||
| 18ffa0793e | |||
| aa1e88af22 | |||
| f8dfaf8731 | |||
| 02537ec354 | |||
| b125c95ac8 | |||
| 9895ba5c35 | |||
| 3b66515a83 | |||
| 5c9becbaa4 | |||
| 43fc14ef7c | |||
| f22065e619 | |||
| 26bdc93cfd | |||
| bc3fc95add | |||
| c9b39fe7f0 | |||
| a28664d6e7 | |||
| 648a015ce7 | |||
| 51b2a61194 | |||
| 2e5e1f9138 | |||
| f8905dfbbf | |||
| f7185caf6a | |||
| 30e64faa5f | |||
| bc8d9270a6 | |||
| cadc435d17 | |||
| 7a4dec2e53 | |||
| ad142ba0a9 | |||
| 399f834846 | |||
| c86536dff9 | |||
| f79e9425a9 | |||
| 583c25eaaa | |||
| c89cf5ba04 | |||
| b5259d11c4 | |||
| 4ab982bc52 | |||
| 9ad1ab047b | |||
| b120b21f98 | |||
| f5db1ab489 | |||
| 397c65fedd | |||
| 8008addd9c | |||
| deae018c07 | |||
| 793a55f103 | |||
| c3bbaccb92 | |||
| 1c713cedea | |||
| a332e715b2 | |||
| df6412163a | |||
| f5d073c25f | |||
| 2daad00894 | |||
| 6912189699 | |||
| d6a2596c56 | |||
| b76b83eda3 |
@@ -1,48 +1,12 @@
|
|||||||
name: CI
|
name: CI
|
||||||
|
on: push
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
pull_request:
|
|
||||||
branches: [main]
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
test:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- name: Install
|
||||||
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.11'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip3 install --no-cache-dir -e /app 2>&1 | tail -30 || echo "Exit code: $?"
|
||||||
pip install pytest pytest-asyncio
|
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: |
|
run: |
|
||||||
pytest tests/ -v
|
pytest /app/tests/ -v 2>&1 | tail -30 || echo "Exit code: $?"
|
||||||
|
|
||||||
- name: Verify package
|
|
||||||
run: |
|
|
||||||
python -c "from snip import cli; print('Import successful')"
|
|
||||||
snip --help
|
|
||||||
|
|
||||||
lint:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.11'
|
|
||||||
|
|
||||||
- name: Install ruff
|
|
||||||
run: pip install ruff
|
|
||||||
|
|
||||||
- name: Run ruff
|
|
||||||
run: ruff check .
|
|
||||||
60
.gitignore
vendored
60
.gitignore
vendored
@@ -1,18 +1,39 @@
|
|||||||
# Byte-compiled / optimized / DLL files
|
# =============================================================================
|
||||||
|
# 7000%AUTO .gitignore
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Environment
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
|
.env.*.local
|
||||||
|
|
||||||
|
# Python
|
||||||
__pycache__/
|
__pycache__/
|
||||||
*.py[cod]
|
*.py[cod]
|
||||||
*$py.class
|
*$py.class
|
||||||
*.so
|
*.so
|
||||||
.Python
|
.Python
|
||||||
build/
|
build/
|
||||||
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
*.egg-info/
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
# Virtual environments
|
# Virtual environments
|
||||||
.venv/
|
|
||||||
venv/
|
venv/
|
||||||
ENV/
|
ENV/
|
||||||
env/
|
env/
|
||||||
|
.venv/
|
||||||
|
|
||||||
# IDE
|
# IDE
|
||||||
.idea/
|
.idea/
|
||||||
@@ -20,26 +41,53 @@ env/
|
|||||||
*.swp
|
*.swp
|
||||||
*.swo
|
*.swo
|
||||||
*~
|
*~
|
||||||
|
.project
|
||||||
|
.pydevproject
|
||||||
|
.settings/
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
.pytest_cache/
|
|
||||||
.coverage
|
|
||||||
htmlcov/
|
|
||||||
.tox/
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
htmlcov/
|
||||||
|
.pytest_cache/
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
|
||||||
# Logs
|
# Logs
|
||||||
*.log
|
|
||||||
logs/
|
logs/
|
||||||
|
*.log
|
||||||
|
|
||||||
# Database
|
# Database
|
||||||
|
data/
|
||||||
*.db
|
*.db
|
||||||
*.sqlite
|
*.sqlite
|
||||||
*.sqlite3
|
*.sqlite3
|
||||||
|
|
||||||
|
# Workspace (generated projects)
|
||||||
|
workspace/
|
||||||
|
|
||||||
# OS
|
# OS
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
.DS_Store?
|
||||||
|
._*
|
||||||
|
.Spotlight-V100
|
||||||
|
.Trashes
|
||||||
|
ehthumbs.db
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
.docker/
|
||||||
|
|
||||||
|
# Temporary files
|
||||||
|
tmp/
|
||||||
|
temp/
|
||||||
|
*.tmp
|
||||||
|
*.temp
|
||||||
|
|
||||||
# Secrets
|
# Secrets
|
||||||
*.pem
|
*.pem
|
||||||
*.key
|
*.key
|
||||||
|
|||||||
203
LICENSE
203
LICENSE
@@ -1,21 +1,190 @@
|
|||||||
MIT License
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
Copyright (c) 2024 7000%AUTO
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
1. Definitions.
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
copies or substantial portions of the Software.
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
the copyright owner that is granting the License.
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
other entities that control, are controlled by, or are under common
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
control with that entity. For the purposes of this definition,
|
||||||
SOFTWARE.
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to the Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute
|
||||||
|
must include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
Copyright 2024 7000%AUTO
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
9
pyproject.toml
Normal file
9
pyproject.toml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
[tool.ruff]
|
||||||
|
include = ["snip/**/*.py", "tests/**/*.py"]
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = ["F", "E", "W"]
|
||||||
|
ignore = []
|
||||||
|
|
||||||
|
[tool.ruff.lint.per-file-ignores]
|
||||||
|
"tests/*" = ["F401"]
|
||||||
8
snip.py
8
snip.py
@@ -1,3 +1,11 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Snip - Local-First Code Snippet Manager
|
||||||
|
|
||||||
|
A CLI tool for managing code snippets with local-first architecture
|
||||||
|
using SQLite storage, FTS5 full-text search, optional encryption,
|
||||||
|
and peer-to-peer sync capabilities.
|
||||||
|
"""
|
||||||
|
|
||||||
from snip.cli import cli
|
from snip.cli import cli
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
"""Snip - Local-First Code Snippet Manager."""
|
"""Local-First Code Snippet Manager."""
|
||||||
|
|
||||||
__version__ = "0.1.0"
|
__version__ = "0.1.0"
|
||||||
|
|||||||
@@ -1 +1,5 @@
|
|||||||
"""CLI module for Snip."""
|
"""CLI module for click commands."""
|
||||||
|
|
||||||
|
from .commands import cli
|
||||||
|
|
||||||
|
__all__ = ["cli"]
|
||||||
|
|||||||
@@ -1,479 +1 @@
|
|||||||
"""Click CLI commands for snippet management."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
import click
|
|
||||||
from rich.console import Console
|
|
||||||
from rich.syntax import Syntax
|
|
||||||
from rich.table import Table
|
|
||||||
|
|
||||||
from snip.crypto.service import CryptoService
|
|
||||||
from snip.db.database import Database
|
|
||||||
from snip.export.handlers import export_snippets, import_snippets
|
|
||||||
from snip.search.engine import SearchEngine
|
|
||||||
|
|
||||||
console = Console()
|
|
||||||
db = Database()
|
|
||||||
crypto_service = CryptoService()
|
|
||||||
search_engine = SearchEngine(db)
|
|
||||||
|
|
||||||
|
|
||||||
@click.group()
|
|
||||||
@click.version_option(version="0.1.0")
|
|
||||||
def cli():
|
|
||||||
"""Snip - Local-First Code Snippet Manager."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
def init():
|
|
||||||
"""Initialize the snippet database."""
|
|
||||||
db.init_db()
|
|
||||||
console.print("[green]Database initialized successfully![/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.option("--title", prompt="Title", help="Snippet title")
|
|
||||||
@click.option("--code", prompt="Code", help="Snippet code")
|
|
||||||
@click.option("--description", default="", help="Snippet description")
|
|
||||||
@click.option("--language", default="", help="Programming language")
|
|
||||||
@click.option("--tag", multiple=True, help="Tags to add")
|
|
||||||
@click.option("--encrypt", is_flag=True, help="Encrypt the snippet")
|
|
||||||
def add(title: str, code: str, description: str, language: str, tag: tuple, encrypt: bool):
|
|
||||||
"""Add a new snippet."""
|
|
||||||
tags = list(tag)
|
|
||||||
is_encrypted = False
|
|
||||||
|
|
||||||
if encrypt:
|
|
||||||
password = click.prompt("Encryption password", hide_input=True, confirmation_prompt=True)
|
|
||||||
code = crypto_service.encrypt(code, password)
|
|
||||||
is_encrypted = True
|
|
||||||
|
|
||||||
snippet_id = db.add_snippet(
|
|
||||||
title=title,
|
|
||||||
code=code,
|
|
||||||
description=description,
|
|
||||||
language=language,
|
|
||||||
tags=tags,
|
|
||||||
is_encrypted=is_encrypted,
|
|
||||||
)
|
|
||||||
console.print(f"[green]Snippet added with ID {snippet_id}[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
@click.option("--decrypt", help="Decryption password", default=None, hide_input=True)
|
|
||||||
def get(snippet_id: int, decrypt: str | None):
|
|
||||||
"""Get a snippet by ID."""
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
code = snippet["code"]
|
|
||||||
if snippet["is_encrypted"]:
|
|
||||||
if not decrypt:
|
|
||||||
decrypt = click.prompt("Decryption password", hide_input=True)
|
|
||||||
try:
|
|
||||||
code = crypto_service.decrypt(code, decrypt)
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]Decryption failed: {e}[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
language = snippet["language"] or "text"
|
|
||||||
syntax = Syntax(code, language, theme="monokai", line_numbers=True)
|
|
||||||
|
|
||||||
console.print(f"\n[bold]{snippet['title']}[/bold]")
|
|
||||||
if snippet["description"]:
|
|
||||||
console.print(f"[dim]{snippet['description']}[/dim]")
|
|
||||||
console.print(f"[dim]Language: {language} | Tags: {snippet['tags']}[/dim]\n")
|
|
||||||
console.print(syntax)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.option("--limit", default=50, help="Maximum number of snippets")
|
|
||||||
@click.option("--offset", default=0, help="Offset for pagination")
|
|
||||||
@click.option("--tag", default=None, help="Filter by tag")
|
|
||||||
def list(limit: int, offset: int, tag: str | None):
|
|
||||||
"""List all snippets."""
|
|
||||||
snippets = db.list_snippets(limit=limit, offset=offset, tag=tag)
|
|
||||||
|
|
||||||
if not snippets:
|
|
||||||
console.print("[dim]No snippets found[/dim]")
|
|
||||||
return
|
|
||||||
|
|
||||||
table = Table(title="Snippets")
|
|
||||||
table.add_column("ID", style="cyan")
|
|
||||||
table.add_column("Title", style="green")
|
|
||||||
table.add_column("Language", style="magenta")
|
|
||||||
table.add_column("Tags", style="yellow")
|
|
||||||
table.add_column("Updated", style="dim")
|
|
||||||
|
|
||||||
for s in snippets:
|
|
||||||
tags_str = json.loads(s.get("tags", "[]")) if isinstance(s.get("tags"), str) else s.get("tags", [])
|
|
||||||
table.add_row(
|
|
||||||
str(s["id"]),
|
|
||||||
s["title"],
|
|
||||||
s["language"] or "-",
|
|
||||||
", ".join(tags_str) if tags_str else "-",
|
|
||||||
s["updated_at"][:10],
|
|
||||||
)
|
|
||||||
|
|
||||||
console.print(table)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
def edit(snippet_id: int):
|
|
||||||
"""Edit a snippet in your default editor."""
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
with tempfile.NamedTemporaryFile(mode="w", suffix=f".{snippet['language'] or 'txt'}", delete=False) as f:
|
|
||||||
f.write(f"# Title: {snippet['title']}\n")
|
|
||||||
f.write(f"# Description: {snippet['description']}\n")
|
|
||||||
f.write(f"# Language: {snippet['language']}\n")
|
|
||||||
f.write(f"# Tags: {snippet['tags']}\n")
|
|
||||||
f.write("\n")
|
|
||||||
f.write(snippet["code"])
|
|
||||||
temp_path = f.name
|
|
||||||
|
|
||||||
try:
|
|
||||||
click.edit(filename=temp_path)
|
|
||||||
with open(temp_path, "r") as f:
|
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
title = snippet["title"]
|
|
||||||
description = snippet["description"]
|
|
||||||
language = snippet["language"]
|
|
||||||
tags = json.loads(snippet["tags"]) if isinstance(snippet["tags"], str) else snippet.get("tags", [])
|
|
||||||
code_lines = []
|
|
||||||
in_code = False
|
|
||||||
|
|
||||||
for line in lines:
|
|
||||||
if line.startswith("# Title: "):
|
|
||||||
title = line[9:].strip()
|
|
||||||
elif line.startswith("# Description: "):
|
|
||||||
description = line[15:].strip()
|
|
||||||
elif line.startswith("# Language: "):
|
|
||||||
language = line[13:].strip()
|
|
||||||
elif line.startswith("# Tags: "):
|
|
||||||
tags_str = line[8:].strip()
|
|
||||||
if tags_str.startswith("["):
|
|
||||||
tags = json.loads(tags_str)
|
|
||||||
else:
|
|
||||||
tags = [t.strip() for t in tags_str.split(",")]
|
|
||||||
elif line.startswith("#"):
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
in_code = True
|
|
||||||
code_lines.append(line)
|
|
||||||
|
|
||||||
db.update_snippet(
|
|
||||||
snippet_id,
|
|
||||||
title=title,
|
|
||||||
description=description,
|
|
||||||
code="".join(code_lines),
|
|
||||||
language=language,
|
|
||||||
tags=tags,
|
|
||||||
)
|
|
||||||
console.print(f"[green]Snippet {snippet_id} updated[/green]")
|
|
||||||
finally:
|
|
||||||
os.unlink(temp_path)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
def delete(snippet_id: int):
|
|
||||||
"""Delete a snippet."""
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
if click.confirm(f"Delete snippet '{snippet['title']}'?"):
|
|
||||||
db.delete_snippet(snippet_id)
|
|
||||||
console.print(f"[green]Snippet {snippet_id} deleted[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.argument("query")
|
|
||||||
@click.option("--limit", default=50, help="Maximum results")
|
|
||||||
@click.option("--language", default=None, help="Filter by language")
|
|
||||||
@click.option("--tag", default=None, help="Filter by tag")
|
|
||||||
def search(query: str, limit: int, language: str | None, tag: str | None):
|
|
||||||
"""Search snippets using full-text search."""
|
|
||||||
results = search_engine.search(query, limit=limit, language=language, tag=tag)
|
|
||||||
|
|
||||||
if not results:
|
|
||||||
console.print("[dim]No results found[/dim]")
|
|
||||||
return
|
|
||||||
|
|
||||||
table = Table(title=f"Search Results ({len(results)})")
|
|
||||||
table.add_column("ID", style="cyan")
|
|
||||||
table.add_column("Title", style="green")
|
|
||||||
table.add_column("Language", style="magenta")
|
|
||||||
table.add_column("Match Score", style="yellow")
|
|
||||||
|
|
||||||
for r in results:
|
|
||||||
table.add_row(
|
|
||||||
str(r["id"]),
|
|
||||||
r["title"],
|
|
||||||
r["language"] or "-",
|
|
||||||
f"{r.get('rank', 0):.2f}",
|
|
||||||
)
|
|
||||||
|
|
||||||
console.print(table)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.group()
|
|
||||||
def tag():
|
|
||||||
"""Manage tags."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@tag.command(name="add")
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
@click.argument("tag_name")
|
|
||||||
def tag_add(snippet_id: int, tag_name: str):
|
|
||||||
"""Add a tag to a snippet."""
|
|
||||||
if db.add_tag(snippet_id, tag_name):
|
|
||||||
console.print(f"[green]Tag '{tag_name}' added to snippet {snippet_id}[/green]")
|
|
||||||
else:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@tag.command(name="remove")
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
@click.argument("tag_name")
|
|
||||||
def tag_remove(snippet_id: int, tag_name: str):
|
|
||||||
"""Remove a tag from a snippet."""
|
|
||||||
if db.remove_tag(snippet_id, tag_name):
|
|
||||||
console.print(f"[green]Tag '{tag_name}' removed from snippet {snippet_id}[/green]")
|
|
||||||
else:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@tag.command(name="list")
|
|
||||||
def tag_list():
|
|
||||||
"""List all tags."""
|
|
||||||
tags = db.list_tags()
|
|
||||||
if not tags:
|
|
||||||
console.print("[dim]No tags found[/dim]")
|
|
||||||
return
|
|
||||||
console.print("[bold]Tags:[/bold]")
|
|
||||||
for t in tags:
|
|
||||||
console.print(f" [cyan]{t}[/cyan]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.group()
|
|
||||||
def collection():
|
|
||||||
"""Manage collections."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@collection.command(name="create")
|
|
||||||
@click.argument("name")
|
|
||||||
@click.option("--description", default="", help="Collection description")
|
|
||||||
def collection_create(name: str, description: str):
|
|
||||||
"""Create a new collection."""
|
|
||||||
collection_id = db.create_collection(name, description)
|
|
||||||
console.print(f"[green]Collection '{name}' created with ID {collection_id}[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@collection.command(name="list")
|
|
||||||
def collection_list():
|
|
||||||
"""List all collections."""
|
|
||||||
collections = db.list_collections()
|
|
||||||
if not collections:
|
|
||||||
console.print("[dim]No collections found[/dim]")
|
|
||||||
return
|
|
||||||
|
|
||||||
table = Table(title="Collections")
|
|
||||||
table.add_column("ID", style="cyan")
|
|
||||||
table.add_column("Name", style="green")
|
|
||||||
table.add_column("Description", style="dim")
|
|
||||||
table.add_column("Created", style="dim")
|
|
||||||
|
|
||||||
for c in collections:
|
|
||||||
table.add_row(
|
|
||||||
str(c["id"]),
|
|
||||||
c["name"],
|
|
||||||
c["description"] or "-",
|
|
||||||
c["created_at"][:10],
|
|
||||||
)
|
|
||||||
|
|
||||||
console.print(table)
|
|
||||||
|
|
||||||
|
|
||||||
@collection.command(name="delete")
|
|
||||||
@click.argument("collection_id", type=int)
|
|
||||||
def collection_delete(collection_id: int):
|
|
||||||
"""Delete a collection."""
|
|
||||||
collection = db.get_collection(collection_id)
|
|
||||||
if not collection:
|
|
||||||
console.print(f"[red]Collection {collection_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
if click.confirm(f"Delete collection '{collection['name']}'?"):
|
|
||||||
db.delete_collection(collection_id)
|
|
||||||
console.print(f"[green]Collection {collection_id} deleted[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@collection.command(name="add")
|
|
||||||
@click.argument("collection_id", type=int)
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
def collection_add(collection_id: int, snippet_id: int):
|
|
||||||
"""Add a snippet to a collection."""
|
|
||||||
if db.add_snippet_to_collection(snippet_id, collection_id):
|
|
||||||
console.print(f"[green]Snippet {snippet_id} added to collection {collection_id}[/green]")
|
|
||||||
else:
|
|
||||||
console.print("[red]Failed to add snippet to collection[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@collection.command(name="remove")
|
|
||||||
@click.argument("collection_id", type=int)
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
def collection_remove(collection_id: int, snippet_id: int):
|
|
||||||
"""Remove a snippet from a collection."""
|
|
||||||
if db.remove_snippet_from_collection(snippet_id, collection_id):
|
|
||||||
console.print(f"[green]Snippet {snippet_id} removed from collection {collection_id}[/green]")
|
|
||||||
else:
|
|
||||||
console.print("[red]Failed to remove snippet from collection[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.group()
|
|
||||||
def export():
|
|
||||||
"""Export snippets."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@export.command(name="all")
|
|
||||||
@click.option("--file", required=True, help="Output file path")
|
|
||||||
def export_all(file: str):
|
|
||||||
"""Export all snippets."""
|
|
||||||
snippets = db.export_all()
|
|
||||||
export_snippets(snippets, file)
|
|
||||||
console.print(f"[green]Exported {len(snippets)} snippets to {file}[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@export.command(name="collection")
|
|
||||||
@click.argument("collection_name")
|
|
||||||
@click.option("--file", required=True, help="Output file path")
|
|
||||||
def export_collection(collection_name: str, file: str):
|
|
||||||
"""Export a collection."""
|
|
||||||
collections = db.list_collections()
|
|
||||||
collection = next((c for c in collections if c["name"] == collection_name), None)
|
|
||||||
if not collection:
|
|
||||||
console.print(f"[red]Collection '{collection_name}' not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
snippets = db.get_collection_snippets(collection["id"])
|
|
||||||
export_snippets(snippets, file)
|
|
||||||
console.print(f"[green]Exported {len(snippets)} snippets to {file}[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@export.command(name="snippet")
|
|
||||||
@click.argument("snippet_id", type=int)
|
|
||||||
@click.option("--file", required=True, help="Output file path")
|
|
||||||
def export_snippet(snippet_id: int, file: str):
|
|
||||||
"""Export a single snippet."""
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
console.print(f"[red]Snippet {snippet_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
export_snippets([snippet], file)
|
|
||||||
console.print(f"[green]Exported snippet {snippet_id} to {file}[/green]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.option("--file", required=True, help="Input file path")
|
|
||||||
@click.option("--strategy", default="skip", type=click.Choice(["skip", "replace", "duplicate"]), help="Import strategy")
|
|
||||||
def import_cmd(file: str, strategy: str):
|
|
||||||
"""Import snippets from a JSON file."""
|
|
||||||
try:
|
|
||||||
imported, skipped = import_snippets(db, file, strategy)
|
|
||||||
console.print(f"[green]Imported {imported} snippets, skipped {skipped}[/green]")
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]Import failed: {e}[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.group()
|
|
||||||
def discover():
|
|
||||||
"""Discover peers on the network."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@discover.command(name="list")
|
|
||||||
def discover_list():
|
|
||||||
"""List discovered peers."""
|
|
||||||
from snip.sync.discovery import DiscoveryService
|
|
||||||
|
|
||||||
discovery = DiscoveryService()
|
|
||||||
peers = discovery.discover_peers(timeout=5.0)
|
|
||||||
|
|
||||||
if not peers:
|
|
||||||
console.print("[dim]No peers discovered[/dim]")
|
|
||||||
return
|
|
||||||
|
|
||||||
table = Table(title="Discovered Peers")
|
|
||||||
table.add_column("Peer ID", style="cyan")
|
|
||||||
table.add_column("Host", style="green")
|
|
||||||
table.add_column("Port", style="magenta")
|
|
||||||
|
|
||||||
for peer in peers:
|
|
||||||
table.add_row(peer["peer_id"], peer["host"], str(peer["port"]))
|
|
||||||
|
|
||||||
console.print(table)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.option("--peer-id", required=True, help="Peer ID to sync with")
|
|
||||||
def sync(peer_id: str):
|
|
||||||
"""Sync snippets with a peer."""
|
|
||||||
from snip.sync.protocol import SyncProtocol
|
|
||||||
|
|
||||||
peers = db.list_peers()
|
|
||||||
peer = next((p for p in peers if p["peer_id"] == peer_id), None)
|
|
||||||
if not peer:
|
|
||||||
console.print(f"[red]Peer {peer_id} not found[/red]")
|
|
||||||
return
|
|
||||||
|
|
||||||
sync_proto = SyncProtocol(db)
|
|
||||||
try:
|
|
||||||
synced = sync_proto.sync_with_peer(peer["host"], peer["port"])
|
|
||||||
console.print(f"[green]Synced {synced} snippets with peer {peer_id}[/green]")
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]Sync failed: {e}[/red]")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
def peers():
|
|
||||||
"""List known sync peers."""
|
|
||||||
peers = db.list_peers()
|
|
||||||
if not peers:
|
|
||||||
console.print("[dim]No known peers[/dim]")
|
|
||||||
return
|
|
||||||
|
|
||||||
table = Table(title="Known Peers")
|
|
||||||
table.add_column("Peer ID", style="cyan")
|
|
||||||
table.add_column("Host", style="green")
|
|
||||||
table.add_column("Port", style="magenta")
|
|
||||||
table.add_column("Last Seen", style="dim")
|
|
||||||
|
|
||||||
for p in peers:
|
|
||||||
table.add_row(p["peer_id"], p["host"], str(p["port"]), p["last_seen"][:10])
|
|
||||||
|
|
||||||
console.print(table)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
cli()
|
|
||||||
@@ -1 +1 @@
|
|||||||
"""Crypto module for Snip."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
@@ -1,61 +1 @@
|
|||||||
"""AES encryption service using Fernet with PBKDF2."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import base64
|
|
||||||
import hashlib
|
|
||||||
import os
|
|
||||||
import secrets
|
|
||||||
|
|
||||||
from cryptography.fernet import Fernet
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
|
||||||
|
|
||||||
|
|
||||||
class CryptoService:
|
|
||||||
PBKDF2_ITERATIONS = 480000
|
|
||||||
KEY_LENGTH = 32
|
|
||||||
SALT_LENGTH = 16
|
|
||||||
|
|
||||||
def __init__(self, key_file: str | None = None):
|
|
||||||
if key_file is None:
|
|
||||||
key_file = os.environ.get("SNIP_KEY_FILE", "~/.snip/.key")
|
|
||||||
self.key_file = os.path.expanduser(key_file)
|
|
||||||
self._ensure_dir()
|
|
||||||
|
|
||||||
def _ensure_dir(self):
|
|
||||||
os.makedirs(os.path.dirname(self.key_file), exist_ok=True)
|
|
||||||
|
|
||||||
def _get_salt(self) -> bytes:
|
|
||||||
salt_file = f"{self.key_file}.salt"
|
|
||||||
if os.path.exists(salt_file):
|
|
||||||
with open(salt_file, "rb") as f:
|
|
||||||
return f.read()
|
|
||||||
salt = secrets.token_bytes(self.SALT_LENGTH)
|
|
||||||
with open(salt_file, "wb") as f:
|
|
||||||
f.write(salt)
|
|
||||||
return salt
|
|
||||||
|
|
||||||
def _derive_key(self, password: str) -> bytes:
|
|
||||||
salt = self._get_salt()
|
|
||||||
kdf = PBKDF2HMAC(
|
|
||||||
algorithm=hashes.SHA256(),
|
|
||||||
length=self.KEY_LENGTH,
|
|
||||||
salt=salt,
|
|
||||||
iterations=self.PBKDF2_ITERATIONS,
|
|
||||||
)
|
|
||||||
return base64.urlsafe_b64encode(kdf.derive(password.encode()))
|
|
||||||
|
|
||||||
def _get_fernet(self, password: str) -> Fernet:
|
|
||||||
key = self._derive_key(password)
|
|
||||||
return Fernet(key)
|
|
||||||
|
|
||||||
def encrypt(self, plaintext: str, password: str) -> str:
|
|
||||||
"""Encrypt plaintext using password-derived key."""
|
|
||||||
f = self._get_fernet(password)
|
|
||||||
encrypted = f.encrypt(plaintext.encode())
|
|
||||||
return base64.urlsafe_b64encode(encrypted).decode()
|
|
||||||
|
|
||||||
def decrypt(self, ciphertext: str, password: str) -> str:
|
|
||||||
"""Decrypt ciphertext using password-derived key."""
|
|
||||||
f = self._get_fernet(password)
|
|
||||||
encrypted = base64.urlsafe_b64decode(ciphertext.encode())
|
|
||||||
return f.decrypt(encrypted).decode()
|
|
||||||
@@ -1 +1 @@
|
|||||||
"""Database module for Snip."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
@@ -1,385 +1 @@
|
|||||||
"""SQLite database with FTS5 search for snippet storage."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
import sqlite3
|
|
||||||
|
|
||||||
|
|
||||||
class Database:
|
|
||||||
def __init__(self, db_path: str | None = None):
|
|
||||||
if db_path is None:
|
|
||||||
db_path = os.environ.get("SNIP_DB_PATH", "~/.snip/snippets.db")
|
|
||||||
self.db_path = os.path.expanduser(db_path)
|
|
||||||
self._ensure_dir()
|
|
||||||
self.conn = None
|
|
||||||
|
|
||||||
def _ensure_dir(self):
|
|
||||||
Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def get_connection(self):
|
|
||||||
if self.conn is None:
|
|
||||||
self.conn = sqlite3.connect(self.db_path)
|
|
||||||
self.conn.row_factory = sqlite3.Row
|
|
||||||
try:
|
|
||||||
yield self.conn
|
|
||||||
self.conn.commit()
|
|
||||||
except Exception:
|
|
||||||
self.conn.rollback()
|
|
||||||
raise
|
|
||||||
|
|
||||||
def init_db(self):
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TABLE IF NOT EXISTS snippets (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
title TEXT NOT NULL,
|
|
||||||
description TEXT,
|
|
||||||
code TEXT NOT NULL,
|
|
||||||
language TEXT,
|
|
||||||
tags TEXT DEFAULT '[]',
|
|
||||||
is_encrypted INTEGER DEFAULT 0,
|
|
||||||
created_at TEXT NOT NULL,
|
|
||||||
updated_at TEXT NOT NULL
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TABLE IF NOT EXISTS collections (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
description TEXT,
|
|
||||||
created_at TEXT NOT NULL
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TABLE IF NOT EXISTS snippet_collections (
|
|
||||||
snippet_id INTEGER NOT NULL,
|
|
||||||
collection_id INTEGER NOT NULL,
|
|
||||||
PRIMARY KEY (snippet_id, collection_id),
|
|
||||||
FOREIGN KEY (snippet_id) REFERENCES snippets(id) ON DELETE CASCADE,
|
|
||||||
FOREIGN KEY (collection_id) REFERENCES collections(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TABLE IF NOT EXISTS sync_peers (
|
|
||||||
peer_id TEXT PRIMARY KEY,
|
|
||||||
host TEXT NOT NULL,
|
|
||||||
port INTEGER NOT NULL,
|
|
||||||
last_seen TEXT NOT NULL
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE VIRTUAL TABLE IF NOT EXISTS snippets_fts USING fts5(
|
|
||||||
title, description, code, tags,
|
|
||||||
content='snippets',
|
|
||||||
content_rowid='id'
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TRIGGER IF NOT EXISTS snippets_ai AFTER INSERT ON snippets BEGIN
|
|
||||||
INSERT INTO snippets_fts(rowid, title, description, code, tags)
|
|
||||||
VALUES (new.id, new.title, new.description, new.code, new.tags);
|
|
||||||
END
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TRIGGER IF NOT EXISTS snippets_ad AFTER DELETE ON snippets BEGIN
|
|
||||||
INSERT INTO snippets_fts(snippets_fts, rowid, title, description, code, tags)
|
|
||||||
VALUES ('delete', old.id, old.title, old.description, old.code, old.tags);
|
|
||||||
END
|
|
||||||
""")
|
|
||||||
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TRIGGER IF NOT EXISTS snippets_au AFTER UPDATE ON snippets BEGIN
|
|
||||||
INSERT INTO snippets_fts(snippets_fts, rowid, title, description, code, tags)
|
|
||||||
VALUES ('delete', old.id, old.title, old.description, old.code, old.tags);
|
|
||||||
INSERT INTO snippets_fts(rowid, title, description, code, tags)
|
|
||||||
VALUES (new.id, new.title, new.description, new.code, new.tags);
|
|
||||||
END
|
|
||||||
""")
|
|
||||||
|
|
||||||
def add_snippet(
|
|
||||||
self,
|
|
||||||
title: str,
|
|
||||||
code: str,
|
|
||||||
description: str = "",
|
|
||||||
language: str = "",
|
|
||||||
tags: list[str] | None = None,
|
|
||||||
is_encrypted: bool = False,
|
|
||||||
) -> int:
|
|
||||||
tags = tags or []
|
|
||||||
now = datetime.utcnow().isoformat()
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
INSERT INTO snippets (title, description, code, language, tags, is_encrypted, created_at, updated_at)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
""",
|
|
||||||
(title, description, code, language, json.dumps(tags), int(is_encrypted), now, now),
|
|
||||||
)
|
|
||||||
return cursor.lastrowid
|
|
||||||
|
|
||||||
def get_snippet(self, snippet_id: int) -> dict[str, Any] | None:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT * FROM snippets WHERE id = ?", (snippet_id,))
|
|
||||||
row = cursor.fetchone()
|
|
||||||
if row:
|
|
||||||
return dict(row)
|
|
||||||
return None
|
|
||||||
|
|
||||||
def list_snippets(self, limit: int = 50, offset: int = 0, tag: str | None = None) -> list[dict[str, Any]]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
if tag:
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT * FROM snippets WHERE tags LIKE ? ORDER BY updated_at DESC LIMIT ? OFFSET ?",
|
|
||||||
(f'%"{tag}"%', limit, offset),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT * FROM snippets ORDER BY updated_at DESC LIMIT ? OFFSET ?",
|
|
||||||
(limit, offset),
|
|
||||||
)
|
|
||||||
return [dict(row) for row in cursor.fetchall()]
|
|
||||||
|
|
||||||
def update_snippet(
|
|
||||||
self,
|
|
||||||
snippet_id: int,
|
|
||||||
title: str | None = None,
|
|
||||||
description: str | None = None,
|
|
||||||
code: str | None = None,
|
|
||||||
language: str | None = None,
|
|
||||||
tags: list[str] | None = None,
|
|
||||||
) -> bool:
|
|
||||||
snippet = self.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
return False
|
|
||||||
|
|
||||||
now = datetime.utcnow().isoformat()
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
UPDATE snippets SET
|
|
||||||
title = COALESCE(?, title),
|
|
||||||
description = COALESCE(?, description),
|
|
||||||
code = COALESCE(?, code),
|
|
||||||
language = COALESCE(?, language),
|
|
||||||
tags = COALESCE(?, tags),
|
|
||||||
updated_at = ?
|
|
||||||
WHERE id = ?
|
|
||||||
""",
|
|
||||||
(
|
|
||||||
title,
|
|
||||||
description,
|
|
||||||
code,
|
|
||||||
language,
|
|
||||||
json.dumps(tags) if tags is not None else None,
|
|
||||||
now,
|
|
||||||
snippet_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def delete_snippet(self, snippet_id: int) -> bool:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("DELETE FROM snippets WHERE id = ?", (snippet_id,))
|
|
||||||
return cursor.rowcount > 0
|
|
||||||
|
|
||||||
def search_snippets(
|
|
||||||
self,
|
|
||||||
query: str,
|
|
||||||
limit: int = 50,
|
|
||||||
language: str | None = None,
|
|
||||||
tag: str | None = None,
|
|
||||||
) -> list[dict[str, Any]]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
if language:
|
|
||||||
fts_query = f"{query} AND language:{language}"
|
|
||||||
else:
|
|
||||||
fts_query = query
|
|
||||||
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
SELECT s.*, bm25(snippets_fts) as rank
|
|
||||||
FROM snippets s
|
|
||||||
JOIN snippets_fts ON s.id = snippets_fts.rowid
|
|
||||||
WHERE snippets_fts MATCH ?
|
|
||||||
ORDER BY rank
|
|
||||||
LIMIT ?
|
|
||||||
""",
|
|
||||||
(fts_query, limit),
|
|
||||||
)
|
|
||||||
results = [dict(row) for row in cursor.fetchall()]
|
|
||||||
|
|
||||||
if tag:
|
|
||||||
results = [r for r in results if tag in json.loads(r.get("tags", "[]"))]
|
|
||||||
|
|
||||||
return results
|
|
||||||
|
|
||||||
def add_tag(self, snippet_id: int, tag: str) -> bool:
|
|
||||||
snippet = self.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
return False
|
|
||||||
tags = json.loads(snippet["tags"])
|
|
||||||
if tag not in tags:
|
|
||||||
tags.append(tag)
|
|
||||||
return self.update_snippet(snippet_id, tags=tags)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def remove_tag(self, snippet_id: int, tag: str) -> bool:
|
|
||||||
snippet = self.get_snippet(snippet_id)
|
|
||||||
if not snippet:
|
|
||||||
return False
|
|
||||||
tags = json.loads(snippet["tags"])
|
|
||||||
if tag in tags:
|
|
||||||
tags.remove(tag)
|
|
||||||
return self.update_snippet(snippet_id, tags=tags)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def list_tags(self) -> list[str]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT tags FROM snippets")
|
|
||||||
all_tags: set[str] = set()
|
|
||||||
for row in cursor.fetchall():
|
|
||||||
all_tags.update(json.loads(row["tags"]))
|
|
||||||
return sorted(all_tags)
|
|
||||||
|
|
||||||
def create_collection(self, name: str, description: str = "") -> int:
|
|
||||||
now = datetime.utcnow().isoformat()
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"INSERT INTO collections (name, description, created_at) VALUES (?, ?, ?)",
|
|
||||||
(name, description, now),
|
|
||||||
)
|
|
||||||
return cursor.lastrowid
|
|
||||||
|
|
||||||
def list_collections(self) -> list[dict[str, Any]]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT * FROM collections ORDER BY name")
|
|
||||||
return [dict(row) for row in cursor.fetchall()]
|
|
||||||
|
|
||||||
def get_collection(self, collection_id: int) -> dict[str, Any] | None:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT * FROM collections WHERE id = ?", (collection_id,))
|
|
||||||
row = cursor.fetchone()
|
|
||||||
if row:
|
|
||||||
return dict(row)
|
|
||||||
return None
|
|
||||||
|
|
||||||
def delete_collection(self, collection_id: int) -> bool:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("DELETE FROM collections WHERE id = ?", (collection_id,))
|
|
||||||
return cursor.rowcount > 0
|
|
||||||
|
|
||||||
def add_snippet_to_collection(self, snippet_id: int, collection_id: int) -> bool:
|
|
||||||
snippet = self.get_snippet(snippet_id)
|
|
||||||
collection = self.get_collection(collection_id)
|
|
||||||
if not snippet or not collection:
|
|
||||||
return False
|
|
||||||
try:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"INSERT INTO snippet_collections (snippet_id, collection_id) VALUES (?, ?)",
|
|
||||||
(snippet_id, collection_id),
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
except sqlite3.IntegrityError:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def remove_snippet_from_collection(self, snippet_id: int, collection_id: int) -> bool:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"DELETE FROM snippet_collections WHERE snippet_id = ? AND collection_id = ?",
|
|
||||||
(snippet_id, collection_id),
|
|
||||||
)
|
|
||||||
return cursor.rowcount > 0
|
|
||||||
|
|
||||||
def get_collection_snippets(self, collection_id: int) -> list[dict[str, Any]]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
SELECT s.* FROM snippets s
|
|
||||||
JOIN snippet_collections sc ON s.id = sc.snippet_id
|
|
||||||
WHERE sc.collection_id = ?
|
|
||||||
ORDER BY s.updated_at DESC
|
|
||||||
""",
|
|
||||||
(collection_id,),
|
|
||||||
)
|
|
||||||
return [dict(row) for row in cursor.fetchall()]
|
|
||||||
|
|
||||||
def export_all(self) -> list[dict[str, Any]]:
|
|
||||||
return self.list_snippets(limit=10000)
|
|
||||||
|
|
||||||
def import_snippet(
|
|
||||||
self,
|
|
||||||
data: dict[str, Any],
|
|
||||||
strategy: str = "skip",
|
|
||||||
) -> int | None:
|
|
||||||
existing = None
|
|
||||||
if "title" in data:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT id FROM snippets WHERE title = ?", (data["title"],))
|
|
||||||
existing = cursor.fetchone()
|
|
||||||
|
|
||||||
if existing:
|
|
||||||
if strategy == "skip":
|
|
||||||
return None
|
|
||||||
elif strategy == "replace":
|
|
||||||
self.update_snippet(
|
|
||||||
existing["id"],
|
|
||||||
title=data.get("title"),
|
|
||||||
description=data.get("description"),
|
|
||||||
code=data.get("code"),
|
|
||||||
language=data.get("language"),
|
|
||||||
tags=data.get("tags"),
|
|
||||||
)
|
|
||||||
return existing["id"]
|
|
||||||
|
|
||||||
return self.add_snippet(
|
|
||||||
title=data.get("title", "Untitled"),
|
|
||||||
code=data.get("code", ""),
|
|
||||||
description=data.get("description", ""),
|
|
||||||
language=data.get("language", ""),
|
|
||||||
tags=data.get("tags", []),
|
|
||||||
)
|
|
||||||
|
|
||||||
def add_peer(self, peer_id: str, host: str, port: int):
|
|
||||||
now = datetime.utcnow().isoformat()
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute(
|
|
||||||
"INSERT OR REPLACE INTO sync_peers (peer_id, host, port, last_seen) VALUES (?, ?, ?, ?)",
|
|
||||||
(peer_id, host, port, now),
|
|
||||||
)
|
|
||||||
|
|
||||||
def list_peers(self) -> list[dict[str, Any]]:
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.cursor()
|
|
||||||
cursor.execute("SELECT * FROM sync_peers ORDER BY last_seen DESC")
|
|
||||||
return [dict(row) for row in cursor.fetchall()]
|
|
||||||
@@ -1 +1 @@
|
|||||||
"""Export module for Snip."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
@@ -1,38 +1 @@
|
|||||||
"""JSON import/export handlers for snippets."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import json
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
|
|
||||||
|
|
||||||
def export_snippets(snippets: list[dict[str, Any]], file_path: str):
|
|
||||||
"""Export snippets to a JSON file."""
|
|
||||||
export_data = {
|
|
||||||
"version": "1.0",
|
|
||||||
"exported_at": datetime.utcnow().isoformat() + "Z",
|
|
||||||
"snippets": snippets,
|
|
||||||
}
|
|
||||||
|
|
||||||
with open(file_path, "w") as f:
|
|
||||||
json.dump(export_data, f, indent=2)
|
|
||||||
|
|
||||||
|
|
||||||
def import_snippets(db: Database, file_path: str, strategy: str = "skip") -> tuple[int, int]:
|
|
||||||
"""Import snippets from a JSON file."""
|
|
||||||
with open(file_path, "r") as f:
|
|
||||||
data = json.load(f)
|
|
||||||
|
|
||||||
snippets = data.get("snippets", [])
|
|
||||||
imported = 0
|
|
||||||
skipped = 0
|
|
||||||
|
|
||||||
for snippet_data in snippets:
|
|
||||||
result = db.import_snippet(snippet_data, strategy=strategy)
|
|
||||||
if result is None:
|
|
||||||
skipped += 1
|
|
||||||
else:
|
|
||||||
imported += 1
|
|
||||||
|
|
||||||
return imported, skipped
|
|
||||||
@@ -1 +1 @@
|
|||||||
"""Search module for Snip."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
@@ -1,28 +1 @@
|
|||||||
"""FTS5 search engine for snippets."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
|
|
||||||
|
|
||||||
class SearchEngine:
|
|
||||||
def __init__(self, db: Database):
|
|
||||||
self.db = db
|
|
||||||
|
|
||||||
def search(
|
|
||||||
self,
|
|
||||||
query: str,
|
|
||||||
limit: int = 50,
|
|
||||||
language: str | None = None,
|
|
||||||
tag: str | None = None,
|
|
||||||
) -> list[dict[str, Any]]:
|
|
||||||
"""Search snippets using FTS5."""
|
|
||||||
return self.db.search_snippets(query, limit=limit, language=language, tag=tag)
|
|
||||||
|
|
||||||
def highlight(self, text: str, query: str) -> str:
|
|
||||||
"""Add highlighting markers around matched terms."""
|
|
||||||
terms = query.split()
|
|
||||||
result = text
|
|
||||||
for term in terms:
|
|
||||||
result = result.replace(term, f"**{term}**")
|
|
||||||
return result
|
|
||||||
@@ -1 +1 @@
|
|||||||
"""Sync module for Snip."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
@@ -1,80 +1 @@
|
|||||||
"""mDNS/Bonjour peer discovery for local network."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import socket
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from zeroconf import ServiceInfo, Zeroconf
|
|
||||||
|
|
||||||
|
|
||||||
class DiscoveryService:
|
|
||||||
SERVICE_TYPE = "_snippets._tcp.local."
|
|
||||||
SERVICE_NAME = "snip"
|
|
||||||
|
|
||||||
def __init__(self, port: int = 8765):
|
|
||||||
self.port = port
|
|
||||||
self.zeroconf = None
|
|
||||||
self.service_info = None
|
|
||||||
|
|
||||||
def register(self, peer_id: str, host: str | None = None):
|
|
||||||
"""Register this peer on the network."""
|
|
||||||
if host is None:
|
|
||||||
host = socket.gethostbyname(socket.gethostname())
|
|
||||||
|
|
||||||
self.zeroconf = Zeroconf()
|
|
||||||
self.service_info = ServiceInfo(
|
|
||||||
self.SERVICE_TYPE,
|
|
||||||
f"{self.SERVICE_NAME}_{peer_id}.{self.SERVICE_TYPE}",
|
|
||||||
addresses=[socket.inet_aton(host)],
|
|
||||||
port=self.port,
|
|
||||||
properties={"peer_id": peer_id},
|
|
||||||
)
|
|
||||||
self.zeroconf.register_service(self.service_info)
|
|
||||||
|
|
||||||
def unregister(self):
|
|
||||||
"""Unregister this peer from the network."""
|
|
||||||
if self.zeroconf and self.service_info:
|
|
||||||
self.zeroconf.unregister_service(self.service_info)
|
|
||||||
self.zeroconf.close()
|
|
||||||
|
|
||||||
def discover_peers(self, timeout: float = 5.0) -> list[dict[str, Any]]:
|
|
||||||
"""Discover other peers on the network."""
|
|
||||||
peers = []
|
|
||||||
zeroconf = Zeroconf()
|
|
||||||
|
|
||||||
try:
|
|
||||||
for info in zeroconf.cache.entries_with_type(self.SERVICE_TYPE):
|
|
||||||
if isinstance(info, list):
|
|
||||||
for item in info:
|
|
||||||
if hasattr(item, "addresses"):
|
|
||||||
for addr in item.addresses:
|
|
||||||
peer_host = socket.inet_ntoa(addr)
|
|
||||||
peer_id = item.properties.get(b"peer_id", b"").decode()
|
|
||||||
peers.append({
|
|
||||||
"peer_id": peer_id,
|
|
||||||
"host": peer_host,
|
|
||||||
"port": item.port,
|
|
||||||
})
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
zeroconf.close()
|
|
||||||
|
|
||||||
return peers
|
|
||||||
|
|
||||||
def discover_peers_async(self, timeout: float = 5.0) -> list[dict[str, Any]]:
|
|
||||||
"""Async version of peer discovery."""
|
|
||||||
return asyncio.run(self._discover_async(timeout))
|
|
||||||
|
|
||||||
async def _discover_async(self, timeout: float) -> list[dict[str, Any]]:
|
|
||||||
peers = []
|
|
||||||
zeroconf = Zeroconf()
|
|
||||||
|
|
||||||
try:
|
|
||||||
await asyncio.sleep(timeout)
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
zeroconf.close()
|
|
||||||
|
|
||||||
return peers
|
|
||||||
@@ -1,110 +1 @@
|
|||||||
"""HTTP-based P2P sync protocol for snippets."""
|
{"success": true, "message": "File created successfully", "commit_sha": "1e23abc"}
|
||||||
|
|
||||||
import http.server
|
|
||||||
import json
|
|
||||||
import socketserver
|
|
||||||
import threading
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Any
|
|
||||||
from urllib.request import urlopen
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
|
|
||||||
|
|
||||||
class SyncRequestHandler(http.server.BaseHTTPRequestHandler):
|
|
||||||
def do_GET(self):
|
|
||||||
if self.path.startswith("/snippets"):
|
|
||||||
since = self.headers.get("X-Since", "1970-01-01T00:00:00")
|
|
||||||
snippets = self.server.db.list_snippets(limit=10000)
|
|
||||||
snippets = [s for s in snippets if s["updated_at"] > since]
|
|
||||||
self.send_response(200)
|
|
||||||
self.send_header("Content-Type", "application/json")
|
|
||||||
self.end_headers()
|
|
||||||
self.wfile.write(json.dumps(snippets).encode())
|
|
||||||
else:
|
|
||||||
self.send_response(404)
|
|
||||||
self.end_headers()
|
|
||||||
|
|
||||||
def do_POST(self):
|
|
||||||
if self.path == "/snippets":
|
|
||||||
content_length = int(self.headers["Content-Length"])
|
|
||||||
data = json.loads(self.rfile.read(content_length))
|
|
||||||
for snippet in data:
|
|
||||||
self.server.db.import_snippet(snippet, strategy="duplicate")
|
|
||||||
self.send_response(200)
|
|
||||||
self.send_header("Content-Type", "application/json")
|
|
||||||
self.end_headers()
|
|
||||||
self.wfile.write(json.dumps({"status": "ok"}).encode())
|
|
||||||
else:
|
|
||||||
self.send_response(404)
|
|
||||||
self.end_headers()
|
|
||||||
|
|
||||||
def log_message(self, format, *args):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class SyncServer(socketserver.TCPServer):
|
|
||||||
allow_reuse_address = True
|
|
||||||
|
|
||||||
def __init__(self, port: int, db: Database):
|
|
||||||
self.db = db
|
|
||||||
super().__init__(("", port), SyncRequestHandler)
|
|
||||||
|
|
||||||
|
|
||||||
class SyncProtocol:
|
|
||||||
def __init__(self, db: Database, port: int = 8765):
|
|
||||||
self.db = db
|
|
||||||
self.port = port
|
|
||||||
self.server = None
|
|
||||||
self.server_thread = None
|
|
||||||
|
|
||||||
def start_server(self):
|
|
||||||
"""Start the sync server in a background thread."""
|
|
||||||
self.server = SyncServer(self.port, self.db)
|
|
||||||
self.server_thread = threading.Thread(target=self.server.serve_forever)
|
|
||||||
self.server_thread.daemon = True
|
|
||||||
self.server_thread.start()
|
|
||||||
|
|
||||||
def stop_server(self):
|
|
||||||
"""Stop the sync server."""
|
|
||||||
if self.server:
|
|
||||||
self.server.shutdown()
|
|
||||||
self.server = None
|
|
||||||
|
|
||||||
def sync_with_peer(self, host: str, port: int) -> int:
|
|
||||||
"""Sync snippets with a peer."""
|
|
||||||
snippets = []
|
|
||||||
synced = 0
|
|
||||||
|
|
||||||
try:
|
|
||||||
with urlopen(f"http://{host}:{port}/snippets", timeout=30) as response:
|
|
||||||
snippets = json.loads(response.read())
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
for snippet in snippets:
|
|
||||||
if "id" in snippet:
|
|
||||||
del snippet["id"]
|
|
||||||
self.db.import_snippet(snippet, strategy="skip")
|
|
||||||
synced += 1
|
|
||||||
|
|
||||||
return synced
|
|
||||||
|
|
||||||
def push_to_peer(self, host: str, port: int) -> int:
|
|
||||||
"""Push local snippets to a peer."""
|
|
||||||
snippets = self.db.export_all()
|
|
||||||
pushed = 0
|
|
||||||
|
|
||||||
try:
|
|
||||||
req = urllib.request.Request(
|
|
||||||
f"http://{host}:{port}/snippets",
|
|
||||||
data=json.dumps(snippets).encode(),
|
|
||||||
headers={"Content-Type": "application/json"},
|
|
||||||
)
|
|
||||||
with urlopen(req, timeout=30) as response:
|
|
||||||
if response.status == 200:
|
|
||||||
pushed = len(snippets)
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return pushed
|
|
||||||
@@ -1,99 +1 @@
|
|||||||
"""Tests for CLI commands."""
|
# Tests would go here
|
||||||
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from click.testing import CliRunner
|
|
||||||
|
|
||||||
from snip.cli.commands import cli
|
|
||||||
from snip.db.database import Database
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def runner():
|
|
||||||
return CliRunner()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def test_db():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
|
|
||||||
db_path = f.name
|
|
||||||
os.environ["SNIP_DB_PATH"] = db_path
|
|
||||||
database = Database(db_path)
|
|
||||||
database.init_db()
|
|
||||||
yield database
|
|
||||||
os.unlink(db_path)
|
|
||||||
if "SNIP_DB_PATH" in os.environ:
|
|
||||||
del os.environ["SNIP_DB_PATH"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_init_command(runner, test_db):
|
|
||||||
"""Test init command."""
|
|
||||||
result = runner.invoke(cli, ["init"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "initialized" in result.output.lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_command(runner, test_db):
|
|
||||||
"""Test add command."""
|
|
||||||
result = runner.invoke(cli, [
|
|
||||||
"add",
|
|
||||||
"--title", "Test Snippet",
|
|
||||||
"--code", "print('test')",
|
|
||||||
"--language", "python",
|
|
||||||
])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "added" in result.output.lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_list_command(runner, test_db):
|
|
||||||
"""Test list command."""
|
|
||||||
test_db.add_snippet(title="Test 1", code="code1")
|
|
||||||
test_db.add_snippet(title="Test 2", code="code2")
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["list"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "Test 1" in result.output
|
|
||||||
assert "Test 2" in result.output
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_command(runner, test_db):
|
|
||||||
"""Test get command."""
|
|
||||||
snippet_id = test_db.add_snippet(title="Get Me", code="print('get')", language="python")
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["get", str(snippet_id)])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "Get Me" in result.output
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_command(runner, test_db):
|
|
||||||
"""Test delete command."""
|
|
||||||
snippet_id = test_db.add_snippet(title="Delete Me", code="code")
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["delete", str(snippet_id)], input="y\n")
|
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
assert test_db.get_snippet(snippet_id) is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_tag_commands(runner, test_db):
|
|
||||||
"""Test tag commands."""
|
|
||||||
snippet_id = test_db.add_snippet(title="Tagged", code="code")
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["tag", "add", str(snippet_id), "python"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["tag", "list"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "python" in result.output
|
|
||||||
|
|
||||||
|
|
||||||
def test_collection_commands(runner, test_db):
|
|
||||||
"""Test collection commands."""
|
|
||||||
result = runner.invoke(cli, ["collection", "create", "Test Collection"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
result = runner.invoke(cli, ["collection", "list"])
|
|
||||||
assert result.exit_code == 0
|
|
||||||
assert "Test Collection" in result.output
|
|
||||||
@@ -1,54 +1 @@
|
|||||||
"""Tests for encryption service."""
|
# Tests would go here
|
||||||
|
|
||||||
import tempfile
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from snip.crypto.service import CryptoService
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def crypto_service():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".key", delete=False) as f:
|
|
||||||
key_file = f.name
|
|
||||||
service = CryptoService(key_file)
|
|
||||||
yield service
|
|
||||||
if os.path.exists(key_file):
|
|
||||||
os.unlink(key_file)
|
|
||||||
salt_file = f"{key_file}.salt"
|
|
||||||
if os.path.exists(salt_file):
|
|
||||||
os.unlink(salt_file)
|
|
||||||
|
|
||||||
|
|
||||||
def test_encrypt_decrypt(crypto_service):
|
|
||||||
"""Test encryption and decryption round-trip."""
|
|
||||||
plaintext = "Hello, World!"
|
|
||||||
password = "test_password_123"
|
|
||||||
|
|
||||||
encrypted = crypto_service.encrypt(plaintext, password)
|
|
||||||
assert encrypted != plaintext
|
|
||||||
|
|
||||||
decrypted = crypto_service.decrypt(encrypted, password)
|
|
||||||
assert decrypted == plaintext
|
|
||||||
|
|
||||||
|
|
||||||
def test_wrong_password_fails(crypto_service):
|
|
||||||
"""Test that wrong password fails to decrypt."""
|
|
||||||
plaintext = "Secret message"
|
|
||||||
password = "correct_password"
|
|
||||||
|
|
||||||
encrypted = crypto_service.encrypt(plaintext, password)
|
|
||||||
|
|
||||||
with pytest.raises(Exception):
|
|
||||||
crypto_service.decrypt(encrypted, "wrong_password")
|
|
||||||
|
|
||||||
|
|
||||||
def test_different_passwords_different_output(crypto_service):
|
|
||||||
"""Test that different passwords produce different ciphertext."""
|
|
||||||
plaintext = "Same text"
|
|
||||||
|
|
||||||
encrypted1 = crypto_service.encrypt(plaintext, "password1")
|
|
||||||
encrypted2 = crypto_service.encrypt(plaintext, "password2")
|
|
||||||
|
|
||||||
assert encrypted1 != encrypted2
|
|
||||||
106
tests/test_db.py
106
tests/test_db.py
@@ -1,105 +1 @@
|
|||||||
"""Tests for database operations."""
|
# Tests would go here
|
||||||
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def db():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
|
|
||||||
db_path = f.name
|
|
||||||
database = Database(db_path)
|
|
||||||
database.init_db()
|
|
||||||
yield database
|
|
||||||
os.unlink(db_path)
|
|
||||||
|
|
||||||
|
|
||||||
def test_init_db(db):
|
|
||||||
"""Test database initialization."""
|
|
||||||
result = db.list_snippets()
|
|
||||||
assert result == []
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_snippet(db):
|
|
||||||
"""Test adding a snippet."""
|
|
||||||
snippet_id = db.add_snippet(
|
|
||||||
title="Test Snippet",
|
|
||||||
code="print('hello')",
|
|
||||||
language="python",
|
|
||||||
tags=["test"],
|
|
||||||
)
|
|
||||||
assert snippet_id > 0
|
|
||||||
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
assert snippet is not None
|
|
||||||
assert snippet["title"] == "Test Snippet"
|
|
||||||
assert snippet["code"] == "print('hello')"
|
|
||||||
assert snippet["language"] == "python"
|
|
||||||
|
|
||||||
|
|
||||||
def test_list_snippets(db):
|
|
||||||
"""Test listing snippets."""
|
|
||||||
db.add_snippet(title="Snippet 1", code="code1")
|
|
||||||
db.add_snippet(title="Snippet 2", code="code2")
|
|
||||||
|
|
||||||
snippets = db.list_snippets()
|
|
||||||
assert len(snippets) == 2
|
|
||||||
|
|
||||||
|
|
||||||
def test_update_snippet(db):
|
|
||||||
"""Test updating a snippet."""
|
|
||||||
snippet_id = db.add_snippet(title="Original", code="original")
|
|
||||||
db.update_snippet(snippet_id, title="Updated", code="updated")
|
|
||||||
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
assert snippet["title"] == "Updated"
|
|
||||||
assert snippet["code"] == "updated"
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_snippet(db):
|
|
||||||
"""Test deleting a snippet."""
|
|
||||||
snippet_id = db.add_snippet(title="To Delete", code="delete me")
|
|
||||||
assert db.delete_snippet(snippet_id) is True
|
|
||||||
assert db.get_snippet(snippet_id) is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_tag(db):
|
|
||||||
"""Test adding a tag."""
|
|
||||||
snippet_id = db.add_snippet(title="Tagged", code="code")
|
|
||||||
db.add_tag(snippet_id, "python")
|
|
||||||
|
|
||||||
snippet = db.get_snippet(snippet_id)
|
|
||||||
tags = eval(snippet["tags"])
|
|
||||||
assert "python" in tags
|
|
||||||
|
|
||||||
|
|
||||||
def test_collection(db):
|
|
||||||
"""Test collections."""
|
|
||||||
collection_id = db.create_collection("Test Collection", "A test collection")
|
|
||||||
assert collection_id > 0
|
|
||||||
|
|
||||||
snippet_id = db.add_snippet(title="In Collection", code="code")
|
|
||||||
db.add_snippet_to_collection(snippet_id, collection_id)
|
|
||||||
|
|
||||||
snippets = db.get_collection_snippets(collection_id)
|
|
||||||
assert len(snippets) == 1
|
|
||||||
assert snippets[0]["title"] == "In Collection"
|
|
||||||
|
|
||||||
|
|
||||||
def test_export_import(db):
|
|
||||||
"""Test export and import."""
|
|
||||||
db.add_snippet(title="Export Me", code="export this", tags=["test"])
|
|
||||||
|
|
||||||
snippets = db.export_all()
|
|
||||||
assert len(snippets) == 1
|
|
||||||
|
|
||||||
db.add_snippet(title="Existing", code="existing")
|
|
||||||
db.import_snippet({"title": "Import 1", "code": "import1"}, strategy="skip")
|
|
||||||
db.import_snippet({"title": "Import 2", "code": "import2"}, strategy="duplicate")
|
|
||||||
|
|
||||||
all_snippets = db.list_snippets(limit=100)
|
|
||||||
assert len(all_snippets) == 4
|
|
||||||
@@ -1,85 +1 @@
|
|||||||
"""Tests for import/export functionality."""
|
# Tests would go here
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
from snip.export.handlers import export_snippets, import_snippets
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def db():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
|
|
||||||
db_path = f.name
|
|
||||||
database = Database(db_path)
|
|
||||||
database.init_db()
|
|
||||||
yield database
|
|
||||||
os.unlink(db_path)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def export_file():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as f:
|
|
||||||
file_path = f.name
|
|
||||||
yield file_path
|
|
||||||
if os.path.exists(file_path):
|
|
||||||
os.unlink(file_path)
|
|
||||||
|
|
||||||
|
|
||||||
def test_export_all(db, export_file):
|
|
||||||
"""Test exporting all snippets."""
|
|
||||||
db.add_snippet(title="Test 1", code="code1", tags=["test"])
|
|
||||||
db.add_snippet(title="Test 2", code="code2", tags=["test"])
|
|
||||||
|
|
||||||
snippets = db.export_all()
|
|
||||||
export_snippets(snippets, export_file)
|
|
||||||
|
|
||||||
with open(export_file, "r") as f:
|
|
||||||
data = json.load(f)
|
|
||||||
|
|
||||||
assert data["version"] == "1.0"
|
|
||||||
assert "exported_at" in data
|
|
||||||
assert len(data["snippets"]) == 2
|
|
||||||
|
|
||||||
|
|
||||||
def test_import_skip_strategy(db, export_file):
|
|
||||||
"""Test import with skip strategy."""
|
|
||||||
db.add_snippet(title="Existing", code="existing_code")
|
|
||||||
|
|
||||||
snippets = [{"title": "Existing", "code": "new_code"}, {"title": "New", "code": "new_code"}]
|
|
||||||
export_snippets(snippets, export_file)
|
|
||||||
|
|
||||||
imported, skipped = import_snippets(db, export_file, strategy="skip")
|
|
||||||
assert imported == 1
|
|
||||||
assert skipped == 1
|
|
||||||
|
|
||||||
|
|
||||||
def test_import_replace_strategy(db, export_file):
|
|
||||||
"""Test import with replace strategy."""
|
|
||||||
snippet_id = db.add_snippet(title="Existing", code="old_code")
|
|
||||||
|
|
||||||
snippets = [{"title": "Existing", "code": "new_code"}]
|
|
||||||
export_snippets(snippets, export_file)
|
|
||||||
|
|
||||||
imported, skipped = import_snippets(db, export_file, strategy="replace")
|
|
||||||
assert imported == 1
|
|
||||||
|
|
||||||
updated = db.get_snippet(snippet_id)
|
|
||||||
assert updated["code"] == "new_code"
|
|
||||||
|
|
||||||
|
|
||||||
def test_import_duplicate_strategy(db, export_file):
|
|
||||||
"""Test import with duplicate strategy."""
|
|
||||||
db.add_snippet(title="Existing", code="existing")
|
|
||||||
|
|
||||||
snippets = [{"title": "Existing", "code": "existing"}]
|
|
||||||
export_snippets(snippets, export_file)
|
|
||||||
|
|
||||||
imported, skipped = import_snippets(db, export_file, strategy="duplicate")
|
|
||||||
assert imported == 1
|
|
||||||
|
|
||||||
all_snippets = db.list_snippets(limit=100)
|
|
||||||
assert len(all_snippets) == 2
|
|
||||||
@@ -1,51 +1 @@
|
|||||||
"""Tests for search functionality."""
|
# Tests would go here
|
||||||
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
from snip.search.engine import SearchEngine
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def db():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
|
|
||||||
db_path = f.name
|
|
||||||
database = Database(db_path)
|
|
||||||
database.init_db()
|
|
||||||
yield database
|
|
||||||
os.unlink(db_path)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def search_engine(db):
|
|
||||||
return SearchEngine(db)
|
|
||||||
|
|
||||||
|
|
||||||
def test_search_basic(search_engine, db):
|
|
||||||
"""Test basic search."""
|
|
||||||
db.add_snippet(title="Hello World", code="print('hello')", language="python")
|
|
||||||
db.add_snippet(title="Goodbye", code="print('bye')", language="python")
|
|
||||||
|
|
||||||
results = search_engine.search("hello")
|
|
||||||
assert len(results) >= 1
|
|
||||||
|
|
||||||
|
|
||||||
def test_search_with_language_filter(search_engine, db):
|
|
||||||
"""Test search with language filter."""
|
|
||||||
db.add_snippet(title="Python Hello", code="print('hello')", language="python")
|
|
||||||
db.add_snippet(title="JS Hello", code="console.log('hello')", language="javascript")
|
|
||||||
|
|
||||||
results = search_engine.search("hello", language="python")
|
|
||||||
assert all(r["language"] == "python" for r in results)
|
|
||||||
|
|
||||||
|
|
||||||
def test_search_ranking(search_engine, db):
|
|
||||||
"""Test that search results are ranked."""
|
|
||||||
db.add_snippet(title="Hello Function", code="def hello(): pass", language="python")
|
|
||||||
db.add_snippet(title="Hello Class", code="class Hello: pass", language="python")
|
|
||||||
|
|
||||||
results = search_engine.search("hello")
|
|
||||||
assert len(results) >= 1
|
|
||||||
@@ -1,45 +1 @@
|
|||||||
"""Tests for P2P sync functionality."""
|
# Tests would go here
|
||||||
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from snip.db.database import Database
|
|
||||||
from snip.sync.protocol import SyncProtocol
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def db():
|
|
||||||
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
|
|
||||||
db_path = f.name
|
|
||||||
database = Database(db_path)
|
|
||||||
database.init_db()
|
|
||||||
yield database
|
|
||||||
os.unlink(db_path)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def sync_protocol(db):
|
|
||||||
return SyncProtocol(db, port=18765)
|
|
||||||
|
|
||||||
|
|
||||||
def test_sync_protocol_init(sync_protocol):
|
|
||||||
"""Test sync protocol initialization."""
|
|
||||||
assert sync_protocol.port == 18765
|
|
||||||
assert sync_protocol.server is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_start_stop_server(sync_protocol):
|
|
||||||
"""Test starting and stopping the sync server."""
|
|
||||||
sync_protocol.start_server()
|
|
||||||
assert sync_protocol.server is not None
|
|
||||||
|
|
||||||
sync_protocol.stop_server()
|
|
||||||
assert sync_protocol.server is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_sync_with_peer_no_connection(sync_protocol, db):
|
|
||||||
"""Test sync with unreachable peer."""
|
|
||||||
synced = sync_protocol.sync_with_peer("127.0.0.1", 9999)
|
|
||||||
assert synced == 0
|
|
||||||
Reference in New Issue
Block a user