Initial commit
This commit is contained in:
1
skills/databases/scripts/tests/coverage-db.json
Normal file
1
skills/databases/scripts/tests/coverage-db.json
Normal file
File diff suppressed because one or more lines are too long
4
skills/databases/scripts/tests/requirements.txt
Normal file
4
skills/databases/scripts/tests/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
pytest>=7.0.0
|
||||
pytest-cov>=4.0.0
|
||||
pytest-mock>=3.10.0
|
||||
mongomock>=4.1.0
|
||||
340
skills/databases/scripts/tests/test_db_backup.py
Normal file
340
skills/databases/scripts/tests/test_db_backup.py
Normal file
@@ -0,0 +1,340 @@
|
||||
"""Tests for db_backup.py"""
|
||||
|
||||
import json
import os
import sys
from datetime import datetime
from pathlib import Path
from unittest.mock import Mock, patch, MagicMock, call

import pytest
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from db_backup import BackupInfo, BackupManager
|
||||
|
||||
|
||||
@pytest.fixture
def temp_backup_dir(tmp_path):
    """Provide a fresh, empty ``backups`` directory and return it as a string path."""
    path = tmp_path.joinpath("backups")
    path.mkdir()
    return str(path)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_backup_info():
    """Build a representative, compressed and already-verified backup record."""
    fields = {
        "filename": "test_backup_20250101_120000.dump",
        "database_type": "mongodb",
        "database_name": "testdb",
        "timestamp": datetime.now(),
        "size_bytes": 1024000,
        "compressed": True,
        "verified": True,
    }
    return BackupInfo(**fields)
|
||||
|
||||
|
||||
class TestBackupInfo:
    """Unit tests for the BackupInfo dataclass."""

    def test_backup_info_creation(self):
        """A fresh record keeps the fields it was given; flags stay off."""
        info = BackupInfo(
            filename="backup.dump",
            database_type="mongodb",
            database_name="mydb",
            timestamp=datetime.now(),
            size_bytes=1024,
            compressed=False,
        )

        observed = (info.filename, info.database_type, info.database_name, info.size_bytes)
        assert observed == ("backup.dump", "mongodb", "mydb", 1024)
        # `verified` was not passed, so it must default to a falsy value.
        assert not info.compressed
        assert not info.verified
|
||||
|
||||
|
||||
class TestBackupManager:
    """Test BackupManager class.

    External tools (mongodump, pg_dump, mongorestore, psql, ...) are never
    actually invoked: ``subprocess.run`` is patched wherever a backup or
    restore would shell out.
    """

    def test_init(self, temp_backup_dir):
        """Test manager initialization."""
        manager = BackupManager("mongodb", temp_backup_dir)

        assert manager.db_type == "mongodb"
        # The manager is expected to ensure the backup directory exists.
        assert Path(temp_backup_dir).exists()

    @patch('subprocess.run')
    def test_backup_mongodb(self, mock_run, temp_backup_dir):
        """Test MongoDB backup creation."""
        # Simulate a successful dump subprocess.
        mock_run.return_value = Mock(returncode=0, stderr="")

        manager = BackupManager("mongodb", temp_backup_dir)
        backup_info = manager.create_backup(
            "mongodb://localhost",
            "testdb",
            compress=False,
            verify=False
        )

        assert backup_info is not None
        assert backup_info.database_type == "mongodb"
        assert backup_info.database_name == "testdb"
        mock_run.assert_called_once()

    @patch('subprocess.run')
    def test_backup_postgres(self, mock_run, temp_backup_dir):
        """Test PostgreSQL backup creation."""
        mock_run.return_value = Mock(returncode=0, stderr="")

        manager = BackupManager("postgres", temp_backup_dir)

        # pg_dump output is written through open(); patch it so no real file
        # handling is required.
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value = MagicMock()

            backup_info = manager.create_backup(
                "postgresql://localhost/testdb",
                "testdb",
                compress=False,
                verify=False
            )

        assert backup_info is not None
        assert backup_info.database_type == "postgres"
        assert backup_info.database_name == "testdb"

    def test_backup_postgres_no_database(self, temp_backup_dir):
        """Test PostgreSQL backup without database name."""
        manager = BackupManager("postgres", temp_backup_dir)
        backup_info = manager.create_backup(
            "postgresql://localhost",
            database=None,
            compress=False,
            verify=False
        )

        # Postgres backups require an explicit database name; expect failure.
        assert backup_info is None

    @patch('subprocess.run')
    def test_backup_with_compression(self, mock_run, temp_backup_dir):
        """Test backup with compression."""
        mock_run.return_value = Mock(returncode=0, stderr="")

        manager = BackupManager("mongodb", temp_backup_dir)

        # Compression archives the dump directory and removes the original.
        with patch('shutil.make_archive') as mock_archive, \
                patch('shutil.rmtree') as mock_rmtree:

            backup_info = manager.create_backup(
                "mongodb://localhost",
                "testdb",
                compress=True,
                verify=False
            )

        assert backup_info is not None
        assert backup_info.compressed
        mock_archive.assert_called_once()

    def test_save_and_load_metadata(self, temp_backup_dir, sample_backup_info):
        """Test saving and loading backup metadata."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Save metadata
        manager._save_metadata(sample_backup_info)

        # Check file was created (sidecar JSON next to the dump file)
        metadata_file = Path(temp_backup_dir) / f"{sample_backup_info.filename}.json"
        assert metadata_file.exists()

        # Load metadata
        with open(metadata_file) as f:
            data = json.load(f)
        assert data["filename"] == sample_backup_info.filename
        assert data["database_type"] == "mongodb"
        assert data["database_name"] == "testdb"

    def test_list_backups(self, temp_backup_dir, sample_backup_info):
        """Test listing backups."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create test backup metadata
        manager._save_metadata(sample_backup_info)

        # List backups
        backups = manager.list_backups()

        assert len(backups) == 1
        assert backups[0].filename == sample_backup_info.filename
        assert backups[0].database_name == "testdb"

    @patch('subprocess.run')
    def test_restore_mongodb(self, mock_run, temp_backup_dir):
        """Test MongoDB restore."""
        mock_run.return_value = Mock(returncode=0, stderr="")

        manager = BackupManager("mongodb", temp_backup_dir)

        # Create dummy backup file
        backup_file = Path(temp_backup_dir) / "test_backup.dump"
        backup_file.touch()

        result = manager.restore_backup(
            "test_backup.dump",
            "mongodb://localhost"
        )

        assert result is True
        mock_run.assert_called_once()

    @patch('subprocess.run')
    def test_restore_postgres(self, mock_run, temp_backup_dir):
        """Test PostgreSQL restore."""
        mock_run.return_value = Mock(returncode=0, stderr="")

        manager = BackupManager("postgres", temp_backup_dir)

        # Create dummy backup file
        backup_file = Path(temp_backup_dir) / "test_backup.sql"
        backup_file.write_text("SELECT 1;")

        # Restore reads the SQL file through open(); patch to avoid real I/O.
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value = MagicMock()

            result = manager.restore_backup(
                "test_backup.sql",
                "postgresql://localhost/testdb"
            )

        assert result is True

    def test_restore_nonexistent_backup(self, temp_backup_dir):
        """Test restore with non-existent backup file."""
        manager = BackupManager("mongodb", temp_backup_dir)

        result = manager.restore_backup(
            "nonexistent.dump",
            "mongodb://localhost"
        )

        assert result is False

    def test_restore_dry_run(self, temp_backup_dir):
        """Test restore in dry-run mode."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create dummy backup file
        backup_file = Path(temp_backup_dir) / "test_backup.dump"
        backup_file.touch()

        # Dry-run succeeds without needing subprocess.run to be patched.
        result = manager.restore_backup(
            "test_backup.dump",
            "mongodb://localhost",
            dry_run=True
        )

        assert result is True

    def test_cleanup_old_backups(self, temp_backup_dir):
        """Test cleaning up old backups."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create old backup file (simulate by setting mtime)
        old_backup = Path(temp_backup_dir) / "old_backup.dump"
        old_backup.touch()

        # Set mtime to 10 days ago so it falls outside the retention window.
        old_time = datetime.now().timestamp() - (10 * 24 * 3600)
        os.utime(old_backup, (old_time, old_time))

        # Cleanup with 7-day retention
        removed = manager.cleanup_old_backups(retention_days=7)

        assert removed == 1
        assert not old_backup.exists()

    def test_cleanup_dry_run(self, temp_backup_dir):
        """Test cleanup in dry-run mode."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create old backup file
        old_backup = Path(temp_backup_dir) / "old_backup.dump"
        old_backup.touch()

        old_time = datetime.now().timestamp() - (10 * 24 * 3600)
        os.utime(old_backup, (old_time, old_time))

        # Cleanup with dry-run: the count is reported but nothing is deleted.
        removed = manager.cleanup_old_backups(retention_days=7, dry_run=True)

        assert removed == 1
        assert old_backup.exists()  # File should still exist

    def test_verify_backup(self, temp_backup_dir, sample_backup_info):
        """Test backup verification."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create dummy backup file with non-empty content.
        backup_file = Path(temp_backup_dir) / sample_backup_info.filename
        backup_file.write_text("backup data")

        result = manager._verify_backup(sample_backup_info)

        assert result is True

    def test_verify_empty_backup(self, temp_backup_dir, sample_backup_info):
        """Test verification of empty backup file."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Create empty backup file; a zero-byte dump must fail verification.
        backup_file = Path(temp_backup_dir) / sample_backup_info.filename
        backup_file.touch()

        result = manager._verify_backup(sample_backup_info)

        assert result is False

    def test_format_size(self, temp_backup_dir):
        """Test size formatting."""
        manager = BackupManager("mongodb", temp_backup_dir)

        # Each 1024x step moves to the next binary unit.
        assert manager._format_size(500) == "500.00 B"
        assert manager._format_size(1024) == "1.00 KB"
        assert manager._format_size(1024 * 1024) == "1.00 MB"
        assert manager._format_size(1024 * 1024 * 1024) == "1.00 GB"

    def test_get_size_file(self, temp_backup_dir):
        """Test getting size of file."""
        manager = BackupManager("mongodb", temp_backup_dir)

        test_file = Path(temp_backup_dir) / "test.txt"
        test_file.write_text("test data")

        size = manager._get_size(test_file)

        assert size > 0

    def test_get_size_directory(self, temp_backup_dir):
        """Test getting size of directory (sums contained files)."""
        manager = BackupManager("mongodb", temp_backup_dir)

        test_dir = Path(temp_backup_dir) / "test_dir"
        test_dir.mkdir()
        (test_dir / "file1.txt").write_text("data1")
        (test_dir / "file2.txt").write_text("data2")

        size = manager._get_size(test_dir)

        assert size > 0
|
||||
|
||||
|
||||
# NOTE(review): `import os` used to live here ("for cleanup test"). Module-level
# imports belong at the top of the file (PEP 8), so it was moved into the
# import block; the cleanup tests resolve `os` at call time, after module
# import completes, so behavior is unchanged.

if __name__ == "__main__":
    # Allow running this test module directly, outside a pytest CLI invocation.
    pytest.main([__file__, "-v"])
|
||||
277
skills/databases/scripts/tests/test_db_migrate.py
Normal file
277
skills/databases/scripts/tests/test_db_migrate.py
Normal file
@@ -0,0 +1,277 @@
|
||||
"""Tests for db_migrate.py"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from db_migrate import Migration, MigrationManager
|
||||
|
||||
|
||||
@pytest.fixture
def temp_migrations_dir(tmp_path):
    """Provide a fresh, empty ``migrations`` directory as a string path."""
    path = tmp_path.joinpath("migrations")
    path.mkdir()
    return str(path)
|
||||
|
||||
|
||||
@pytest.fixture
def mock_mongo_client():
    """Return a (client, db) MagicMock pair wired like a pymongo client."""
    db = MagicMock()
    client = MagicMock()
    client.get_default_database.return_value = db
    # server_info() succeeding stands in for a reachable server.
    client.server_info.return_value = {}
    return client, db
|
||||
|
||||
|
||||
@pytest.fixture
def mock_postgres_conn():
    """Return a (connection, cursor) MagicMock pair; cursor() acts as a context manager."""
    cursor = MagicMock()
    conn = MagicMock()
    conn.cursor.return_value.__enter__.return_value = cursor
    return conn, cursor
|
||||
|
||||
|
||||
class TestMigration:
    """Unit tests for the Migration dataclass."""

    def test_migration_creation(self):
        """Constructor stores the given fields; `applied` defaults to falsy."""
        created = Migration(
            id="20250101120000",
            name="test_migration",
            timestamp=datetime.now(),
            database_type="mongodb",
        )

        assert (created.id, created.name) == ("20250101120000", "test_migration")
        assert created.database_type == "mongodb"
        # Not yet run against any database.
        assert not created.applied
|
||||
|
||||
|
||||
class TestMigrationManager:
    """Test MigrationManager class.

    Database drivers are patched at the `db_migrate` module level
    (`MongoClient`, `psycopg2`), so no real connections are made.
    """

    def test_init(self, temp_migrations_dir):
        """Test manager initialization."""
        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)

        assert manager.db_type == "mongodb"
        assert manager.connection_string == "mongodb://localhost"
        # The manager is expected to ensure the migrations directory exists.
        assert Path(temp_migrations_dir).exists()

    @patch('db_migrate.MongoClient')
    def test_connect_mongodb(self, mock_client_class, temp_migrations_dir, mock_mongo_client):
        """Test MongoDB connection."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)
        result = manager.connect()

        assert result is True
        assert manager.client == mock_client
        assert manager.db == mock_db

    @patch('db_migrate.psycopg2')
    def test_connect_postgres(self, mock_psycopg2, temp_migrations_dir, mock_postgres_conn):
        """Test PostgreSQL connection."""
        mock_conn, mock_cursor = mock_postgres_conn
        mock_psycopg2.connect.return_value = mock_conn

        manager = MigrationManager("postgres", "postgresql://localhost", temp_migrations_dir)
        result = manager.connect()

        assert result is True
        assert manager.conn == mock_conn

    def test_connect_unsupported_db(self, temp_migrations_dir):
        """Test connection with unsupported database type."""
        manager = MigrationManager("unsupported", "connection_string", temp_migrations_dir)
        result = manager.connect()

        assert result is False

    def test_generate_migration(self, temp_migrations_dir):
        """Test migration generation."""
        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)
        migration = manager.generate_migration("test_migration")

        assert migration is not None
        assert migration.name == "test_migration"

        # Check file was created
        migration_files = list(Path(temp_migrations_dir).glob("*.json"))
        assert len(migration_files) == 1

        # Check file content
        with open(migration_files[0]) as f:
            data = json.load(f)
        assert data["name"] == "test_migration"
        assert data["database_type"] == "mongodb"

    def test_generate_migration_dry_run(self, temp_migrations_dir):
        """Test migration generation in dry-run mode."""
        manager = MigrationManager("postgres", "postgresql://localhost", temp_migrations_dir)
        migration = manager.generate_migration("test_migration", dry_run=True)

        # The migration object is still returned...
        assert migration is not None

        # ...but no file was created.
        migration_files = list(Path(temp_migrations_dir).glob("*.json"))
        assert len(migration_files) == 0

    def test_get_pending_migrations(self, temp_migrations_dir):
        """Test getting pending migrations."""
        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)

        # Create test migration file
        migration_data = {
            "id": "20250101120000",
            "name": "test_migration",
            "timestamp": datetime.now().isoformat(),
            "database_type": "mongodb",
            "mongodb_operations": []
        }

        migration_file = Path(temp_migrations_dir) / "20250101120000_test.json"
        with open(migration_file, "w") as f:
            json.dump(migration_data, f)

        # Mock database connection: no migrations recorded as applied yet.
        with patch.object(manager, 'db', MagicMock()):
            manager.db.migrations.find.return_value = []

            pending = manager.get_pending_migrations()

        assert len(pending) == 1
        assert pending[0].id == "20250101120000"
        assert pending[0].name == "test_migration"

    @patch('db_migrate.MongoClient')
    def test_apply_mongodb_migration(self, mock_client_class, temp_migrations_dir, mock_mongo_client):
        """Test applying MongoDB migration."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)
        manager.connect()

        migration = Migration(
            id="20250101120000",
            name="test_migration",
            timestamp=datetime.now(),
            database_type="mongodb",
            mongodb_operations=[
                {
                    "operation": "createIndex",
                    "collection": "users",
                    "index": {"email": 1},
                    "options": {}
                }
            ]
        )

        result = manager.apply_migration(migration)

        assert result is True
        # The index operation ran and the migration was recorded as applied.
        mock_db["users"].create_index.assert_called_once()
        mock_db.migrations.insert_one.assert_called_once()

    def test_apply_migration_dry_run(self, temp_migrations_dir):
        """Test applying migration in dry-run mode (no connection needed)."""
        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)

        migration = Migration(
            id="20250101120000",
            name="test_migration",
            timestamp=datetime.now(),
            database_type="mongodb",
            mongodb_operations=[]
        )

        result = manager.apply_migration(migration, dry_run=True)

        assert result is True

    @patch('db_migrate.psycopg2')
    def test_rollback_postgres_migration(self, mock_psycopg2, temp_migrations_dir, mock_postgres_conn):
        """Test rolling back PostgreSQL migration."""
        mock_conn, mock_cursor = mock_postgres_conn
        mock_psycopg2.connect.return_value = mock_conn

        manager = MigrationManager("postgres", "postgresql://localhost", temp_migrations_dir)
        manager.connect()

        # Create migration file containing both up and down SQL.
        migration_data = {
            "id": "20250101120000",
            "name": "test_migration",
            "timestamp": datetime.now().isoformat(),
            "database_type": "postgres",
            "up_sql": "CREATE TABLE test (id INT);",
            "down_sql": "DROP TABLE test;"
        }

        migration_file = Path(temp_migrations_dir) / "20250101120000_test.json"
        with open(migration_file, "w") as f:
            json.dump(migration_data, f)

        result = manager.rollback_migration("20250101120000")

        assert result is True
        # Verify SQL was executed (the down_sql at minimum).
        assert mock_cursor.execute.call_count >= 1

    def test_rollback_migration_not_found(self, temp_migrations_dir):
        """Test rollback with non-existent migration."""
        manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)

        result = manager.rollback_migration("99999999999999")

        assert result is False
|
||||
|
||||
|
||||
def test_migration_sorting(temp_migrations_dir):
    """Test that migrations are applied in correct order.

    Writes three migration files whose ids differ only in the last digit and
    checks that get_pending_migrations() returns them in ascending id order.
    """
    manager = MigrationManager("mongodb", "mongodb://localhost", temp_migrations_dir)

    # Create multiple migration files
    for i in range(3):
        migration_data = {
            "id": f"2025010112000{i}",
            "name": f"migration_{i}",
            "timestamp": datetime.now().isoformat(),
            "database_type": "mongodb",
            "mongodb_operations": []
        }

        migration_file = Path(temp_migrations_dir) / f"2025010112000{i}_test.json"
        with open(migration_file, "w") as f:
            json.dump(migration_data, f)

    # No migrations recorded as applied, so all three are pending.
    with patch.object(manager, 'db', MagicMock()):
        manager.db.migrations.find.return_value = []

        pending = manager.get_pending_migrations()

    # Check they're in order
    assert len(pending) == 3
    assert pending[0].id == "20250101120000"
    assert pending[1].id == "20250101120001"
    assert pending[2].id == "20250101120002"
|
||||
|
||||
|
||||
# Allow running this test module directly, outside a pytest CLI invocation.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||
370
skills/databases/scripts/tests/test_db_performance_check.py
Normal file
370
skills/databases/scripts/tests/test_db_performance_check.py
Normal file
@@ -0,0 +1,370 @@
|
||||
"""Tests for db_performance_check.py"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from db_performance_check import (
|
||||
SlowQuery, IndexRecommendation, PerformanceReport, PerformanceAnalyzer
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def mock_mongo_client():
    """Return a (client, db) MagicMock pair wired like a pymongo client."""
    db = MagicMock()
    client = MagicMock()
    client.get_default_database.return_value = db
    # server_info() succeeding stands in for a reachable server.
    client.server_info.return_value = {}
    return client, db
|
||||
|
||||
|
||||
@pytest.fixture
def mock_postgres_conn():
    """Return a (connection, cursor) MagicMock pair; cursor() acts as a context manager."""
    cursor = MagicMock()
    conn = MagicMock()
    conn.cursor.return_value.__enter__.return_value = cursor
    return conn, cursor
|
||||
|
||||
|
||||
class TestSlowQuery:
    """Unit tests for the SlowQuery dataclass."""

    def test_slow_query_creation(self):
        """A constructed object exposes exactly the values passed in."""
        slow = SlowQuery(
            query="SELECT * FROM users",
            execution_time_ms=150.5,
            count=10,
        )

        observed = (slow.query, slow.execution_time_ms, slow.count)
        assert observed == ("SELECT * FROM users", 150.5, 10)
|
||||
|
||||
|
||||
class TestIndexRecommendation:
    """Unit tests for the IndexRecommendation dataclass."""

    def test_recommendation_creation(self):
        """A constructed recommendation keeps every field verbatim."""
        recommendation = IndexRecommendation(
            collection_or_table="users",
            fields=["email"],
            reason="Frequently queried field",
            estimated_benefit="High",
        )

        observed = (
            recommendation.collection_or_table,
            recommendation.fields,
            recommendation.reason,
            recommendation.estimated_benefit,
        )
        assert observed == ("users", ["email"], "Frequently queried field", "High")
|
||||
|
||||
|
||||
class TestPerformanceReport:
    """Unit tests for the PerformanceReport dataclass."""

    def test_report_creation(self):
        """An empty report keeps its identifiers and container field types."""
        empty_report = PerformanceReport(
            database_type="mongodb",
            database_name="testdb",
            timestamp=datetime.now(),
            slow_queries=[],
            index_recommendations=[],
            database_metrics={},
        )

        assert (empty_report.database_type, empty_report.database_name) == ("mongodb", "testdb")
        for attr, expected_type in (
            ("slow_queries", list),
            ("index_recommendations", list),
            ("database_metrics", dict),
        ):
            assert isinstance(getattr(empty_report, attr), expected_type)
|
||||
|
||||
|
||||
class TestPerformanceAnalyzer:
    """Test PerformanceAnalyzer class.

    Database drivers are patched at the `db_performance_check` module level,
    so no real database is contacted.
    """

    def test_init(self):
        """Test analyzer initialization."""
        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost", 100)

        assert analyzer.db_type == "mongodb"
        assert analyzer.connection_string == "mongodb://localhost"
        # Third positional argument is the slow-query threshold in ms.
        assert analyzer.threshold_ms == 100

    @patch('db_performance_check.MongoClient')
    def test_connect_mongodb(self, mock_client_class, mock_mongo_client):
        """Test MongoDB connection."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")
        result = analyzer.connect()

        assert result is True
        assert analyzer.client == mock_client
        assert analyzer.db == mock_db

    @patch('db_performance_check.psycopg2')
    def test_connect_postgres(self, mock_psycopg2, mock_postgres_conn):
        """Test PostgreSQL connection."""
        mock_conn, mock_cursor = mock_postgres_conn
        mock_psycopg2.connect.return_value = mock_conn

        analyzer = PerformanceAnalyzer("postgres", "postgresql://localhost")
        result = analyzer.connect()

        assert result is True
        assert analyzer.conn == mock_conn

    def test_connect_unsupported_db(self):
        """Test connection with unsupported database type."""
        analyzer = PerformanceAnalyzer("unsupported", "connection_string")
        result = analyzer.connect()

        assert result is False

    @patch('db_performance_check.MongoClient')
    def test_analyze_mongodb(self, mock_client_class, mock_mongo_client):
        """Test MongoDB performance analysis."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        # Mock profiling
        mock_db.command.side_effect = [
            {"was": 0},  # profile -1 (get status)
            {},  # profile 1 (enable)
        ]

        # Mock slow queries
        mock_profile_cursor = MagicMock()
        mock_profile_cursor.sort.return_value = [
            {
                "command": {"find": "users"},
                "millis": 150,
                "ns": "testdb.users",
                "planSummary": "COLLSCAN"
            }
        ]
        mock_db.system.profile.find.return_value = mock_profile_cursor

        # Mock collections
        mock_db.list_collection_names.return_value = ["users", "orders"]

        # Mock collection stats
        mock_coll = MagicMock()
        mock_coll.aggregate.return_value = [{"storageStats": {}}]
        mock_coll.list_indexes.return_value = [{"name": "_id_"}]
        mock_coll.find.return_value.limit.return_value = [
            {"_id": 1, "name": "Alice", "email": "alice@example.com"}
        ]
        mock_db.__getitem__.return_value = mock_coll

        # Mock server status and db stats
        mock_client.admin.command.return_value = {
            "connections": {"current": 10},
            "opcounters": {"query": 1000}
        }
        # NOTE(review): the side_effect list set on mock_db.command above takes
        # precedence over this return_value until its two items are consumed,
        # and a third call then raises StopIteration rather than returning
        # this dict — confirm analyze() issues db.command() calls in the
        # expected order and count.
        mock_db.command.return_value = {
            "dataSize": 1024 * 1024 * 100,
            "indexSize": 1024 * 1024 * 10,
            "collections": 5
        }

        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")
        analyzer.connect()

        report = analyzer.analyze()

        assert report is not None
        assert report.database_type == "mongodb"
        assert isinstance(report.slow_queries, list)
        assert isinstance(report.index_recommendations, list)
        assert isinstance(report.database_metrics, dict)

    @patch('db_performance_check.psycopg2')
    def test_analyze_postgres(self, mock_psycopg2, mock_postgres_conn):
        """Test PostgreSQL performance analysis."""
        mock_conn, mock_cursor = mock_postgres_conn
        mock_psycopg2.connect.return_value = mock_conn

        # Mock cursor results, one entry per expected fetchone() call in order.
        mock_cursor.fetchone.side_effect = [
            {"has_extension": True},  # pg_stat_statements check
            {"connections": 10, "commits": 1000, "rollbacks": 5},  # stats
            {"db_size": 1024 * 1024 * 500},  # database size
            {"cache_hit_ratio": 0.95}  # cache hit ratio
        ]

        mock_cursor.fetchall.side_effect = [
            # Slow queries
            [
                {
                    "query": "SELECT * FROM users",
                    "mean_exec_time": 150.5,
                    "calls": 100,
                    "total_exec_time": 15050
                }
            ],
            # Sequential scans (high seq_scan count should trigger an index
            # recommendation)
            [
                {
                    "schemaname": "public",
                    "tablename": "users",
                    "seq_scan": 5000,
                    "seq_tup_read": 500000,
                    "idx_scan": 100
                }
            ],
            # Unused indexes
            []
        ]

        analyzer = PerformanceAnalyzer("postgres", "postgresql://localhost")
        analyzer.connect()

        report = analyzer.analyze()

        assert report is not None
        assert report.database_type == "postgres"
        assert len(report.slow_queries) > 0
        assert len(report.index_recommendations) > 0

    def test_print_report(self, capsys):
        """Test report printing."""
        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")

        report = PerformanceReport(
            database_type="mongodb",
            database_name="testdb",
            timestamp=datetime.now(),
            slow_queries=[
                SlowQuery(
                    query="db.users.find({age: {$gte: 18}})",
                    execution_time_ms=150.5,
                    count=10,
                    collection_or_table="users"
                )
            ],
            index_recommendations=[
                IndexRecommendation(
                    collection_or_table="users",
                    fields=["age"],
                    reason="Frequently queried field",
                    estimated_benefit="High"
                )
            ],
            database_metrics={
                "connections": 10,
                "database_size_mb": 100.5
            }
        )

        analyzer.print_report(report)

        # Spot-check key pieces of the formatted output.
        captured = capsys.readouterr()
        assert "Database Performance Report" in captured.out
        assert "testdb" in captured.out
        assert "150.5ms" in captured.out
        assert "users" in captured.out

    def test_save_report(self, tmp_path):
        """Test saving report to JSON."""
        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")

        report = PerformanceReport(
            database_type="mongodb",
            database_name="testdb",
            timestamp=datetime.now(),
            slow_queries=[],
            index_recommendations=[],
            database_metrics={}
        )

        output_file = tmp_path / "report.json"
        analyzer.save_report(report, str(output_file))

        assert output_file.exists()

        # The saved file round-trips as JSON with the report's identifiers.
        with open(output_file) as f:
            data = json.load(f)
        assert data["database_type"] == "mongodb"
        assert data["database_name"] == "testdb"

    def test_disconnect(self):
        """Test disconnection closes both handles when present."""
        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")

        # Mock client and connection
        analyzer.client = MagicMock()
        analyzer.conn = MagicMock()

        analyzer.disconnect()

        analyzer.client.close.assert_called_once()
        analyzer.conn.close.assert_called_once()

    @patch('db_performance_check.MongoClient')
    def test_analyze_error_handling(self, mock_client_class, mock_mongo_client):
        """Test error handling during analysis."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        # Simulate error on every db.command() call.
        mock_db.command.side_effect = Exception("Database error")

        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost")
        analyzer.connect()

        report = analyzer.analyze()

        # Failures surface as a None report rather than an exception.
        assert report is None
|
||||
|
||||
|
||||
class TestIntegration:
    """Integration tests: exercise connect -> analyze -> save -> disconnect."""

    @patch('db_performance_check.MongoClient')
    def test_full_mongodb_workflow(self, mock_client_class, mock_mongo_client, tmp_path):
        """Test complete MongoDB analysis workflow."""
        mock_client, mock_db = mock_mongo_client
        mock_client_class.return_value = mock_client

        # Setup mocks: no slow queries, no collections — an "empty" database.
        mock_db.command.return_value = {"was": 0}
        mock_db.system.profile.find.return_value.sort.return_value = []
        mock_db.list_collection_names.return_value = []
        mock_client.admin.command.return_value = {
            "connections": {"current": 10},
            "opcounters": {"query": 1000}
        }

        analyzer = PerformanceAnalyzer("mongodb", "mongodb://localhost", 100)

        # Connect
        assert analyzer.connect() is True

        # Analyze
        report = analyzer.analyze()
        assert report is not None

        # Save report
        output_file = tmp_path / "report.json"
        analyzer.save_report(report, str(output_file))
        assert output_file.exists()

        # Disconnect
        analyzer.disconnect()
|
||||
|
||||
|
||||
# Allow running this test module directly, outside a pytest CLI invocation.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||
Reference in New Issue
Block a user