Initial commit
3  skills/devops/scripts/tests/requirements.txt  Normal file
@@ -0,0 +1,3 @@
pytest>=7.0.0
pytest-cov>=4.0.0
pytest-mock>=3.10.0
285  skills/devops/scripts/tests/test_cloudflare_deploy.py  Normal file
@@ -0,0 +1,285 @@
"""
Tests for cloudflare-deploy.py

Run with: pytest test_cloudflare_deploy.py -v
"""

import pytest
import subprocess
from pathlib import Path
from unittest.mock import Mock, patch, mock_open
import sys
import os

# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))

from cloudflare_deploy import CloudflareDeploy, CloudflareDeployError


@pytest.fixture
def temp_project(tmp_path):
    """Create temporary project directory with wrangler.toml"""
    project_dir = tmp_path / "test-worker"
    project_dir.mkdir()

    wrangler_toml = project_dir / "wrangler.toml"
    wrangler_toml.write_text('''
name = "test-worker"
main = "src/index.ts"
compatibility_date = "2024-01-01"
''')

    return project_dir


@pytest.fixture
def deployer(temp_project):
    """Create CloudflareDeploy instance with temp project"""
    return CloudflareDeploy(
        project_dir=temp_project,
        env="staging",
        dry_run=False,
        verbose=False
    )


class TestCloudflareDeployInit:
    """Test CloudflareDeploy initialization"""

    def test_init_with_defaults(self, temp_project):
        deployer = CloudflareDeploy(project_dir=temp_project)
        assert deployer.project_dir == temp_project.resolve()
        assert deployer.env is None
        assert deployer.dry_run is False
        assert deployer.verbose is False

    def test_init_with_custom_params(self, temp_project):
        deployer = CloudflareDeploy(
            project_dir=temp_project,
            env="production",
            dry_run=True,
            verbose=True
        )
        assert deployer.env == "production"
        assert deployer.dry_run is True
        assert deployer.verbose is True


class TestValidateProject:
    """Test project validation"""

    def test_validate_existing_project(self, deployer):
        assert deployer.validate_project() is True

    def test_validate_nonexistent_project(self, tmp_path):
        deployer = CloudflareDeploy(project_dir=tmp_path / "nonexistent")
        with pytest.raises(CloudflareDeployError, match="does not exist"):
            deployer.validate_project()

    def test_validate_missing_wrangler_toml(self, tmp_path):
        project_dir = tmp_path / "no-toml"
        project_dir.mkdir()
        deployer = CloudflareDeploy(project_dir=project_dir)

        with pytest.raises(CloudflareDeployError, match="wrangler.toml not found"):
            deployer.validate_project()


class TestCheckWranglerInstalled:
    """Test wrangler CLI detection"""

    @patch('subprocess.run')
    def test_wrangler_installed(self, mock_run, deployer):
        mock_run.return_value = Mock(
            returncode=0,
            stdout="wrangler 3.0.0",
            stderr=""
        )
        assert deployer.check_wrangler_installed() is True

    @patch('subprocess.run')
    def test_wrangler_not_installed(self, mock_run, deployer):
        mock_run.side_effect = FileNotFoundError()
        assert deployer.check_wrangler_installed() is False

    @patch('subprocess.run')
    def test_wrangler_command_fails(self, mock_run, deployer):
        mock_run.side_effect = subprocess.CalledProcessError(1, "wrangler")
        assert deployer.check_wrangler_installed() is False


class TestGetWorkerName:
    """Test worker name extraction"""

    def test_get_worker_name_success(self, deployer):
        name = deployer.get_worker_name()
        assert name == "test-worker"

    def test_get_worker_name_no_name(self, tmp_path):
        project_dir = tmp_path / "no-name"
        project_dir.mkdir()

        wrangler_toml = project_dir / "wrangler.toml"
        wrangler_toml.write_text("main = 'index.ts'")

        deployer = CloudflareDeploy(project_dir=project_dir)
        with pytest.raises(CloudflareDeployError, match="Worker name not found"):
            deployer.get_worker_name()

    def test_get_worker_name_with_quotes(self, tmp_path):
        project_dir = tmp_path / "quoted"
        project_dir.mkdir()

        wrangler_toml = project_dir / "wrangler.toml"
        wrangler_toml.write_text('name = "my-worker"\n')

        deployer = CloudflareDeploy(project_dir=project_dir)
        assert deployer.get_worker_name() == "my-worker"

    def test_get_worker_name_single_quotes(self, tmp_path):
        project_dir = tmp_path / "single-quotes"
        project_dir.mkdir()

        wrangler_toml = project_dir / "wrangler.toml"
        wrangler_toml.write_text("name = 'my-worker'\n")

        deployer = CloudflareDeploy(project_dir=project_dir)
        assert deployer.get_worker_name() == "my-worker"


class TestBuildDeployCommand:
    """Test deploy command construction"""

    def test_basic_command(self, temp_project):
        deployer = CloudflareDeploy(project_dir=temp_project)
        cmd = deployer.build_deploy_command()
        assert cmd == ["wrangler", "deploy"]

    def test_command_with_env(self, temp_project):
        deployer = CloudflareDeploy(project_dir=temp_project, env="production")
        cmd = deployer.build_deploy_command()
        assert cmd == ["wrangler", "deploy", "--env", "production"]

    def test_command_with_dry_run(self, temp_project):
        deployer = CloudflareDeploy(project_dir=temp_project, dry_run=True)
        cmd = deployer.build_deploy_command()
        assert cmd == ["wrangler", "deploy", "--dry-run"]

    def test_command_with_env_and_dry_run(self, temp_project):
        deployer = CloudflareDeploy(
            project_dir=temp_project,
            env="staging",
            dry_run=True
        )
        cmd = deployer.build_deploy_command()
        assert cmd == ["wrangler", "deploy", "--env", "staging", "--dry-run"]


class TestRunCommand:
    """Test command execution"""

    @patch('subprocess.run')
    def test_run_command_success(self, mock_run, deployer):
        mock_run.return_value = Mock(
            returncode=0,
            stdout="Success",
            stderr=""
        )

        exit_code, stdout, stderr = deployer.run_command(["echo", "test"])

        assert exit_code == 0
        assert stdout == "Success"
        assert stderr == ""
        mock_run.assert_called_once()

    @patch('subprocess.run')
    def test_run_command_failure_with_check(self, mock_run, deployer):
        mock_run.side_effect = subprocess.CalledProcessError(
            1, "cmd", stderr="Error"
        )

        with pytest.raises(CloudflareDeployError, match="Command failed"):
            deployer.run_command(["false"], check=True)

    @patch('subprocess.run')
    def test_run_command_failure_no_check(self, mock_run, deployer):
        mock_run.side_effect = subprocess.CalledProcessError(
            1, "cmd", output="", stderr="Error"
        )

        exit_code, stdout, stderr = deployer.run_command(["false"], check=False)

        assert exit_code == 1


class TestDeploy:
    """Test full deployment flow"""

    @patch.object(CloudflareDeploy, 'check_wrangler_installed')
    @patch.object(CloudflareDeploy, 'run_command')
    def test_deploy_success(self, mock_run_cmd, mock_check_wrangler, deployer):
        mock_check_wrangler.return_value = True
        mock_run_cmd.return_value = (0, "Deployed successfully", "")

        result = deployer.deploy()

        assert result is True
        mock_check_wrangler.assert_called_once()
        mock_run_cmd.assert_called_once()

    @patch.object(CloudflareDeploy, 'check_wrangler_installed')
    def test_deploy_wrangler_not_installed(self, mock_check_wrangler, deployer):
        mock_check_wrangler.return_value = False

        with pytest.raises(CloudflareDeployError, match="wrangler CLI not installed"):
            deployer.deploy()

    @patch.object(CloudflareDeploy, 'check_wrangler_installed')
    @patch.object(CloudflareDeploy, 'run_command')
    def test_deploy_command_fails(self, mock_run_cmd, mock_check_wrangler, deployer):
        mock_check_wrangler.return_value = True
        mock_run_cmd.side_effect = CloudflareDeployError("Deploy failed")

        with pytest.raises(CloudflareDeployError, match="Deploy failed"):
            deployer.deploy()

    def test_deploy_invalid_project(self, tmp_path):
        deployer = CloudflareDeploy(project_dir=tmp_path / "nonexistent")

        with pytest.raises(CloudflareDeployError):
            deployer.deploy()


class TestIntegration:
    """Integration tests"""

    @patch.object(CloudflareDeploy, 'check_wrangler_installed')
    @patch.object(CloudflareDeploy, 'run_command')
    def test_full_deployment_flow(self, mock_run_cmd, mock_check_wrangler, temp_project):
        mock_check_wrangler.return_value = True
        mock_run_cmd.return_value = (0, "Success", "")

        deployer = CloudflareDeploy(
            project_dir=temp_project,
            env="production",
            dry_run=False,
            verbose=True
        )

        result = deployer.deploy()

        assert result is True
        assert mock_run_cmd.call_count == 1

        # Verify correct command was built
        call_args = mock_run_cmd.call_args[0][0]
        assert "wrangler" in call_args
        assert "deploy" in call_args
        assert "--env" in call_args
        assert "production" in call_args


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
436  skills/devops/scripts/tests/test_docker_optimize.py  Normal file
@@ -0,0 +1,436 @@
"""
Tests for docker-optimize.py

Run with: pytest test_docker_optimize.py -v
"""

import pytest
import json
from pathlib import Path
import sys

# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))

from docker_optimize import DockerfileAnalyzer


@pytest.fixture
def temp_dockerfile(tmp_path):
    """Create temporary Dockerfile"""
    dockerfile = tmp_path / "Dockerfile"
    return dockerfile


def write_dockerfile(filepath, content):
    """Helper to write Dockerfile content"""
    with open(filepath, 'w') as f:
        f.write(content)


class TestDockerfileAnalyzerInit:
    """Test DockerfileAnalyzer initialization"""

    def test_init(self, temp_dockerfile):
        write_dockerfile(temp_dockerfile, "FROM node:20\n")
        analyzer = DockerfileAnalyzer(temp_dockerfile)

        assert analyzer.dockerfile_path == temp_dockerfile
        assert analyzer.verbose is False
        assert analyzer.lines == []
        assert analyzer.issues == []
        assert analyzer.suggestions == []


class TestLoadDockerfile:
    """Test Dockerfile loading"""

    def test_load_success(self, temp_dockerfile):
        content = "FROM node:20\nWORKDIR /app\n"
        write_dockerfile(temp_dockerfile, content)

        analyzer = DockerfileAnalyzer(temp_dockerfile)
        result = analyzer.load_dockerfile()

        assert result is True
        assert len(analyzer.lines) == 2

    def test_load_nonexistent(self, tmp_path):
        analyzer = DockerfileAnalyzer(tmp_path / "nonexistent")

        with pytest.raises(FileNotFoundError):
            analyzer.load_dockerfile()


class TestAnalyzeBaseImage:
    """Test base image analysis"""

    def test_latest_tag(self, temp_dockerfile):
        write_dockerfile(temp_dockerfile, "FROM node:latest\n")
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_base_image()

        assert len(analyzer.issues) == 1
        assert analyzer.issues[0]['category'] == 'base_image'
        assert 'latest' in analyzer.issues[0]['message']

    def test_no_tag(self, temp_dockerfile):
        write_dockerfile(temp_dockerfile, "FROM node\n")
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_base_image()

        assert len(analyzer.issues) == 1
        assert 'no tag' in analyzer.issues[0]['message']

    def test_specific_tag(self, temp_dockerfile):
        write_dockerfile(temp_dockerfile, "FROM node:20-alpine\n")
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_base_image()

        # Should have no issues with specific tag
        base_image_issues = [i for i in analyzer.issues if i['category'] == 'base_image']
        assert len(base_image_issues) == 0

    def test_non_alpine_suggestion(self, temp_dockerfile):
        write_dockerfile(temp_dockerfile, "FROM node:20\n")
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_base_image()

        assert len(analyzer.suggestions) >= 1
        assert any('Alpine' in s['message'] for s in analyzer.suggestions)


class TestAnalyzeMultiStage:
    """Test multi-stage build analysis"""

    def test_single_stage_with_build_tools(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY package.json .
RUN npm install
COPY . .
CMD ["node", "server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_multi_stage()

        assert len(analyzer.issues) == 1
        assert analyzer.issues[0]['category'] == 'optimization'
        assert 'multi-stage' in analyzer.issues[0]['message'].lower()

    def test_multi_stage_no_issues(self, temp_dockerfile):
        content = """
FROM node:20 AS build
WORKDIR /app
COPY package.json .
RUN npm install
COPY . .
RUN npm run build

FROM node:20-alpine
WORKDIR /app
COPY --from=build /app/dist ./dist
CMD ["node", "dist/server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_multi_stage()

        multi_stage_issues = [i for i in analyzer.issues if i['category'] == 'optimization']
        assert len(multi_stage_issues) == 0


class TestAnalyzeLayerCaching:
    """Test layer caching analysis"""

    def test_source_before_dependencies(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY . .
RUN npm install
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_layer_caching()

        assert len(analyzer.issues) == 1
        assert analyzer.issues[0]['category'] == 'caching'

    def test_correct_order(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY package.json .
RUN npm install
COPY . .
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_layer_caching()

        caching_issues = [i for i in analyzer.issues if i['category'] == 'caching']
        assert len(caching_issues) == 0


class TestAnalyzeSecurity:
    """Test security analysis"""

    def test_no_user_instruction(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY . .
CMD ["node", "server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_security()

        assert len(analyzer.issues) >= 1
        security_issues = [i for i in analyzer.issues if i['category'] == 'security']
        assert any('root' in i['message'] for i in security_issues)

    def test_with_user_instruction(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY . .
USER node
CMD ["node", "server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_security()

        # Should not have root user issue
        root_issues = [i for i in analyzer.issues
                       if i['category'] == 'security' and 'root' in i['message']]
        assert len(root_issues) == 0

    def test_detect_secrets(self, temp_dockerfile):
        content = """
FROM node:20
ENV API_KEY=secret123
ENV PASSWORD=mypassword
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_security()

        secret_issues = [i for i in analyzer.issues
                         if i['category'] == 'security' and 'secret' in i['message'].lower()]
        assert len(secret_issues) >= 1


class TestAnalyzeAptCache:
    """Test apt cache cleanup analysis"""

    def test_apt_without_cleanup(self, temp_dockerfile):
        content = """
FROM ubuntu:22.04
RUN apt-get update && apt-get install -y curl
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_apt_cache()

        assert len(analyzer.suggestions) >= 1
        assert any('apt cache' in s['message'] for s in analyzer.suggestions)

    def test_apt_with_cleanup(self, temp_dockerfile):
        content = """
FROM ubuntu:22.04
RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_apt_cache()

        apt_suggestions = [s for s in analyzer.suggestions if 'apt cache' in s['message']]
        assert len(apt_suggestions) == 0


class TestAnalyzeCombineRun:
    """Test RUN command combination analysis"""

    def test_consecutive_runs(self, temp_dockerfile):
        content = """
FROM node:20
RUN apt-get update
RUN apt-get install -y curl
RUN apt-get clean
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_combine_run()

        assert len(analyzer.suggestions) >= 1
        assert any('consecutive' in s['message'] for s in analyzer.suggestions)

    def test_non_consecutive_runs(self, temp_dockerfile):
        content = """
FROM node:20
RUN apt-get update
COPY package.json .
RUN npm install
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_combine_run()

        consecutive_suggestions = [s for s in analyzer.suggestions
                                   if 'consecutive' in s['message']]
        assert len(consecutive_suggestions) == 0


class TestAnalyzeWorkdir:
    """Test WORKDIR analysis"""

    def test_no_workdir(self, temp_dockerfile):
        content = """
FROM node:20
COPY . /app
CMD ["node", "/app/server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_workdir()

        assert len(analyzer.suggestions) >= 1
        assert any('WORKDIR' in s['message'] for s in analyzer.suggestions)

    def test_with_workdir(self, temp_dockerfile):
        content = """
FROM node:20
WORKDIR /app
COPY . .
CMD ["node", "server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        analyzer.load_dockerfile()
        analyzer.analyze_workdir()

        workdir_suggestions = [s for s in analyzer.suggestions if 'WORKDIR' in s['message']]
        assert len(workdir_suggestions) == 0


class TestFullAnalyze:
    """Test complete analysis"""

    def test_analyze_poor_dockerfile(self, temp_dockerfile):
        content = """
FROM node:latest
COPY . .
RUN npm install
CMD ["node", "server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        results = analyzer.analyze()

        assert 'dockerfile' in results
        assert 'total_lines' in results
        assert 'issues' in results
        assert 'suggestions' in results
        assert 'summary' in results

        # Should have multiple issues and suggestions
        assert results['summary']['warnings'] > 0
        assert results['summary']['suggestions'] > 0

    def test_analyze_good_dockerfile(self, temp_dockerfile):
        content = """
FROM node:20-alpine AS build
WORKDIR /app
COPY package.json .
RUN npm ci --only=production
COPY . .
RUN npm run build

FROM node:20-alpine
WORKDIR /app
COPY --from=build /app/dist ./dist
COPY --from=build /app/node_modules ./node_modules
USER node
EXPOSE 3000
CMD ["node", "dist/server.js"]
"""
        write_dockerfile(temp_dockerfile, content)
        analyzer = DockerfileAnalyzer(temp_dockerfile)
        results = analyzer.analyze()

        # Should have minimal issues
        assert results['summary']['errors'] == 0
        # May have some suggestions, but fewer issues overall


class TestPrintResults:
    """Test results printing"""

    def test_print_results(self, temp_dockerfile, capsys):
        content = "FROM node:latest\n"
        write_dockerfile(temp_dockerfile, content)

        analyzer = DockerfileAnalyzer(temp_dockerfile)
        results = analyzer.analyze()
        analyzer.print_results(results)

        captured = capsys.readouterr()
        assert "Dockerfile Analysis" in captured.out
        assert "Summary:" in captured.out
        assert "ISSUES:" in captured.out or "SUGGESTIONS:" in captured.out


class TestIntegration:
    """Integration tests"""

    def test_full_analysis_workflow(self, temp_dockerfile):
        content = """
FROM python:3.11
COPY . /app
RUN pip install -r /app/requirements.txt
ENV API_KEY=secret
CMD ["python", "/app/app.py"]
"""
        write_dockerfile(temp_dockerfile, content)

        analyzer = DockerfileAnalyzer(temp_dockerfile, verbose=True)
        results = analyzer.analyze()

        # Verify all expected checks ran
        assert len(analyzer.issues) > 0
        assert len(analyzer.suggestions) > 0

        # Should flag multiple categories
        categories = {i['category'] for i in analyzer.issues}
        assert 'security' in categories

        # Verify summary calculations
        total_findings = (results['summary']['errors'] +
                          results['summary']['warnings'] +
                          results['summary']['suggestions'])
        assert total_findings > 0


if __name__ == "__main__":
    pytest.main([__file__, "-v"])