# GitHub Actions workflow: "Comprehensive Tests" (captured from run #95).
# Web-page chrome from the capture removed so this file parses as YAML.
---
name: Comprehensive Tests

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]
  schedule:
    # Run tests daily at 2 AM UTC
    - cron: '0 2 * * *'

# Default tool versions shared by all jobs below.
env:
  PYTHON_VERSION: '3.11'
  GO_VERSION: '1.21'
jobs:
  # Unit Tests - Fast feedback, run across supported Python versions.
  unit-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Quoted so YAML keeps them as strings (3.10-style versions would
        # otherwise be read as floats).
        python-version: ['3.11', '3.12']
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        # v4 runs on the deprecated node16 runtime; v5 is the supported release.
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache Python dependencies
        # cache@v3 is deprecated; v4 uses the current cache service backend.
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev,test]"
      - name: Run unit tests
        run: |
          python -m pytest tests/ -v \
            --cov=migration_assistant \
            --cov-report=xml \
            --cov-report=html \
            --cov-report=term-missing \
            --tb=short \
            -m "not integration and not benchmark"
      - name: Upload Python coverage to Codecov
        # codecov-action@v3 is deprecated; v4 renames `file` -> `files` and
        # expects an upload token (tokenless uploads still work for public repos
        # but are rate-limited).
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./coverage.xml
          flags: python-unit
          name: python-unit-${{ matrix.python-version }}
# Go Tests
go-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: ${{ env.GO_VERSION }}
- name: Cache Go modules
uses: actions/cache@v3
with:
path: |
~/.cache/go-build
~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
- name: Run Go tests
working-directory: go-engine
run: |
go test ./... -v -race -coverprofile=coverage.out -covermode=atomic
- name: Run Go benchmarks
working-directory: go-engine
run: |
go test ./... -v -bench=. -benchmem -run=^$ > benchmark_results.txt
- name: Upload Go coverage to Codecov
uses: codecov/codecov-action@v3
with:
file: ./go-engine/coverage.out
flags: go
name: go-tests
- name: Upload benchmark results
uses: actions/upload-artifact@v3
with:
name: go-benchmark-results
path: go-engine/benchmark_results.txt
# Integration Tests - Requires Docker
integration-tests:
runs-on: ubuntu-latest
needs: [unit-tests, go-tests]
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: ${{ env.GO_VERSION }}
- name: Cache Python dependencies
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e ".[dev,test]"
- name: Build Go binary
working-directory: go-engine
run: |
go build -o bin/migration-engine ./cmd/migration-engine
- name: Start test environment
run: |
docker-compose -f docker-compose.test.yml up -d --build
# Wait for services to be ready
echo "Waiting for services to start..."
sleep 30
# Check service health
docker-compose -f docker-compose.test.yml ps
- name: Run integration tests
run: |
python -m pytest tests/ -v \
--cov=migration_assistant \
--cov-report=xml \
--cov-report=html \
--tb=short \
-m "integration" \
--maxfail=5
env:
TEST_DATABASE_MYSQL_HOST: localhost
TEST_DATABASE_MYSQL_PORT: 3307
TEST_DATABASE_POSTGRES_HOST: localhost
TEST_DATABASE_POSTGRES_PORT: 5433
TEST_DATABASE_MONGO_HOST: localhost
TEST_DATABASE_MONGO_PORT: 27018
TEST_DATABASE_REDIS_HOST: localhost
TEST_DATABASE_REDIS_PORT: 6380
TEST_LOCALSTACK_HOST: localhost
TEST_LOCALSTACK_PORT: 4566
- name: Upload integration test coverage
uses: codecov/codecov-action@v3
with:
file: ./coverage.xml
flags: python-integration
name: python-integration
- name: Collect Docker logs on failure
if: failure()
run: |
mkdir -p logs
docker-compose -f docker-compose.test.yml logs > logs/docker-compose.log
docker-compose -f docker-compose.test.yml ps > logs/docker-ps.log
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@v3
with:
name: docker-logs
path: logs/
- name: Stop test environment
if: always()
run: |
docker-compose -f docker-compose.test.yml down -v
# Performance Tests
performance-tests:
runs-on: ubuntu-latest
needs: [unit-tests, go-tests]
if: github.event_name == 'schedule' || contains(github.event.head_commit.message, '[perf]')
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: ${{ env.GO_VERSION }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e ".[dev,test]"
- name: Build Go binary
working-directory: go-engine
run: |
go build -o bin/migration-engine ./cmd/migration-engine
- name: Start test environment
run: |
docker-compose -f docker-compose.test.yml up -d --build
sleep 30
- name: Run performance benchmarks
run: |
python -m pytest tests/ -v \
--benchmark-only \
--benchmark-json=benchmark_results.json \
--benchmark-sort=mean \
-m "benchmark"
- name: Upload benchmark results
uses: actions/upload-artifact@v3
with:
name: python-benchmark-results
path: benchmark_results.json
- name: Stop test environment
if: always()
run: |
docker-compose -f docker-compose.test.yml down -v
# Security Tests
security-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install security tools
run: |
python -m pip install --upgrade pip
pip install bandit safety
- name: Run Bandit security scan
run: |
bandit -r migration_assistant/ -f json -o bandit_results.json
continue-on-error: true
- name: Run Safety check
run: |
safety check --json --output safety_results.json
continue-on-error: true
- name: Upload security scan results
uses: actions/upload-artifact@v3
with:
name: security-scan-results
path: |
bandit_results.json
safety_results.json
# Code Quality
code-quality:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install quality tools
run: |
python -m pip install --upgrade pip
pip install black isort flake8 mypy
pip install -e ".[dev]"
- name: Check code formatting with Black
run: |
black --check --diff migration_assistant/ tests/
- name: Check import sorting with isort
run: |
isort --check-only --diff migration_assistant/ tests/
- name: Lint with flake8
run: |
flake8 migration_assistant/ tests/ --max-line-length=88 --extend-ignore=E203,W503
- name: Type check with mypy
run: |
mypy migration_assistant/ --ignore-missing-imports
continue-on-error: true
# Documentation Tests
docs-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install documentation tools
run: |
python -m pip install --upgrade pip
pip install sphinx sphinx-rtd-theme
pip install -e ".[dev]"
- name: Build documentation
run: |
sphinx-build -b html docs/ docs/_build/html -W
continue-on-error: true
- name: Upload documentation
uses: actions/upload-artifact@v3
with:
name: documentation
path: docs/_build/html/
# Test Results Summary
test-summary:
runs-on: ubuntu-latest
needs: [unit-tests, go-tests, integration-tests, security-tests, code-quality]
if: always()
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
- name: Generate test summary
run: |
echo "# Test Results Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "## Test Status" >> $GITHUB_STEP_SUMMARY
echo "- Unit Tests: ${{ needs.unit-tests.result }}" >> $GITHUB_STEP_SUMMARY
echo "- Go Tests: ${{ needs.go-tests.result }}" >> $GITHUB_STEP_SUMMARY
echo "- Integration Tests: ${{ needs.integration-tests.result }}" >> $GITHUB_STEP_SUMMARY
echo "- Security Tests: ${{ needs.security-tests.result }}" >> $GITHUB_STEP_SUMMARY
echo "- Code Quality: ${{ needs.code-quality.result }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [[ "${{ needs.unit-tests.result }}" == "success" &&
"${{ needs.go-tests.result }}" == "success" &&
"${{ needs.integration-tests.result }}" == "success" ]]; then
echo "✅ All critical tests passed!" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Some tests failed. Please check the logs." >> $GITHUB_STEP_SUMMARY
fi