HyperAgent CI/CD #7

Workflow file for this run

# HyperAgent CI/CD Pipeline
name: HyperAgent CI/CD

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]
  schedule:
    - cron: '0 2 * * *' # Daily security scan

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
  PYTHON_VERSION: '3.10'

jobs:
  # ============================================================================
  # STAGE 1: CODE QUALITY & LINTING
  # ============================================================================
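  # Formatting, import-order, lint, and type checks. Every check ends in
  # "|| true", so findings are reported in the log without failing the run.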
  lint-and-format:
    name: '[*] Code Quality Checks'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 black isort mypy bandit
      - name: Run Black formatter check
        run: black --check hyperagent/ || true
      - name: Run isort import check
        run: isort --check-only hyperagent/ || true
      - name: Run Flake8 linting
        run: flake8 hyperagent/ --max-line-length=120 --statistics --count || true
      - name: Run MyPy type checking
        run: mypy hyperagent/ --ignore-missing-imports || true

  # ============================================================================
  # STAGE 2: SECURITY SCANNING
  # ============================================================================
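  # Bandit for Python-level findings; Trivy filesystem scan for CRITICAL/HIGH
  # vulnerabilities, with SARIF results uploaded to GitHub code scanning.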
  security-scan:
    name: '[*] Security Scan'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Run Bandit security linter
        run: |
          pip install bandit
          bandit -r hyperagent/ -f json -o bandit-report.json || true
          bandit -r hyperagent/ || true
      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        with:
          scan-type: 'fs'
          scan-ref: '.'
          format: 'sarif'
          output: 'trivy-results.sarif'
          severity: 'CRITICAL,HIGH'
      - name: Upload Trivy results to GitHub Security
        uses: github/codeql-action/upload-sarif@v3 # v2 of the CodeQL action is deprecated
        if: always()
        with:
          sarif_file: 'trivy-results.sarif'

  # ============================================================================
  # STAGE 3: UNIT TESTS
  # ============================================================================
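  # Unit tests run against disposable Postgres and Redis service containers;
  # coverage is measured against an 80% target (reported, not enforced).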
  unit-tests:
    name: '[TEST] Unit Tests'
    runs-on: ubuntu-latest
    services:
      postgres:
        # The stock postgres image does not bundle pgvector, which the
        # "Set up database" step below needs, so use the pgvector build.
        image: pgvector/pgvector:pg15
        env:
          POSTGRES_DB: hyperagent_test
          POSTGRES_USER: test_user
          POSTGRES_PASSWORD: test_password
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      redis:
        image: redis:7-alpine
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 6379:6379
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install pytest pytest-asyncio pytest-cov pytest-mock
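      # The test schema relies on the pgvector extension; enable it once before
      # the suite runs (psql ships with the GitHub-hosted Ubuntu runners).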
      - name: Set up database
        run: |
          PGPASSWORD=test_password psql -h localhost -U test_user -d hyperagent_test -c "CREATE EXTENSION IF NOT EXISTS vector;"
        env:
          PGHOST: localhost
          PGPORT: 5432
          PGUSER: test_user
          PGDATABASE: hyperagent_test
      - name: Run unit tests
        run: |
          pytest tests/unit/ -v --cov=hyperagent --cov-report=xml --cov-report=term --cov-report=html
        env:
          DATABASE_URL: postgresql://test_user:test_password@localhost:5432/hyperagent_test
          REDIS_URL: redis://localhost:6379/0
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
      - name: Check coverage threshold
        run: |
          coverage report --fail-under=80 || echo "Coverage below 80%"
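      # Publish coverage twice: the HTML report as a workflow artifact and the
      # XML report to Codecov.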
      - name: Upload coverage HTML
        uses: actions/upload-artifact@v4 # v3 of the artifact actions is deprecated
        if: always()
        with:
          name: coverage-report
          path: htmlcov/
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml
          flags: unittests
          name: codecov-umbrella

  # ============================================================================
  # STAGE 4: INTEGRATION TESTS
  # ============================================================================
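  # End-to-end tests in tests/integration/, run against the same Postgres/Redis
  # service container setup as the unit tests.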
  integration-tests:
    name: '[TEST] Integration Tests'
    runs-on: ubuntu-latest
    services:
      postgres:
        # Match the unit-test job so the pgvector extension is available here too.
        image: pgvector/pgvector:pg15
        env:
          POSTGRES_DB: hyperagent_test
          POSTGRES_USER: test_user
          POSTGRES_PASSWORD: test_password
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      redis:
        image: redis:7-alpine
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 6379:6379
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install dependencies
run: |
pip install -r requirements.txt
pip install pytest pytest-asyncio
- name: Run integration tests
run: |
pytest tests/integration/ -v
env:
DATABASE_URL: postgresql://test_user:test_password@localhost:5432/hyperagent_test
REDIS_URL: redis://localhost:6379/0
GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}

  # ============================================================================
  # STAGE 5: BUILD DOCKER IMAGE
  # ============================================================================
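  # Multi-arch image build (amd64 + arm64) pushed to GHCR on non-PR events,
  # gated on lint, security scan, and unit tests, with GitHub Actions layer caching.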
  build-docker:
    name: '[BUILD] Docker Image'
    runs-on: ubuntu-latest
    needs: [lint-and-format, security-scan, unit-tests]
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v4
      # QEMU emulation is required to build the linux/arm64 variant on the amd64 runner
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Log in to Container Registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=sha
            type=raw,value=latest,enable={{is_default_branch}}
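      # Build once for both platforms; the push is skipped on pull requests, and
      # the gha cache backend reuses layers across workflow runs.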
      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64
          build-args: |
            BUILDKIT_INLINE_CACHE=1

  # ============================================================================
  # STAGE 6: DOCKER COMPOSE TEST
  # ============================================================================
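  # Smoke test of the full stack: bring the Compose stack up, wait for a healthy
  # container, probe the API health endpoint, then tear everything down.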
  docker-compose-test:
    name: '[TEST] Docker Compose Integration'
    runs-on: ubuntu-latest
    needs: [build-docker]
    steps:
      - uses: actions/checkout@v4
      - name: Start services with Docker Compose
        # Use the Compose v2 plugin ("docker compose"), which GitHub-hosted runners provide
        run: |
          docker compose up -d
          sleep 10
      - name: Wait for services to be healthy
        run: |
          timeout 60 bash -c 'until docker compose ps | grep -q "healthy"; do sleep 2; done'
      - name: Test API health endpoint
        run: |
          curl -f http://localhost:8000/api/v1/health/basic || exit 1
      - name: Stop services
        if: always()
        run: |
          docker compose down -v

  # ============================================================================
  # STAGE 7: NOTIFICATIONS
  # ============================================================================
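  # Runs even when upstream jobs fail (if: always()) and summarizes the overall
  # pipeline result from the individual job outcomes.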
  notify:
    name: '[*] Notifications'
    runs-on: ubuntu-latest
    if: always()
    needs: [lint-and-format, unit-tests, integration-tests, build-docker, docker-compose-test]
    steps:
      - name: Determine status
        id: status
        run: |
          if [ "${{ needs.unit-tests.result }}" == "success" ] && \
             [ "${{ needs.integration-tests.result }}" == "success" ] && \
             [ "${{ needs.build-docker.result }}" == "success" ] && \
             [ "${{ needs.docker-compose-test.result }}" == "success" ]; then
            echo "status=success" >> $GITHUB_OUTPUT
          else
            echo "status=failure" >> $GITHUB_OUTPUT
          fi
      - name: Print status
        run: |
          echo "Pipeline Status: ${{ steps.status.outputs.status }}"
          echo "Lint: ${{ needs.lint-and-format.result }}"
          echo "Unit Tests: ${{ needs.unit-tests.result }}"
          echo "Integration Tests: ${{ needs.integration-tests.result }}"
          echo "Docker Build: ${{ needs.build-docker.result }}"
          echo "Docker Compose: ${{ needs.docker-compose-test.result }}"