# BAD: invalidates cache on every code change
# (COPY . before dependency install means ANY source edit re-runs pip)
FROM python:3.12-slim
WORKDIR /app
COPY . .
RUN pip install --no-cache-dir -r requirements.txt
# BAD: invalidates cache on every code change
# (COPY . before dependency install means ANY source edit re-runs pip)
FROM python:3.12-slim
WORKDIR /app
COPY . .
RUN pip install --no-cache-dir -r requirements.txt
# BAD: invalidates cache on every code change
# (COPY . before dependency install means ANY source edit re-runs pip)
FROM python:3.12-slim
WORKDIR /app
COPY . .
RUN pip install --no-cache-dir -r requirements.txt
# GOOD: dependencies cached until requirements.txt changes
# (the pip layer is only rebuilt when the manifest itself changes)
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# GOOD: dependencies cached until requirements.txt changes
# (the pip layer is only rebuilt when the manifest itself changes)
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# GOOD: dependencies cached until requirements.txt changes
# (the pip layer is only rebuilt when the manifest itself changes)
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# Build and push with the GitHub Actions layer cache (type=gha) so
# unchanged layers are reused across workflow runs.
- name: Build
  uses: docker/build-push-action@v5
  with:
    context: .
    cache-from: type=gha
    cache-to: type=gha,mode=max
    push: true
# Build and push with the GitHub Actions layer cache (type=gha) so
# unchanged layers are reused across workflow runs.
- name: Build
  uses: docker/build-push-action@v5
  with:
    context: .
    cache-from: type=gha
    cache-to: type=gha,mode=max
    push: true
# Build and push with the GitHub Actions layer cache (type=gha) so
# unchanged layers are reused across workflow runs.
- name: Build
  uses: docker/build-push-action@v5
  with:
    context: .
    cache-from: type=gha
    cache-to: type=gha,mode=max
    push: true
# Split the suite across 4 parallel shards; least_duration balances
# shards by recorded test runtimes (pytest-split).
strategy:
  matrix:
    shard: [1, 2, 3, 4]
steps:
  - name: Run tests
    run: |
      pytest --splits 4 --group ${{ matrix.shard }} \
        --splitting-algorithm least_duration
# Split the suite across 4 parallel shards; least_duration balances
# shards by recorded test runtimes (pytest-split).
strategy:
  matrix:
    shard: [1, 2, 3, 4]
steps:
  - name: Run tests
    run: |
      pytest --splits 4 --group ${{ matrix.shard }} \
        --splitting-algorithm least_duration
# Split the suite across 4 parallel shards; least_duration balances
# shards by recorded test runtimes (pytest-split).
strategy:
  matrix:
    shard: [1, 2, 3, 4]
steps:
  - name: Run tests
    run: |
      pytest --splits 4 --group ${{ matrix.shard }} \
        --splitting-algorithm least_duration
# docker-compose.ci.yml
# deps-python builds the shared dependency image once; service-api
# consumes it as its base via the BASE_IMAGE build arg.
services:
  deps-python:
    build:
      context: .
      dockerfile: Dockerfile.deps-python
    image: registry.local/deps-python:latest
  service-api:
    build:
      context: ./services/api
      args:
        BASE_IMAGE: registry.local/deps-python:latest
# docker-compose.ci.yml
# deps-python builds the shared dependency image once; service-api
# consumes it as its base via the BASE_IMAGE build arg.
services:
  deps-python:
    build:
      context: .
      dockerfile: Dockerfile.deps-python
    image: registry.local/deps-python:latest
  service-api:
    build:
      context: ./services/api
      args:
        BASE_IMAGE: registry.local/deps-python:latest
# docker-compose.ci.yml
# deps-python builds the shared dependency image once; service-api
# consumes it as its base via the BASE_IMAGE build arg.
services:
  deps-python:
    build:
      context: .
      dockerfile: Dockerfile.deps-python
    image: registry.local/deps-python:latest
  service-api:
    build:
      context: ./services/api
      args:
        BASE_IMAGE: registry.local/deps-python:latest
# Dockerfile.deps-python
# Shared dependency base image: rebuilt only when requirements change.
FROM python:3.12-slim
COPY requirements/*.txt /deps/
RUN pip install --no-cache-dir -r /deps/base.txt -r /deps/test.txt
# Dockerfile.deps-python
# Shared dependency base image: rebuilt only when requirements change.
FROM python:3.12-slim
COPY requirements/*.txt /deps/
RUN pip install --no-cache-dir -r /deps/base.txt -r /deps/test.txt
# Dockerfile.deps-python
# Shared dependency base image: rebuilt only when requirements change.
FROM python:3.12-slim
COPY requirements/*.txt /deps/
RUN pip install --no-cache-dir -r /deps/base.txt -r /deps/test.txt
# .github/scripts/test_selector.py
# Map changed source paths (relative to origin/main) to the test
# directories that must run, and print them space-separated for the
# CI step to hand to pytest.
import subprocess

# Files changed on this branch vs. the merge base with main.
changed = subprocess.check_output(
    ["git", "diff", "--name-only", "origin/main...HEAD"]
).decode().strip().split("\n")

# Source-prefix -> test-directory routing table.
test_map = {
    "services/api/": "tests/api/",
    "services/auth/": "tests/auth/",
    "services/billing/": "tests/billing/",
    "shared/": "tests/",  # shared code = run everything
}

tests_to_run = set()
for file in changed:
    for src, test_dir in test_map.items():
        if file.startswith(src):
            tests_to_run.add(test_dir)

# If nothing matched, run everything (safety net).
if not tests_to_run:
    tests_to_run.add("tests/")

# sorted() makes the output deterministic across runs (set order is not).
print(" ".join(sorted(tests_to_run)))
# .github/scripts/test_selector.py
# Map changed source paths (relative to origin/main) to the test
# directories that must run, and print them space-separated for the
# CI step to hand to pytest.
import subprocess

# Files changed on this branch vs. the merge base with main.
changed = subprocess.check_output(
    ["git", "diff", "--name-only", "origin/main...HEAD"]
).decode().strip().split("\n")

# Source-prefix -> test-directory routing table.
test_map = {
    "services/api/": "tests/api/",
    "services/auth/": "tests/auth/",
    "services/billing/": "tests/billing/",
    "shared/": "tests/",  # shared code = run everything
}

tests_to_run = set()
for file in changed:
    for src, test_dir in test_map.items():
        if file.startswith(src):
            tests_to_run.add(test_dir)

# If nothing matched, run everything (safety net).
if not tests_to_run:
    tests_to_run.add("tests/")

# sorted() makes the output deterministic across runs (set order is not).
print(" ".join(sorted(tests_to_run)))
# .github/scripts/test_selector.py
# Map changed source paths (relative to origin/main) to the test
# directories that must run, and print them space-separated for the
# CI step to hand to pytest.
import subprocess

# Files changed on this branch vs. the merge base with main.
changed = subprocess.check_output(
    ["git", "diff", "--name-only", "origin/main...HEAD"]
).decode().strip().split("\n")

# Source-prefix -> test-directory routing table.
test_map = {
    "services/api/": "tests/api/",
    "services/auth/": "tests/auth/",
    "services/billing/": "tests/billing/",
    "shared/": "tests/",  # shared code = run everything
}

tests_to_run = set()
for file in changed:
    for src, test_dir in test_map.items():
        if file.startswith(src):
            tests_to_run.add(test_dir)

# If nothing matched, run everything (safety net).
if not tests_to_run:
    tests_to_run.add("tests/")

# sorted() makes the output deterministic across runs (set order is not).
print(" ".join(sorted(tests_to_run)))
# Run the selector, expose its output as a step output, and feed the
# selected directories to pytest.
- name: Select tests
  id: tests
  run: echo "dirs=$(python .github/scripts/test_selector.py)" >> "$GITHUB_OUTPUT"
- name: Run tests
  run: pytest ${{ steps.tests.outputs.dirs }}
# Run the selector, expose its output as a step output, and feed the
# selected directories to pytest.
- name: Select tests
  id: tests
  run: echo "dirs=$(python .github/scripts/test_selector.py)" >> "$GITHUB_OUTPUT"
- name: Run tests
  run: pytest ${{ steps.tests.outputs.dirs }}
# Run the selector, expose its output as a step output, and feed the
# selected directories to pytest.
- name: Select tests
  id: tests
  run: echo "dirs=$(python .github/scripts/test_selector.py)" >> "$GITHUB_OUTPUT"
- name: Run tests
  run: pytest ${{ steps.tests.outputs.dirs }}
# Persist .mypy_cache between runs; restore-keys lets a partial cache
# hit still speed up --incremental checking.
- name: Cache mypy
  uses: actions/cache@v4
  with:
    path: .mypy_cache
    key: mypy-${{ hashFiles('**/*.py') }}
    restore-keys: mypy-
- name: Type check
  run: mypy --incremental src/
# Persist .mypy_cache between runs; restore-keys lets a partial cache
# hit still speed up --incremental checking.
- name: Cache mypy
  uses: actions/cache@v4
  with:
    path: .mypy_cache
    key: mypy-${{ hashFiles('**/*.py') }}
    restore-keys: mypy-
- name: Type check
  run: mypy --incremental src/
# Persist .mypy_cache between runs; restore-keys lets a partial cache
# hit still speed up --incremental checking.
- name: Cache mypy
  uses: actions/cache@v4
  with:
    path: .mypy_cache
    key: mypy-${{ hashFiles('**/*.py') }}
    restore-keys: mypy-
- name: Type check
  run: mypy --incremental src/
# Persist .eslintcache; restore-keys added (matching the mypy step) so
# a stale cache still gives a partial speedup.
- name: Cache ESLint
  uses: actions/cache@v4
  with:
    path: .eslintcache
    key: eslint-${{ hashFiles('**/*.ts', '**/*.tsx') }}
    restore-keys: eslint-
- name: Lint
  run: eslint --cache --cache-location .eslintcache src/
# Persist .eslintcache; restore-keys added (matching the mypy step) so
# a stale cache still gives a partial speedup.
- name: Cache ESLint
  uses: actions/cache@v4
  with:
    path: .eslintcache
    key: eslint-${{ hashFiles('**/*.ts', '**/*.tsx') }}
    restore-keys: eslint-
- name: Lint
  run: eslint --cache --cache-location .eslintcache src/
# Persist .eslintcache; restore-keys added (matching the mypy step) so
# a stale cache still gives a partial speedup.
- name: Cache ESLint
  uses: actions/cache@v4
  with:
    path: .eslintcache
    key: eslint-${{ hashFiles('**/*.ts', '**/*.tsx') }}
    restore-keys: eslint-
- name: Lint
  run: eslint --cache --cache-location .eslintcache src/
runs-on: self-hosted  # In our runner setup (systemd service)
# Runner installed at /opt/actions-runner
# Runs as dedicated ci-runner user with Docker socket access
runs-on: self-hosted  # In our runner setup (systemd service)
# Runner installed at /opt/actions-runner
# Runs as dedicated ci-runner user with Docker socket access
runs-on: self-hosted  # In our runner setup (systemd service)
# Runner installed at /opt/actions-runner
# Runs as dedicated ci-runner user with Docker socket access
jobs:
  build-and-test:        # 3 minutes
    runs-on: self-hosted
    steps: [...]
  security-scan:         # 5 minutes, runs in parallel
    runs-on: self-hosted
    steps:
      # NOTE(review): pin trivy-action to a release tag instead of the
      # mutable @master ref for reproducible runs.
      - uses: aquasecurity/trivy-action@master
      - run: bandit -r src/ -f json -o bandit-report.json
  deploy:                # waits for BOTH
    needs: [build-and-test, security-scan]
    if: github.ref == 'refs/heads/main'
    steps: [...]
jobs:
  build-and-test:        # 3 minutes
    runs-on: self-hosted
    steps: [...]
  security-scan:         # 5 minutes, runs in parallel
    runs-on: self-hosted
    steps:
      # NOTE(review): pin trivy-action to a release tag instead of the
      # mutable @master ref for reproducible runs.
      - uses: aquasecurity/trivy-action@master
      - run: bandit -r src/ -f json -o bandit-report.json
  deploy:                # waits for BOTH
    needs: [build-and-test, security-scan]
    if: github.ref == 'refs/heads/main'
    steps: [...]
jobs:
  build-and-test:        # 3 minutes
    runs-on: self-hosted
    steps: [...]
  security-scan:         # 5 minutes, runs in parallel
    runs-on: self-hosted
    steps:
      # NOTE(review): pin trivy-action to a release tag instead of the
      # mutable @master ref for reproducible runs.
      - uses: aquasecurity/trivy-action@master
      - run: bandit -r src/ -f json -o bandit-report.json
  deploy:                # waits for BOTH
    needs: [build-and-test, security-scan]
    if: github.ref == 'refs/heads/main'
    steps: [...]

- Download GitHub Actions runner binary
- Create systemd service
- Give the runner user Docker socket access
- Configure labels for routing

- Bazel or Nx for true incremental builds across a monorepo. We're not there yet — our repo isn't big enough to justify the complexity.
- Test impact analysis using coverage data to be even more surgical about test selection.
- Merge queues (GitHub's native feature) to batch CI runs and reduce total runner time.
- Remote build caching (Turborepo, Gradle remote cache) for teams with larger monorepos — we've seen this shave another 40% off already-optimized builds.

- Self-Hosted LLMs vs API: Cost Comparison — the self-hosted runner approach from Step 6 applied to AI inference infrastructure
- Build vs Buy Framework — should you build your own CI tooling or buy? (Spoiler: optimize what you have first)
- Secret Management for DevOps — keeping credentials secure in fast CI/CD pipelines