# docker-compose.yml — what I actually run
version: '3.8'
services:
  n8n:
    image: docker.n8nio/n8n
    restart: unless-stopped
    ports:
      # host:container — n8n's UI/API; front this with a reverse proxy,
      # do not expose 5678 to the internet directly
      - "5678:5678"
    environment:
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER=${N8N_USER}
      - N8N_BASIC_AUTH_PASSWORD=${N8N_PASSWORD}
      - N8N_HOST=n8n.yourdomain.com
      - N8N_PORT=5678
      - N8N_PROTOCOL=https
      # Must match the public URL or incoming webhooks silently fail
      - WEBHOOK_URL=https://n8n.yourdomain.com/
      - GENERIC_TIMEZONE=America/Denver
      # Postgres instead of the default SQLite — handles concurrency
      - DB_TYPE=postgresdb
      - DB_POSTGRESDB_HOST=postgres
      - DB_POSTGRESDB_DATABASE=n8n
      - DB_POSTGRESDB_USER=${POSTGRES_USER}
      - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD}
    volumes:
      # Persists credentials and encryption key across container rebuilds
      - n8n_data:/home/node/.n8n
    depends_on:
      - postgres

  postgres:
    image: postgres:15
    restart: unless-stopped
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=n8n
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  n8n_data:
  postgres_data:
# docker-compose.yml — what I actually run
version: '3.8'
services:
  n8n:
    image: docker.n8nio/n8n
    restart: unless-stopped
    ports:
      # host:container — n8n's UI/API; front this with a reverse proxy,
      # do not expose 5678 to the internet directly
      - "5678:5678"
    environment:
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER=${N8N_USER}
      - N8N_BASIC_AUTH_PASSWORD=${N8N_PASSWORD}
      - N8N_HOST=n8n.yourdomain.com
      - N8N_PORT=5678
      - N8N_PROTOCOL=https
      # Must match the public URL or incoming webhooks silently fail
      - WEBHOOK_URL=https://n8n.yourdomain.com/
      - GENERIC_TIMEZONE=America/Denver
      # Postgres instead of the default SQLite — handles concurrency
      - DB_TYPE=postgresdb
      - DB_POSTGRESDB_HOST=postgres
      - DB_POSTGRESDB_DATABASE=n8n
      - DB_POSTGRESDB_USER=${POSTGRES_USER}
      - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD}
    volumes:
      # Persists credentials and encryption key across container rebuilds
      - n8n_data:/home/node/.n8n
    depends_on:
      - postgres

  postgres:
    image: postgres:15
    restart: unless-stopped
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=n8n
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  n8n_data:
  postgres_data:
# docker-compose.yml — what I actually run
version: '3.8'
services:
  n8n:
    image: docker.n8nio/n8n
    restart: unless-stopped
    ports:
      # host:container — n8n's UI/API; front this with a reverse proxy,
      # do not expose 5678 to the internet directly
      - "5678:5678"
    environment:
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER=${N8N_USER}
      - N8N_BASIC_AUTH_PASSWORD=${N8N_PASSWORD}
      - N8N_HOST=n8n.yourdomain.com
      - N8N_PORT=5678
      - N8N_PROTOCOL=https
      # Must match the public URL or incoming webhooks silently fail
      - WEBHOOK_URL=https://n8n.yourdomain.com/
      - GENERIC_TIMEZONE=America/Denver
      # Postgres instead of the default SQLite — handles concurrency
      - DB_TYPE=postgresdb
      - DB_POSTGRESDB_HOST=postgres
      - DB_POSTGRESDB_DATABASE=n8n
      - DB_POSTGRESDB_USER=${POSTGRES_USER}
      - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD}
    volumes:
      # Persists credentials and encryption key across container rebuilds
      - n8n_data:/home/node/.n8n
    depends_on:
      - postgres

  postgres:
    image: postgres:15
    restart: unless-stopped
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=n8n
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  n8n_data:
  postgres_data:
Webhook (Stripe) → Verify signature (Code node) → Switch on event.type → checkout.session.completed: → Update database (Postgres node) → Send welcome email (SendGrid node) → invoice.payment_failed: → Update database → Alert Slack → customer.subscription.deleted: → Update database → Trigger offboarding sequence
Webhook (Stripe) → Verify signature (Code node) → Switch on event.type → checkout.session.completed: → Update database (Postgres node) → Send welcome email (SendGrid node) → invoice.payment_failed: → Update database → Alert Slack → customer.subscription.deleted: → Update database → Trigger offboarding sequence
Webhook (Stripe) → Verify signature (Code node) → Switch on event.type → checkout.session.completed: → Update database (Postgres node) → Send welcome email (SendGrid node) → invoice.payment_failed: → Update database → Alert Slack → customer.subscription.deleted: → Update database → Trigger offboarding sequence
// Code node — call Claude with structured output
const Anthropic = require('@anthropic-ai/sdk'); const client = new Anthropic({ apiKey: $env.ANTHROPIC_API_KEY,
}); const response = await client.messages.create({ model: 'claude-sonnet-4-6', max_tokens: 1024, messages: [ { role: 'user', content: `Analyze this content and return JSON: ${$json.content}` } ],
}); const text = response.content[0].text; // Parse structured output
try { return [{ json: JSON.parse(text) }];
} catch { return [{ json: { raw: text, parseError: true } }];
}
// Code node — call Claude with structured output
const Anthropic = require('@anthropic-ai/sdk'); const client = new Anthropic({ apiKey: $env.ANTHROPIC_API_KEY,
}); const response = await client.messages.create({ model: 'claude-sonnet-4-6', max_tokens: 1024, messages: [ { role: 'user', content: `Analyze this content and return JSON: ${$json.content}` } ],
}); const text = response.content[0].text; // Parse structured output
try { return [{ json: JSON.parse(text) }];
} catch { return [{ json: { raw: text, parseError: true } }];
}
// Code node — call Claude with structured output
const Anthropic = require('@anthropic-ai/sdk'); const client = new Anthropic({ apiKey: $env.ANTHROPIC_API_KEY,
}); const response = await client.messages.create({ model: 'claude-sonnet-4-6', max_tokens: 1024, messages: [ { role: 'user', content: `Analyze this content and return JSON: ${$json.content}` } ],
}); const text = response.content[0].text; // Parse structured output
try { return [{ json: JSON.parse(text) }];
} catch { return [{ json: { raw: text, parseError: true } }];
}
Schedule (6:00 AM daily) → Read content queue from Postgres → Filter: ready_to_post = true AND scheduled_for <= now() → Loop over items: → Switch on platform: → dev.to: HTTP Request node (dev.to API) → LinkedIn: HTTP Request node (LinkedIn API) → Instagram: HTTP Request node (Buffer API) → Update post status in database → Wait 30 seconds (avoid rate limits) → Send daily summary to Slack
Schedule (6:00 AM daily) → Read content queue from Postgres → Filter: ready_to_post = true AND scheduled_for <= now() → Loop over items: → Switch on platform: → dev.to: HTTP Request node (dev.to API) → LinkedIn: HTTP Request node (LinkedIn API) → Instagram: HTTP Request node (Buffer API) → Update post status in database → Wait 30 seconds (avoid rate limits) → Send daily summary to Slack
Schedule (6:00 AM daily) → Read content queue from Postgres → Filter: ready_to_post = true AND scheduled_for <= now() → Loop over items: → Switch on platform: → dev.to: HTTP Request node (dev.to API) → LinkedIn: HTTP Request node (LinkedIn API) → Instagram: HTTP Request node (Buffer API) → Update post status in database → Wait 30 seconds (avoid rate limits) → Send daily summary to Slack
Error Trigger → Slack (send message): "Workflow '{{ $json.workflow.name }}' failed\n{{ $json.execution.error.message }}"
Error Trigger → Slack (send message): "Workflow '{{ $json.workflow.name }}' failed\n{{ $json.execution.error.message }}"
Error Trigger → Slack (send message): "Workflow '{{ $json.workflow.name }}' failed\n{{ $json.execution.error.message }}"

- Zero maintenance — Zapier handles uptime, updates, and reliability. It just works.
- 5000+ integrations — if you need a connector, Zapier almost certainly has it
- Non-technical user friendly — your marketing team can build Zaps without engineering help
- Zapier's error handling — Zaps retry automatically, you get emails on failures, and the dashboard shows exactly what went wrong

- Use PostgreSQL, not SQLite — the default SQLite storage doesn't handle concurrent workflow executions well. Switch before you hit problems.
- Set WEBHOOK_URL correctly — n8n uses this to generate webhook URLs for your workflows. If it's wrong, incoming webhooks silently fail.
- Run behind a reverse proxy — I use Caddy for automatic HTTPS. n8n should not be exposed on port 5678 directly.

- Inventory your Zaps — list every Zap, its trigger, actions, and approximate run frequency
- Start with simple ones — migrate your simplest Zaps first to get comfortable with n8n's node model
- Run in parallel — keep Zapier Zaps active while building n8n equivalents; validate they produce the same outputs before deactivating Zapier
- Migrate complex ones last — multi-step Zaps with branching are easier once you understand n8n's flow model
- Don't cancel Zapier immediately — wait 30 days after full migration to confirm nothing is missing