danilonovais committed on
Commit
caca973
·
1 Parent(s): 8bb173b

feat: add workflows for nightly backups, deployment to Hugging Face, and knowledge base sync; include scripts for backup and restore

Browse files
n8n-infra/.github/workflows/backup-workflows.yml ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
name: Nightly Backups (DB + Workflows)

on:
  schedule:
    - cron: '0 3 * * *' # daily at 03:00 UTC
  workflow_dispatch: {}

# The "Commit and push backups" step writes to this repository. The default
# GITHUB_TOKEN may be read-only depending on repo settings, which would make
# `git push` fail — request write access explicitly.
permissions:
  contents: write

jobs:
  backup:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install tools
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends postgresql-client jq curl

      - name: Run backup script
        env:
          DB_POSTGRESDB_HOST: ${{ secrets.DB_POSTGRESDB_HOST }}
          DB_POSTGRESDB_PORT: ${{ secrets.DB_POSTGRESDB_PORT }}
          DB_POSTGRESDB_DATABASE: ${{ secrets.DB_POSTGRESDB_DATABASE }}
          DB_POSTGRESDB_USER: ${{ secrets.DB_POSTGRESDB_USER }}
          DB_POSTGRESDB_PASSWORD: ${{ secrets.DB_POSTGRESDB_PASSWORD }}
          N8N_URL: ${{ secrets.N8N_URL }}
          N8N_API_KEY: ${{ secrets.N8N_API_KEY }}
        run: |
          bash n8n-infra/scripts/backup.sh

      - name: Commit and push backups
        if: ${{ success() }}
        run: |
          # Standard github-actions bot identity (the previous value was a
          # redacted "[email protected]" placeholder).
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "github-actions[bot]"
          git add n8n-infra/workflows/backup || true
          git add n8n-infra/backups || true
          git commit -m "chore(backups): nightly DB + workflow exports" || echo "No changes to commit"
          git push
n8n-infra/.github/workflows/deploy-to-hf.yml ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
name: Deploy to Hugging Face Space

on:
  push:
    branches: [ main ]
    paths:
      - 'n8n-infra/docker/Dockerfile'
      - 'n8n-infra/.github/workflows/deploy-to-hf.yml'
  workflow_dispatch: {}

# This workflow only reads the repo; pushing to the Space uses HF_TOKEN.
permissions:
  contents: read

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Prepare Space push payload
        id: prep
        run: |
          mkdir -p space_push
          cp n8n-infra/docker/Dockerfile space_push/Dockerfile
          echo "# n8n Docker Space" > space_push/README.md
          echo "Deployed from GitHub via Actions." >> space_push/README.md

      - name: Configure git
        run: |
          # Standard github-actions bot identity (the previous value was a
          # redacted "[email protected]" placeholder).
          git config --global user.email "github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      - name: Push to Hugging Face Space
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
          HF_SPACE_ID: ${{ secrets.HF_SPACE_ID }}
        run: |
          if [ -z "${HF_TOKEN}" ] || [ -z "${HF_SPACE_ID}" ]; then
            echo "HF_TOKEN or HF_SPACE_ID is not set" >&2
            exit 1
          fi
          cd space_push
          git init
          git add .
          git commit -m "Deploy from GitHub Actions"
          git branch -M main
          git remote add origin "https://user:${HF_TOKEN}@huggingface.co/spaces/${HF_SPACE_ID}"
          git push -f origin main

      - name: Request Space restart
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
          HF_SPACE_ID: ${{ secrets.HF_SPACE_ID }}
        run: |
          # huggingface_hub is NOT preinstalled on ubuntu-latest runners;
          # without this install the inline Python below fails with
          # ModuleNotFoundError.
          python3 -m pip install --quiet huggingface_hub
          python3 - <<'PY'
          import os
          from huggingface_hub import HfApi

          token = os.environ["HF_TOKEN"]
          space = os.environ["HF_SPACE_ID"]
          api = HfApi(token=token)
          api.restart_space(repo_id=space)
          print(f"Restart requested for Space: {space}")
          PY
n8n-infra/.github/workflows/sync-knowledge.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
name: Sync Knowledge Base

on:
  schedule:
    - cron: '0 */12 * * *' # every 12 hours
  workflow_dispatch: {}

# The final step commits and pushes the refreshed mirrors, so the workflow
# token needs write access to repository contents.
permissions:
  contents: write

jobs:
  sync:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install git and rsync
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends git rsync curl

      - name: Sync repos and trigger ingestion
        env:
          WEBHOOK_URL: ${{ secrets.N8N_INGEST_WEBHOOK_URL }}
        run: |
          bash n8n-infra/scripts/sync-knowledge.sh

      - name: Commit and push updated knowledge mirror
        run: |
          # Standard github-actions bot identity (the previous value was a
          # redacted "[email protected]" placeholder).
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "github-actions[bot]"
          git add n8n-infra/knowledge || true
          git commit -m "chore(knowledge): sync mirrors" || echo "No changes to commit"
          git push
n8n-infra/README.md ADDED
@@ -0,0 +1,190 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # n8n Infrastructure on Hugging Face Spaces + Supabase
2
+
3
+ This repository provides infra-as-code, Docker assets, automation scripts, and CI/CD to run a self-hosted n8n instance on Hugging Face Spaces with Supabase (Postgres + SSL) as the database, optional vector store, and knowledge base sync.
4
+
5
+ ## Overview
6
+
7
+ - Containerization: Docker (pinned n8n version) and Docker Compose
8
+ - Orchestration: Hugging Face Spaces (Docker Space)
9
+ - Database: Supabase Postgres (SSL required)
10
+ - Vector Store: pgvector on Supabase or optional Qdrant service for local dev
11
+ - Integrations: GitHub, Google Cloud CLI, Vertex AI, LangChain community nodes
12
+ - Automation: Backups (DB + workflows), knowledge sync, CI/CD with GitHub Actions
13
+
14
+ ## Repository Structure
15
+
16
+ ```
17
+ n8n-infra/
18
+ docker/
19
+ Dockerfile
20
+ docker-compose.yml
21
+ config/
22
+ .env.example
23
+ credentials/
24
+ workflows/
25
+ backup/
26
+ knowledge/
27
+ n8n/
28
+ videos-e-animacoes/
29
+ midjourney-prompt/
30
+ scripts/
31
+ backup.sh
32
+ restore.sh
33
+ sync-knowledge.sh
34
+ .github/
35
+ workflows/
36
+ deploy-to-hf.yml
37
+ backup-workflows.yml
38
+ sync-knowledge.yml
39
+ README.md
40
+ ```
41
+
42
+ Notes:
43
+ - CI files are in `.github/workflows` (standard for GitHub Actions).
44
+ - Secrets are provided via repo/Actions secrets or environment variables at runtime.
45
+
46
+ ## Prerequisites
47
+
48
+ - Supabase project with SSL enabled and (optionally) pgvector extension enabled
49
+ - Hugging Face account with a Docker Space created
50
+ - GitHub repository (this repo) with Actions enabled
51
+ - Local: Docker and Docker Compose installed
52
+
53
+ ## Configuration
54
+
55
+ 1) Copy the env template and fill values:
56
+
57
+ ```
58
+ cp config/.env.example config/.env
59
+ ```
60
+
61
+ Key variables (see `config/.env.example` for full list):
62
+ - N8N_ENCRYPTION_KEY, N8N_USER_MANAGEMENT_JWT_SECRET
63
+ - DB_* for Supabase (host, db, user, password) with `DB_POSTGRESDB_SSL=true`
64
+ - WEBHOOK_URL (public URL for n8n webhooks)
65
+ - HF_TOKEN, GITHUB_TOKEN (for CI/CD & sync jobs)
66
+ - GOOGLE_PROJECT_ID, GOOGLE_CREDENTIALS_PATH (if using Google/Vertex)
67
+ - N8N_API_KEY and N8N_URL for workflow export API
68
+
69
+ Store any OAuth JSON/keyfiles under `config/credentials/` (keep them out of Git).
70
+
71
+ Placeholders to populate in `config/.env` (high priority):
72
+ - N8N_ENCRYPTION_KEY=
73
+ - N8N_USER_MANAGEMENT_JWT_SECRET=
74
+ - DB_TYPE=postgresdb
75
+ - DB_POSTGRESDB_HOST=
76
+ - DB_POSTGRESDB_PORT=5432
77
+ - DB_POSTGRESDB_DATABASE=
78
+ - DB_POSTGRESDB_USER=
79
+ - DB_POSTGRESDB_PASSWORD=
80
+ - DB_POSTGRESDB_SSL=true
81
+ - WEBHOOK_URL=
82
+ - HF_TOKEN=
83
+ - GITHUB_TOKEN=
84
+ - GOOGLE_PROJECT_ID=
85
+ - GOOGLE_CREDENTIALS_PATH=
86
+
87
+ ## Local Development with Docker Compose
88
+
89
+ 1) Ensure `config/.env` is present and valid. For local testing you can point to Supabase or a local Postgres; for Supabase, keep SSL enabled.
90
+
91
+ 2) Start services:
92
+
93
+ ```
94
+ cd n8n-infra/docker
95
+ docker compose --env-file ../config/.env up -d
96
+ ```
97
+
98
+ Services:
99
+ - n8n: http://localhost:5678 (first user registration on first run)
100
+ - Optional vector DB: Qdrant at http://localhost:6333
101
+
102
+ 3) Stop services:
103
+
104
+ ```
105
+ docker compose --env-file ../config/.env down
106
+ ```
107
+
108
+ ## Deploy on Hugging Face Spaces (Docker)
109
+
110
+ 1) Create a Space (type: Docker) on Hugging Face.
111
+
112
+ 2) Configure repository secrets in GitHub:
113
+ - `HF_TOKEN`: a write token for the Space
114
+ - `HF_SPACE_ID`: e.g. `org-or-user/space-name`
115
+
116
+ 3) CI/CD: The workflow `deploy-to-hf.yml` pushes a minimal Space repository that contains the Dockerfile. On push to `main` or manual dispatch, it:
117
+ - Copies `n8n-infra/docker/Dockerfile` to a temporary directory
118
+ - Commits and pushes that directory to the Space git repo
119
+ - Requests a Space restart/rebuild
120
+
121
+ You can customize the pushed contents if you need additional runtime assets.
122
+
123
+ 4) Space Secrets: In the Space settings, define the same environment variables as in `config/.env` for production (e.g., DB_*, N8N_ENCRYPTION_KEY, N8N_USER_MANAGEMENT_JWT_SECRET, WEBHOOK_URL, GOOGLE_*). These are injected at container runtime.
124
+
125
+ ## Supabase Setup
126
+
127
+ 1) Create a Supabase project and note the host, database, user, and password.
128
+
129
+ 2) SSL enforcement:
130
+ - Keep `DB_POSTGRESDB_SSL=true`
131
+ - If Supabase requires, set `DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED=false`
132
+
133
+ 3) Vector support:
134
+ - Option A (recommended): Enable pgvector on your Supabase Postgres for unified storage of embeddings.
135
+ - Option B: Use an external vector DB (e.g., Qdrant) for local dev or if preferred.
136
+
137
+ ## Backups and Restores
138
+
139
+ Workflows and DB backups can run locally or via GitHub Actions.
140
+
141
+ - Backup script:
142
+ - DB: `pg_dump` with SSL required
143
+ - Workflows: Export via n8n REST API (`N8N_URL` + `N8N_API_KEY`)
144
+
145
+ Run locally:
146
+
147
+ ```
148
+ bash n8n-infra/scripts/backup.sh
149
+ ```
150
+
151
+ Restore locally (from a `.sql` dump):
152
+
153
+ ```
154
+ bash n8n-infra/scripts/restore.sh path/to/backup.sql
155
+ ```
156
+
157
+ Nightly backups via Actions: see `.github/workflows/backup-workflows.yml`.
158
+
159
+ ## Knowledge Base Sync
160
+
161
+ The `sync-knowledge.sh` script pulls the specified GitHub repos into `n8n-infra/knowledge/` and then triggers ingestion (either via an n8n webhook you define or your own ingestion script) into your chosen vector store (pgvector or Qdrant).
162
+
163
+ Run locally:
164
+
165
+ ```
166
+ bash n8n-infra/scripts/sync-knowledge.sh
167
+ ```
168
+
169
+ Automated: see `.github/workflows/sync-knowledge.yml`.
170
+
171
+ ## LangChain, Agents, and Community Nodes
172
+
173
+ The Dockerfile enables community nodes and includes a placeholder list via `N8N_COMMUNITY_PACKAGES`. Add or adjust packages for LangChain, Google APIs, Vertex AI, etc. Configure related credentials via environment variables and `config/credentials/`.
174
+
175
+ Notes for n8n community nodes:
176
+ - Ensure `N8N_ENABLE_COMMUNITY_NODES=true`
177
+ - Set `N8N_COMMUNITY_PACKAGES` to include packages like `n8n-nodes-langchain`, `n8n-nodes-google`, and any Vertex AI integrations you use.
178
+ - After first run, verify nodes appear in the n8n editor.
179
+
180
+ ## Rollback Strategy
181
+
182
+ - Hugging Face Space: Use the Space’s git history to roll back to a previous commit or re-run a prior successful build.
183
+ - GitHub Actions: Redeploy a previous commit via workflow “Run workflow” (manual dispatch).
184
+ - Database: Restore using `scripts/restore.sh` from a prior `.sql` dump.
185
+
186
+ ## Security Notes
187
+
188
+ - Never commit real secrets. Use `config/.env` locally and GitHub Actions secrets in CI.
189
+ - Use SSL for DB connections (`DB_POSTGRESDB_SSL=true`). For Supabase, set `DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED=false` if required.
190
+ - Rotate keys regularly (`N8N_ENCRYPTION_KEY`, `N8N_USER_MANAGEMENT_JWT_SECRET`).
n8n-infra/config/.env.example ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# n8n core settings.
# N8N_ENCRYPTION_KEY and N8N_USER_MANAGEMENT_JWT_SECRET must be set to
# strong random values before first boot; they protect stored credentials.
N8N_ENCRYPTION_KEY=
N8N_USER_MANAGEMENT_JWT_SECRET=
N8N_HOST=localhost
N8N_PORT=5678
N8N_PROTOCOL=http

# Database: Supabase Postgres. Keep SSL on; Supabase requires it.
DB_TYPE=postgresdb
DB_POSTGRESDB_HOST=
DB_POSTGRESDB_PORT=5432
DB_POSTGRESDB_DATABASE=
DB_POSTGRESDB_USER=
DB_POSTGRESDB_PASSWORD=
DB_POSTGRESDB_SSL=true
DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED=false

# Public base URL used by n8n when registering webhooks.
WEBHOOK_URL=

# Tokens for external integrations (CI/CD and sync jobs).
HF_TOKEN=
GITHUB_TOKEN=

# Google / Vertex AI. The credentials file is expected inside the container.
GOOGLE_PROJECT_ID=
GOOGLE_CREDENTIALS_PATH=/home/node/.n8n/credentials/google-credentials.json

# Community packages — adjust the list as needed.
# NOTE(review): recent n8n releases document N8N_COMMUNITY_PACKAGES_ENABLED
# rather than these names — verify against the docs for your n8n version.
N8N_ENABLE_COMMUNITY_NODES=true
N8N_COMMUNITY_PACKAGES=["n8n-nodes-langchain","n8n-nodes-google","n8n-nodes-vertexai"]

# n8n API access, used by scripts/backup.sh to export workflows.
N8N_URL=http://localhost:5678
N8N_API_KEY=

# Optional vector DB for local development (prod uses Supabase + pgvector).
QDRANT_URL=http://localhost:6333
n8n-infra/config/credentials/.gitkeep ADDED
@@ -0,0 +1 @@
 
 
1
+
n8n-infra/docker/Dockerfile ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Base: pinned n8n image (do not use latest)
FROM n8nio/n8n:1.53.1

# Switch to root to install OS packages
USER root

# The official n8nio/n8n image is Alpine-based, so packages must be installed
# with apk — the previous apt-get commands do not exist in this image and the
# build failed at this step.
RUN apk add --no-cache \
      bash git curl jq postgresql-client python3 py3-pip ca-certificates gnupg

# Google Cloud CLI: there is no apt/apk repository for Alpine; install from
# the official release tarball instead.
RUN curl -fsSL \
      https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-cli-linux-x86_64.tar.gz \
      -o /tmp/google-cloud-cli.tar.gz && \
    tar -xzf /tmp/google-cloud-cli.tar.gz -C /opt && \
    /opt/google-cloud-sdk/install.sh --quiet --usage-reporting=false --path-update=false && \
    ln -s /opt/google-cloud-sdk/bin/gcloud /usr/local/bin/gcloud && \
    ln -s /opt/google-cloud-sdk/bin/gsutil /usr/local/bin/gsutil && \
    rm -f /tmp/google-cloud-cli.tar.gz

# Create directories for custom packages and knowledge/workflows mounts
RUN mkdir -p /home/node/.n8n/custom \
    && chown -R node:node /home/node/.n8n

# Enable community nodes and declare default packages (adjust as needed).
# NOTE(review): recent n8n releases document N8N_COMMUNITY_PACKAGES_ENABLED;
# verify these variable names against the docs for n8n 1.53.x.
ENV N8N_ENABLE_COMMUNITY_NODES=true \
    N8N_COMMUNITY_PACKAGES='["n8n-nodes-langchain", "n8n-nodes-google", "n8n-nodes-vertexai"]'

# Drop back to the unprivileged n8n user
USER node

# Expose default n8n port
EXPOSE 5678

# n8n entrypoint is provided by the base image
n8n-infra/docker/docker-compose.yml ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# NOTE: the top-level `version:` key is obsolete under Compose v2 and only
# produces a warning, so it has been removed.
services:
  n8n:
    image: n8nio/n8n:1.53.1
    container_name: n8n
    restart: unless-stopped
    env_file:
      - ../config/.env
    ports:
      - "5678:5678"
    environment:
      # Force SSL for DB
      - DB_POSTGRESDB_SSL=true
      # Community nodes
      - N8N_ENABLE_COMMUNITY_NODES=${N8N_ENABLE_COMMUNITY_NODES:-true}
      - N8N_COMMUNITY_PACKAGES=${N8N_COMMUNITY_PACKAGES:-["n8n-nodes-langchain","n8n-nodes-google","n8n-nodes-vertexai"]}
      # n8n basics
      - N8N_HOST=${N8N_HOST:-localhost}
      - N8N_PORT=${N8N_PORT:-5678}
      - N8N_PROTOCOL=${N8N_PROTOCOL:-http}
      - WEBHOOK_URL=${WEBHOOK_URL}
    volumes:
      - n8n_data:/home/node/.n8n
      - ../workflows:/data/workflows:rw
      - ../knowledge:/data/knowledge:rw
    healthcheck:
      # The stock n8n image is Alpine-based and does not ship curl, so the
      # previous curl-based check always failed. BusyBox wget is present.
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:5678/healthz"]
      interval: 30s
      timeout: 10s
      retries: 5

  # Optional vector DB for local development (use Supabase+pgvector in prod)
  qdrant:
    image: qdrant/qdrant:v1.9.1
    container_name: qdrant
    restart: unless-stopped
    ports:
      - "6333:6333"
    volumes:
      - qdrant_data:/qdrant/storage

volumes:
  n8n_data:
  qdrant_data:
n8n-infra/knowledge/midjourney-prompt/.gitkeep ADDED
@@ -0,0 +1 @@
 
 
1
+
n8n-infra/knowledge/n8n/.gitkeep ADDED
@@ -0,0 +1 @@
 
 
1
+
n8n-infra/knowledge/videos-e-animacoes/.gitkeep ADDED
@@ -0,0 +1 @@
 
 
1
+
n8n-infra/scripts/backup.sh ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env bash
# Back up the Supabase Postgres database (pg_dump over SSL) and export all
# n8n workflows via the public REST API into timestamped files.
#
# Required env: DB_POSTGRESDB_HOST/DATABASE/USER/PASSWORD (PORT optional).
# Optional env: N8N_URL + N8N_API_KEY to also export workflows.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
ENV_FILE="$ROOT_DIR/config/.env"

# Load config/.env when present; otherwise rely on variables already exported
# in the environment. The previous hard failure here broke the GitHub Actions
# workflow, which supplies everything via `env:` and never has config/.env.
if [ -f "$ENV_FILE" ]; then
  # shellcheck disable=SC1090
  source "$ENV_FILE"
fi

DATE_STAMP="$(date +%Y%m%d-%H%M%S)"
BACKUP_DIR_DB="$ROOT_DIR/backups/db"
BACKUP_DIR_WF="$ROOT_DIR/workflows/backup"
mkdir -p "$BACKUP_DIR_DB" "$BACKUP_DIR_WF"

echo "==> Backing up Supabase Postgres (SSL required)"
if [[ -z "${DB_POSTGRESDB_HOST:-}" || -z "${DB_POSTGRESDB_DATABASE:-}" || -z "${DB_POSTGRESDB_USER:-}" || -z "${DB_POSTGRESDB_PASSWORD:-}" ]]; then
  echo "Database env vars missing. Check config/.env" >&2
  exit 1
fi

export PGPASSWORD="$DB_POSTGRESDB_PASSWORD"
# pg_dump has no --sslmode option (the old --sslmode=require made it abort
# with "invalid option"); SSL is requested via the libpq PGSSLMODE env var.
export PGSSLMODE=require
pg_dump \
  --host="$DB_POSTGRESDB_HOST" \
  --port="${DB_POSTGRESDB_PORT:-5432}" \
  --username="$DB_POSTGRESDB_USER" \
  --dbname="$DB_POSTGRESDB_DATABASE" \
  --format=plain \
  --no-owner \
  --no-privileges \
  --verbose \
  > "$BACKUP_DIR_DB/db-backup-$DATE_STAMP.sql"

echo "==> Exporting n8n workflows via API"
if [[ -z "${N8N_URL:-}" || -z "${N8N_API_KEY:-}" ]]; then
  echo "N8N_URL or N8N_API_KEY missing. Skipping workflows export." >&2
else
  # X-N8N-API-KEY authenticates against the public API at /api/v1/...;
  # the /rest/... endpoints are the cookie-authenticated internal UI API
  # and reject API-key requests.
  WF_LIST_JSON="$(curl -fsSL -H "X-N8N-API-KEY: $N8N_API_KEY" "$N8N_URL/api/v1/workflows")"
  echo "$WF_LIST_JSON" | jq -c '.data[]' | while read -r wf; do
    ID="$(echo "$wf" | jq -r '.id')"
    # Replace spaces and slashes so the name is filesystem-safe.
    NAME="$(echo "$wf" | jq -r '.name' | tr ' /' '__')"
    echo "  - Exporting workflow $ID: $NAME"
    curl -fsSL -H "X-N8N-API-KEY: $N8N_API_KEY" \
      "$N8N_URL/api/v1/workflows/$ID" \
      | jq '.' > "$BACKUP_DIR_WF/${DATE_STAMP}-${ID}-${NAME}.json"
  done
fi

echo "==> Backup completed"
n8n-infra/scripts/restore.sh ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env bash
# Restore the Supabase Postgres database from a plain-format .sql dump
# produced by backup.sh. Usage: restore.sh path/to/backup.sql
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
ENV_FILE="$ROOT_DIR/config/.env"

# Load config/.env when present; otherwise rely on already-exported env vars
# (keeps parity with backup.sh and allows use from CI).
if [ -f "$ENV_FILE" ]; then
  # shellcheck disable=SC1090
  source "$ENV_FILE"
fi

SQL_FILE="${1:-}"
if [ -z "$SQL_FILE" ] || [ ! -f "$SQL_FILE" ]; then
  echo "Usage: $0 path/to/backup.sql" >&2
  exit 1
fi

if [[ -z "${DB_POSTGRESDB_HOST:-}" || -z "${DB_POSTGRESDB_DATABASE:-}" || -z "${DB_POSTGRESDB_USER:-}" || -z "${DB_POSTGRESDB_PASSWORD:-}" ]]; then
  echo "Database env vars missing. Check config/.env" >&2
  exit 1
fi

echo "==> Restoring database from $SQL_FILE"
export PGPASSWORD="$DB_POSTGRESDB_PASSWORD"
# psql's --set defines a psql *variable*; `--set sslmode=require` did NOT
# enable SSL. SSL is requested via the libpq PGSSLMODE environment variable.
export PGSSLMODE=require
psql \
  --host="$DB_POSTGRESDB_HOST" \
  --port="${DB_POSTGRESDB_PORT:-5432}" \
  --username="$DB_POSTGRESDB_USER" \
  --dbname="$DB_POSTGRESDB_DATABASE" \
  --set=ON_ERROR_STOP=on \
  --file="$SQL_FILE"

echo "==> Restore completed"
n8n-infra/scripts/sync-knowledge.sh ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env bash
# Mirror selected subfolders of knowledge repos into knowledge/<name> and
# optionally trigger an n8n ingestion webhook (WEBHOOK_URL).
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
KNOW_DIR="$ROOT_DIR/knowledge"
CACHE_DIR="$KNOW_DIR/.cache"
ENV_FILE="$ROOT_DIR/config/.env"

if [ -f "$ENV_FILE" ]; then
  # shellcheck disable=SC1090
  source "$ENV_FILE"
fi

# Map: target subdir under knowledge/ -> "clone-url#subfolder-inside-repo".
# NOTE(review): "midjorney-prompt" looks like a typo for "midjourney-prompt",
# but it may be the actual upstream folder name — verify before changing.
declare -A REPOS=(
  ["n8n"]="https://github.com/danilonovaisv/CHATGPT-knowledge-base.git#projects/n8n"
  ["videos-e-animacoes"]="https://github.com/danilonovaisv/CHATGPT-knowledge-base.git#projects/videos-e-animacoes"
  ["midjourney-prompt"]="https://github.com/danilonovaisv/CHATGPT-knowledge-base.git#projects/midjorney-prompt"
)

#######################################
# Clone (or update) a repo into a cache dir, then mirror only the requested
# subfolder into knowledge/<name>.
#
# The previous implementation cloned straight into the target and then tried
# to prune "everything but the subfolder" with find — which also deleted the
# files rsync had just mirrored in, leaving the target nearly empty. Keeping
# a separate cache clone avoids that destructive cleanup entirely.
# Arguments: $1 target name, $2 "url#subfolder" spec
#######################################
clone_or_update() {
  local name="$1"
  local spec="$2"
  local url="${spec%%#*}"
  local subdir=""
  [[ "$spec" == *#* ]] && subdir="${spec#*#}"

  local cache="$CACHE_DIR/$name"
  if [ -d "$cache/.git" ]; then
    echo "==> Updating cached clone for $name"
    # FETCH_HEAD is always valid after a shallow fetch; origin/HEAD may not
    # exist in a --depth 1 clone, which made the old reset fail.
    (cd "$cache" && git fetch --depth 1 origin && git reset --hard FETCH_HEAD)
  else
    echo "==> Cloning $url (cache for $name)"
    mkdir -p "$CACHE_DIR"
    git clone --depth 1 "$url" "$cache"
  fi

  local src="$cache"
  [ -n "$subdir" ] && src="$cache/$subdir"
  if [ ! -d "$src" ]; then
    echo "WARN: subfolder '$subdir' not found in $url — skipping $name" >&2
    return 0
  fi

  local target="$KNOW_DIR/$name"
  mkdir -p "$target"
  echo "==> Mirroring ${subdir:-repo root} into $target"
  rsync -a --delete --exclude '.git' "$src/" "$target/"
}

for key in "${!REPOS[@]}"; do
  clone_or_update "$key" "${REPOS[$key]}"
done

echo "==> Knowledge repos synced"

# Ingestion: trigger an n8n ingestion workflow via webhook, when configured.
if [[ -n "${WEBHOOK_URL:-}" ]]; then
  echo "==> Triggering n8n webhook for ingestion"
  # Build the payload with printf so it is valid JSON — the old inline string
  # embedded literal backslash-n sequences between tokens, which JSON parsers
  # reject (only real whitespace is allowed between tokens).
  payload="$(printf '{"paths":["%s","%s","%s"],"vector_target":"%s"}' \
    "$KNOW_DIR/n8n" "$KNOW_DIR/videos-e-animacoes" "$KNOW_DIR/midjourney-prompt" \
    "${QDRANT_URL:-pgvector}")"
  curl -fsSL -X POST "$WEBHOOK_URL" \
    -H "Content-Type: application/json" \
    -d "$payload" >/dev/null || true
else
  echo "==> No WEBHOOK_URL set. Skipping ingestion trigger."
fi

echo "==> Sync completed"
n8n-infra/workflows/backup/.gitkeep ADDED
@@ -0,0 +1 @@
 
 
1
+