Compare commits
4 Commits
dev_n8n...de0f3f1e66

| Author | SHA1 | Date |
|---|---|---|
|  | de0f3f1e66 |  |
|  | 393e8c01cd |  |
|  | 0e578dd833 |  |
|  | 5cbe95dc24 |  |
.gitea/workflows/ci-cd-dev-staging.yml (new file, 209 lines)
@@ -0,0 +1,209 @@
name: CI/CD Pipeline (Dev/Staging)

on:
  push:
    branches: [dev, main]

env:
  NODE_VERSION: '20'
  DOCKER_IMAGE: portfolio-app
  CONTAINER_NAME: portfolio-app-staging

jobs:
  staging:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run linting
        run: npm run lint

      - name: Run tests
        run: npm run test:production

      - name: Build application
        run: npm run build

      - name: Run security scan
        run: |
          echo "🔍 Running npm audit..."
          npm audit --audit-level=high || echo "⚠️ Some vulnerabilities found, but continuing..."

      - name: Verify Gitea Variables and Secrets
        run: |
          echo "🔍 Verifying Gitea Variables and Secrets..."

          # Check Variables
          if [ -z "${{ vars.NEXT_PUBLIC_BASE_URL }}" ]; then
            echo "❌ NEXT_PUBLIC_BASE_URL variable is missing!"
            echo "Please set this variable in Gitea repository settings"
            exit 1
          fi
          if [ -z "${{ vars.MY_EMAIL }}" ]; then
            echo "❌ MY_EMAIL variable is missing!"
            echo "Please set this variable in Gitea repository settings"
            exit 1
          fi
          if [ -z "${{ vars.MY_INFO_EMAIL }}" ]; then
            echo "❌ MY_INFO_EMAIL variable is missing!"
            echo "Please set this variable in Gitea repository settings"
            exit 1
          fi

          # Check Secrets
          if [ -z "${{ secrets.MY_PASSWORD }}" ]; then
            echo "❌ MY_PASSWORD secret is missing!"
            echo "Please set this secret in Gitea repository settings"
            exit 1
          fi
          if [ -z "${{ secrets.MY_INFO_PASSWORD }}" ]; then
            echo "❌ MY_INFO_PASSWORD secret is missing!"
            echo "Please set this secret in Gitea repository settings"
            exit 1
          fi
          if [ -z "${{ secrets.ADMIN_BASIC_AUTH }}" ]; then
            echo "❌ ADMIN_BASIC_AUTH secret is missing!"
            echo "Please set this secret in Gitea repository settings"
            exit 1
          fi

          echo "✅ All required Gitea variables and secrets are present"
          echo "📝 Variables found:"
          echo " - NEXT_PUBLIC_BASE_URL: ${{ vars.NEXT_PUBLIC_BASE_URL }}"
          echo " - MY_EMAIL: ${{ vars.MY_EMAIL }}"
          echo " - MY_INFO_EMAIL: ${{ vars.MY_INFO_EMAIL }}"
          echo " - NODE_ENV: staging"
          echo " - LOG_LEVEL: ${{ vars.LOG_LEVEL }}"

      - name: Build Docker image
        run: |
          echo "🏗️ Building Docker image..."
          docker build -t ${{ env.DOCKER_IMAGE }}:staging .
          docker tag ${{ env.DOCKER_IMAGE }}:staging ${{ env.DOCKER_IMAGE }}:staging-$(date +%Y%m%d-%H%M%S)
          echo "✅ Docker image built successfully"

      - name: Deploy Staging using Gitea Variables and Secrets
        run: |
          echo "🚀 Deploying Staging using Gitea Variables and Secrets..."

          echo "📝 Using Gitea Variables and Secrets:"
          echo " - NODE_ENV: staging"
          echo " - LOG_LEVEL: ${LOG_LEVEL}"
          echo " - NEXT_PUBLIC_BASE_URL: ${NEXT_PUBLIC_BASE_URL}"
          echo " - MY_EMAIL: ${MY_EMAIL}"
          echo " - MY_INFO_EMAIL: ${MY_INFO_EMAIL}"
          echo " - MY_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - MY_INFO_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - ADMIN_BASIC_AUTH: [SET FROM GITEA SECRET]"
          echo " - N8N_WEBHOOK_URL: ${N8N_WEBHOOK_URL}"

          # Stop old staging containers
          echo "🛑 Stopping old staging containers..."
          docker compose -f docker-compose.staging.yml down || true

          # Clean up orphaned containers
          echo "🧹 Cleaning up orphaned containers..."
          docker compose -f docker-compose.staging.yml down --remove-orphans || true

          # Start new staging containers
          echo "🚀 Starting new staging containers..."
          docker compose -f docker-compose.staging.yml up -d

          # Wait a moment for containers to start
          echo "⏳ Waiting for containers to start..."
          sleep 10

          # Check container logs for debugging
          echo "📋 Container logs (first 20 lines):"
          docker compose -f docker-compose.staging.yml logs --tail=20

          echo "✅ Staging deployment completed!"
        env:
          NODE_ENV: staging
          LOG_LEVEL: ${{ vars.LOG_LEVEL }}
          NEXT_PUBLIC_BASE_URL: ${{ vars.NEXT_PUBLIC_BASE_URL }}
          NEXT_PUBLIC_UMAMI_URL: ${{ vars.NEXT_PUBLIC_UMAMI_URL }}
          NEXT_PUBLIC_UMAMI_WEBSITE_ID: ${{ vars.NEXT_PUBLIC_UMAMI_WEBSITE_ID }}
          MY_EMAIL: ${{ vars.MY_EMAIL }}
          MY_INFO_EMAIL: ${{ vars.MY_INFO_EMAIL }}
          MY_PASSWORD: ${{ secrets.MY_PASSWORD }}
          MY_INFO_PASSWORD: ${{ secrets.MY_INFO_PASSWORD }}
          ADMIN_BASIC_AUTH: ${{ secrets.ADMIN_BASIC_AUTH }}
          N8N_WEBHOOK_URL: ${{ vars.N8N_WEBHOOK_URL }}
          N8N_API_KEY: ${{ secrets.N8N_API_KEY }}

      - name: Wait for containers to be ready
        run: |
          echo "⏳ Waiting for containers to be ready..."
          sleep 45

          # Check if all containers are running
          echo "📊 Checking container status..."
          docker compose -f docker-compose.staging.yml ps

          # Wait for application container to be healthy
          echo "🏥 Waiting for application container to be healthy..."
          for i in {1..60}; do
            if docker exec portfolio-app-staging curl -f http://localhost:3000/api/health > /dev/null 2>&1; then
              echo "✅ Application container is healthy!"
              break
            fi
            echo "⏳ Waiting for application container... ($i/60)"
            sleep 5
          done

          # Additional wait for main page to be accessible
          echo "🌐 Waiting for main page to be accessible..."
          for i in {1..30}; do
            if curl -f http://localhost:3001/ > /dev/null 2>&1; then
              echo "✅ Main page is accessible!"
              break
            fi
            echo "⏳ Waiting for main page... ($i/30)"
            sleep 3
          done

      - name: Health check
        run: |
          echo "🔍 Running comprehensive health checks..."

          # Check container status
          echo "📊 Container status:"
          docker compose -f docker-compose.staging.yml ps

          # Check application container
          echo "🏥 Checking application container..."
          if docker exec portfolio-app-staging curl -f http://localhost:3000/api/health; then
            echo "✅ Application health check passed!"
          else
            echo "❌ Application health check failed!"
            docker logs portfolio-app-staging --tail=50
            exit 1
          fi

          # Check main page
          if curl -f http://localhost:3001/ > /dev/null; then
            echo "✅ Main page is accessible!"
          else
            echo "❌ Main page is not accessible!"
            exit 1
          fi

          echo "✅ All health checks passed! Staging deployment successful!"

      - name: Cleanup old images
        run: |
          echo "🧹 Cleaning up old images..."
          docker image prune -f
          docker system prune -f
          echo "✅ Cleanup completed"
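When a staging deploy hangs at the readiness step, the same checks this workflow runs can be repeated by hand on the host. A minimal sketch, assuming the container name (`portfolio-app-staging`) and the ports used above (3000 inside the container, 3001 published on the host):

```bash
#!/usr/bin/env bash
# Re-run the workflow's readiness checks manually (assumes the staging stack is already up).
set -euo pipefail

# Health endpoint inside the container (same call the workflow makes via docker exec)
docker exec portfolio-app-staging curl -fsS http://localhost:3000/api/health && echo "health OK"

# Main page via the host port mapping
curl -fsS http://localhost:3001/ > /dev/null && echo "main page OK"

# If either check fails, the workflow dumps logs; do the same here
docker compose -f docker-compose.staging.yml logs --tail=20
```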
@@ -2,7 +2,7 @@ name: CI/CD Pipeline (Using Gitea Variables & Secrets)
on:
  push:
    branches: [ dev, main, production ]
    branches: [ production ]

env:
  NODE_VERSION: '20'
@@ -94,23 +94,10 @@ jobs:

      - name: Deploy using Gitea Variables and Secrets
        run: |
          # Determine if this is staging or production
          if [ "${{ github.ref }}" == "refs/heads/dev" ] || [ "${{ github.ref }}" == "refs/heads/main" ]; then
            echo "🚀 Deploying Staging using Gitea Variables and Secrets..."
            COMPOSE_FILE="docker-compose.staging.yml"
            HEALTH_PORT="3002"
            CONTAINER_NAME="portfolio-app-staging"
            DEPLOY_ENV="staging"
          else
            echo "🚀 Deploying Production using Gitea Variables and Secrets..."
            COMPOSE_FILE="docker-compose.production.yml"
            HEALTH_PORT="3000"
            CONTAINER_NAME="portfolio-app"
            DEPLOY_ENV="production"
          fi
          echo "🚀 Deploying using Gitea Variables and Secrets..."

          echo "📝 Using Gitea Variables and Secrets:"
          echo " - NODE_ENV: ${DEPLOY_ENV}"
          echo " - NODE_ENV: ${NODE_ENV}"
          echo " - LOG_LEVEL: ${LOG_LEVEL}"
          echo " - NEXT_PUBLIC_BASE_URL: ${NEXT_PUBLIC_BASE_URL}"
          echo " - MY_EMAIL: ${MY_EMAIL}"
@@ -118,32 +105,31 @@ jobs:
          echo " - MY_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - MY_INFO_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - ADMIN_BASIC_AUTH: [SET FROM GITEA SECRET]"
          echo " - N8N_WEBHOOK_URL: ${N8N_WEBHOOK_URL:-}"

          # Stop old containers (only for the environment being deployed)
          echo "🛑 Stopping old ${DEPLOY_ENV} containers..."
          docker compose -f $COMPOSE_FILE down || true
          # Stop old containers
          echo "🛑 Stopping old containers..."
          docker compose down || true

          # Clean up orphaned containers
          echo "🧹 Cleaning up orphaned ${DEPLOY_ENV} containers..."
          docker compose -f $COMPOSE_FILE down --remove-orphans || true
          echo "🧹 Cleaning up orphaned containers..."
          docker compose down --remove-orphans || true

          # Start new containers
          echo "🚀 Starting new ${DEPLOY_ENV} containers..."
          docker compose -f $COMPOSE_FILE up -d --force-recreate
          echo "🚀 Starting new containers..."
          docker compose up -d

          # Wait a moment for containers to start
          echo "⏳ Waiting for ${DEPLOY_ENV} containers to start..."
          sleep 15
          echo "⏳ Waiting for containers to start..."
          sleep 10

          # Check container logs for debugging
          echo "📋 ${DEPLOY_ENV} container logs (first 30 lines):"
          docker compose -f $COMPOSE_FILE logs --tail=30
          echo "📋 Container logs (first 20 lines):"
          docker compose logs --tail=20

          echo "✅ ${DEPLOY_ENV} deployment completed!"
          echo "✅ Deployment completed!"
        env:
          NODE_ENV: ${{ vars.NODE_ENV || 'production' }}
          LOG_LEVEL: ${{ vars.LOG_LEVEL || 'info' }}
          NODE_ENV: ${{ vars.NODE_ENV }}
          LOG_LEVEL: ${{ vars.LOG_LEVEL }}
          NEXT_PUBLIC_BASE_URL: ${{ vars.NEXT_PUBLIC_BASE_URL }}
          NEXT_PUBLIC_UMAMI_URL: ${{ vars.NEXT_PUBLIC_UMAMI_URL }}
          NEXT_PUBLIC_UMAMI_WEBSITE_ID: ${{ vars.NEXT_PUBLIC_UMAMI_WEBSITE_ID }}
@@ -152,98 +138,65 @@ jobs:
          MY_PASSWORD: ${{ secrets.MY_PASSWORD }}
          MY_INFO_PASSWORD: ${{ secrets.MY_INFO_PASSWORD }}
          ADMIN_BASIC_AUTH: ${{ secrets.ADMIN_BASIC_AUTH }}
          N8N_WEBHOOK_URL: ${{ vars.N8N_WEBHOOK_URL || '' }}
          N8N_SECRET_TOKEN: ${{ secrets.N8N_SECRET_TOKEN || '' }}

      - name: Wait for containers to be ready
        run: |
          # Determine environment
          if [ "${{ github.ref }}" == "refs/heads/dev" ] || [ "${{ github.ref }}" == "refs/heads/main" ]; then
            COMPOSE_FILE="docker-compose.staging.yml"
            HEALTH_PORT="3002"
            CONTAINER_NAME="portfolio-app-staging"
            DEPLOY_ENV="staging"
          else
            COMPOSE_FILE="docker-compose.production.yml"
            HEALTH_PORT="3000"
            CONTAINER_NAME="portfolio-app"
            DEPLOY_ENV="production"
          fi

          echo "⏳ Waiting for ${DEPLOY_ENV} containers to be ready..."
          sleep 30
          echo "⏳ Waiting for containers to be ready..."
          sleep 45

          # Check if all containers are running
          echo "📊 Checking ${DEPLOY_ENV} container status..."
          docker compose -f $COMPOSE_FILE ps
          echo "📊 Checking container status..."
          docker compose ps

          # Wait for application container to be healthy
          echo "🏥 Waiting for ${DEPLOY_ENV} application container to be healthy..."
          for i in {1..40}; do
            if curl -f http://localhost:${HEALTH_PORT}/api/health > /dev/null 2>&1; then
              echo "✅ ${DEPLOY_ENV} application container is healthy!"
          echo "🏥 Waiting for application container to be healthy..."
          for i in {1..60}; do
            if docker exec portfolio-app curl -f http://localhost:3000/api/health > /dev/null 2>&1; then
              echo "✅ Application container is healthy!"
              break
            fi
            echo "⏳ Waiting for ${DEPLOY_ENV} application container... ($i/40)"
            sleep 3
            echo "⏳ Waiting for application container... ($i/60)"
            sleep 5
          done

          # Additional wait for main page to be accessible
          echo "🌐 Waiting for ${DEPLOY_ENV} main page to be accessible..."
          for i in {1..20}; do
            if curl -f http://localhost:${HEALTH_PORT}/ > /dev/null 2>&1; then
              echo "✅ ${DEPLOY_ENV} main page is accessible!"
          echo "🌐 Waiting for main page to be accessible..."
          for i in {1..30}; do
            if curl -f http://localhost:3000/ > /dev/null 2>&1; then
              echo "✅ Main page is accessible!"
              break
            fi
            echo "⏳ Waiting for ${DEPLOY_ENV} main page... ($i/20)"
            sleep 2
            echo "⏳ Waiting for main page... ($i/30)"
            sleep 3
          done

      - name: Health check
        run: |
          # Determine environment
          if [ "${{ github.ref }}" == "refs/heads/dev" ] || [ "${{ github.ref }}" == "refs/heads/main" ]; then
            COMPOSE_FILE="docker-compose.staging.yml"
            HEALTH_PORT="3002"
            CONTAINER_NAME="portfolio-app-staging"
            DEPLOY_ENV="staging"
          else
            COMPOSE_FILE="docker-compose.production.yml"
            HEALTH_PORT="3000"
            CONTAINER_NAME="portfolio-app"
            DEPLOY_ENV="production"
          fi

          echo "🔍 Running comprehensive ${DEPLOY_ENV} health checks..."
          echo "🔍 Running comprehensive health checks..."

          # Check container status
          echo "📊 ${DEPLOY_ENV} container status:"
          docker compose -f $COMPOSE_FILE ps
          echo "📊 Container status:"
          docker compose ps

          # Check application container
          echo "🏥 Checking ${DEPLOY_ENV} application container..."
          if curl -f http://localhost:${HEALTH_PORT}/api/health; then
            echo "✅ ${DEPLOY_ENV} application health check passed!"
          echo "🏥 Checking application container..."
          if docker exec portfolio-app curl -f http://localhost:3000/api/health; then
            echo "✅ Application health check passed!"
          else
            echo "⚠️ ${DEPLOY_ENV} application health check failed, but continuing..."
            docker compose -f $COMPOSE_FILE logs --tail=50
            # Don't exit 1 for staging, only for production
            if [ "$DEPLOY_ENV" == "production" ]; then
              exit 1
            fi
            echo "❌ Application health check failed!"
            docker logs portfolio-app --tail=50
            exit 1
          fi

          # Check main page
          if curl -f http://localhost:${HEALTH_PORT}/ > /dev/null; then
            echo "✅ ${DEPLOY_ENV} main page is accessible!"
          if curl -f http://localhost:3000/ > /dev/null; then
            echo "✅ Main page is accessible!"
          else
            echo "⚠️ ${DEPLOY_ENV} main page check failed, but continuing..."
            if [ "$DEPLOY_ENV" == "production" ]; then
              exit 1
            fi
            echo "❌ Main page is not accessible!"
            exit 1
          fi

          echo "✅ ${DEPLOY_ENV} health checks completed!"
          echo "✅ All health checks passed! Deployment successful!"

      - name: Cleanup old images
        run: |
@@ -1,155 +0,0 @@
name: Staging Deployment

on:
  push:
    branches: [ dev, main ]

env:
  NODE_VERSION: '20'
  DOCKER_IMAGE: portfolio-app
  CONTAINER_NAME: portfolio-app-staging

jobs:
  staging:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run linting
        run: npm run lint

      - name: Run tests
        run: npm run test

      - name: Build application
        run: npm run build

      - name: Build Docker image
        run: |
          echo "🏗️ Building Docker image for staging..."
          docker build -t ${{ env.DOCKER_IMAGE }}:staging .
          docker tag ${{ env.DOCKER_IMAGE }}:staging ${{ env.DOCKER_IMAGE }}:staging-$(date +%Y%m%d-%H%M%S)
          echo "✅ Docker image built successfully"

      - name: Deploy Staging using Gitea Variables and Secrets
        run: |
          echo "🚀 Deploying Staging using Gitea Variables and Secrets..."

          echo "📝 Using Gitea Variables and Secrets:"
          echo " - NODE_ENV: staging"
          echo " - LOG_LEVEL: ${LOG_LEVEL:-info}"
          echo " - NEXT_PUBLIC_BASE_URL: ${NEXT_PUBLIC_BASE_URL}"
          echo " - MY_EMAIL: ${MY_EMAIL}"
          echo " - MY_INFO_EMAIL: ${MY_INFO_EMAIL}"
          echo " - MY_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - MY_INFO_PASSWORD: [SET FROM GITEA SECRET]"
          echo " - ADMIN_BASIC_AUTH: [SET FROM GITEA SECRET]"
          echo " - N8N_WEBHOOK_URL: ${N8N_WEBHOOK_URL:-}"

          # Stop old staging containers only
          echo "🛑 Stopping old staging containers..."
          docker compose -f docker-compose.staging.yml down || true

          # Clean up orphaned staging containers
          echo "🧹 Cleaning up orphaned staging containers..."
          docker compose -f docker-compose.staging.yml down --remove-orphans || true

          # Start new staging containers
          echo "🚀 Starting new staging containers..."
          docker compose -f docker-compose.staging.yml up -d --force-recreate

          # Wait a moment for containers to start
          echo "⏳ Waiting for staging containers to start..."
          sleep 15

          # Check container logs for debugging
          echo "📋 Staging container logs (first 30 lines):"
          docker compose -f docker-compose.staging.yml logs --tail=30

          echo "✅ Staging deployment completed!"
        env:
          NODE_ENV: staging
          LOG_LEVEL: ${{ vars.LOG_LEVEL || 'info' }}
          NEXT_PUBLIC_BASE_URL: ${{ vars.NEXT_PUBLIC_BASE_URL }}
          NEXT_PUBLIC_UMAMI_URL: ${{ vars.NEXT_PUBLIC_UMAMI_URL }}
          NEXT_PUBLIC_UMAMI_WEBSITE_ID: ${{ vars.NEXT_PUBLIC_UMAMI_WEBSITE_ID }}
          MY_EMAIL: ${{ vars.MY_EMAIL }}
          MY_INFO_EMAIL: ${{ vars.MY_INFO_EMAIL }}
          MY_PASSWORD: ${{ secrets.MY_PASSWORD }}
          MY_INFO_PASSWORD: ${{ secrets.MY_INFO_PASSWORD }}
          ADMIN_BASIC_AUTH: ${{ secrets.ADMIN_BASIC_AUTH }}
          N8N_WEBHOOK_URL: ${{ vars.N8N_WEBHOOK_URL || '' }}
          N8N_SECRET_TOKEN: ${{ secrets.N8N_SECRET_TOKEN || '' }}

      - name: Wait for staging to be ready
        run: |
          echo "⏳ Waiting for staging application to be ready..."
          sleep 30

          # Check if all staging containers are running
          echo "📊 Checking staging container status..."
          docker compose -f docker-compose.staging.yml ps

          # Wait for application container to be healthy
          echo "🏥 Waiting for staging application container to be healthy..."
          for i in {1..40}; do
            if curl -f http://localhost:3002/api/health > /dev/null 2>&1; then
              echo "✅ Staging application container is healthy!"
              break
            fi
            echo "⏳ Waiting for staging application container... ($i/40)"
            sleep 3
          done

          # Additional wait for main page to be accessible
          echo "🌐 Waiting for staging main page to be accessible..."
          for i in {1..20}; do
            if curl -f http://localhost:3002/ > /dev/null 2>&1; then
              echo "✅ Staging main page is accessible!"
              break
            fi
            echo "⏳ Waiting for staging main page... ($i/20)"
            sleep 2
          done

      - name: Staging health check
        run: |
          echo "🔍 Running staging health checks..."

          # Check container status
          echo "📊 Staging container status:"
          docker compose -f docker-compose.staging.yml ps

          # Check application container
          echo "🏥 Checking staging application container..."
          if curl -f http://localhost:3002/api/health; then
            echo "✅ Staging application health check passed!"
          else
            echo "⚠️ Staging application health check failed, but continuing..."
            docker compose -f docker-compose.staging.yml logs --tail=50
          fi

          # Check main page
          if curl -f http://localhost:3002/ > /dev/null; then
            echo "✅ Staging main page is accessible!"
          else
            echo "⚠️ Staging main page check failed, but continuing..."
          fi

          echo "✅ Staging deployment verification completed!"

      - name: Cleanup old staging images
        run: |
          echo "🧹 Cleaning up old staging images..."
          docker image prune -f --filter "label=stage=staging" || true
          echo "✅ Cleanup completed"
.github/workflows/ci-cd.yml (vendored, 4 lines changed)
@@ -198,7 +198,7 @@ jobs:
          # Wait for health check
          echo "Waiting for staging application to be healthy..."
          for i in {1..30}; do
            if curl -f http://localhost:3002/api/health > /dev/null 2>&1; then
            if curl -f http://localhost:3001/api/health > /dev/null 2>&1; then
              echo "✅ Staging deployment successful!"
              break
            fi
@@ -206,7 +206,7 @@
          done

          # Verify deployment
          if curl -f http://localhost:3002/api/health; then
          if curl -f http://localhost:3001/api/health; then
            echo "✅ Staging deployment verified!"
          else
            echo "⚠️ Staging health check failed, but container is running"
@@ -1,89 +0,0 @@
# 🔧 Deployment Fixes Applied

## Issues Fixed

### 1. Port 3001 Already Allocated ❌ → ✅
**Problem**: Port 3001 was already in use, causing the staging deployment to fail.

**Fix**:
- Changed the staging port from `3001` to `3002`
- Changed the PostgreSQL staging port from `5433` to `5434`
- Changed the Redis staging port from `6380` to `6381`

### 2. Docker Compose Version Warning ❌ → ✅
**Problem**: `version: '3.8'` is obsolete in newer Docker Compose releases.

**Fix**: Removed the `version` line from `docker-compose.staging.yml`

### 3. Missing N8N Environment Variables ❌ → ✅
**Problem**: A `N8N_SECRET_TOKEN` warning appeared.

**Fix**: Added `N8N_WEBHOOK_URL` and `N8N_SECRET_TOKEN` to the staging compose file

### 4. Wrong Compose File Used ❌ → ✅
**Problem**: The Gitea workflow was using the wrong compose file (stopping production containers).

**Fix**:
- Updated `ci-cd-with-gitea-vars.yml` to detect the branch and use the correct compose file
- Created a dedicated `staging-deploy.yml` workflow
- Staging now uses `docker-compose.staging.yml`
- Production uses `docker-compose.production.yml`

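The branch detection referred to in fix 4 is visible in the workflow diff above; stripped of the deploy steps it reduces to a small shell switch. A minimal sketch, assuming the same ref values and compose file names used in that workflow (`GITHUB_REF` stands in here for the workflow's `github.ref`):

```bash
#!/usr/bin/env bash
# Pick the compose file and port for the branch being deployed
# (mirrors the branch-detection logic from ci-cd-with-gitea-vars.yml).
GITHUB_REF="${GITHUB_REF:-refs/heads/dev}"

if [ "$GITHUB_REF" = "refs/heads/dev" ] || [ "$GITHUB_REF" = "refs/heads/main" ]; then
  COMPOSE_FILE="docker-compose.staging.yml"
  HEALTH_PORT="3002"
  DEPLOY_ENV="staging"
else
  COMPOSE_FILE="docker-compose.production.yml"
  HEALTH_PORT="3000"
  DEPLOY_ENV="production"
fi

echo "Deploying ${DEPLOY_ENV} with ${COMPOSE_FILE} (health on :${HEALTH_PORT})"
docker compose -f "$COMPOSE_FILE" up -d
```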
## Updated Ports

| Service | Staging | Production |
|---------|---------|------------|
| App | **3002** ✅ | **3000** |
| PostgreSQL | **5434** ✅ | **5432** |
| Redis | **6381** ✅ | **6379** |

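Before a deploy it can help to confirm that nothing else is already bound to these ports; a quick check, assuming `lsof` is available on the host (the same tool the port-conflict notes in STAGING_SETUP.md use):

```bash
# Check the staging and production ports for existing listeners
for port in 3000 3002 5432 5434 6379 6381; do
  echo "--- port $port ---"
  lsof -i :"$port" || echo "free"
done
```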
## How It Works Now

### Staging (dev/main branch)
```bash
git push origin dev
# → Uses docker-compose.staging.yml
# → Deploys to port 3002
# → Does NOT touch production containers
```

### Production (production branch)
```bash
git push origin production
# → Uses docker-compose.production.yml
# → Deploys to port 3000
# → Zero-downtime deployment
# → Does NOT touch staging containers
```

## Files Updated

- ✅ `docker-compose.staging.yml` - Fixed ports, removed version, added N8N vars
- ✅ `.gitea/workflows/ci-cd-with-gitea-vars.yml` - Branch detection, correct compose files
- ✅ `.gitea/workflows/staging-deploy.yml` - New dedicated staging workflow
- ✅ `STAGING_SETUP.md` - Updated port references

## Next Steps

1. **Test staging deployment**:
   ```bash
   git push origin dev
   # Should deploy to port 3002 without errors
   ```

2. **Verify staging**:
   ```bash
   curl http://localhost:3002/api/health
   ```

3. **When ready for production**:
   ```bash
   git checkout production
   git merge main
   git push origin production
   # Deploys safely to port 3000
   ```

---

**All fixes applied!** Staging and production are now completely isolated. 🚀
DEV_TESTING.md (new file, 236 lines)
@@ -0,0 +1,236 @@
# 🧪 Dev Branch Testing Guide

## Overview

This document explains how to test your portfolio project on the `dev` branch before deploying it to production.

## Prerequisites

1. ✅ n8n is already running at `n8n.dk0.dev`
2. ✅ The Gitea repository is set up
3. ✅ Docker and Docker Compose are installed

## Setup for local testing with n8n

### 1. Configure environment variables

Create a `.env.local` file (or update your existing `.env`):

```bash
# n8n integration
N8N_WEBHOOK_URL=https://n8n.dk0.dev
N8N_API_KEY=your-n8n-api-key
N8N_SECRET_TOKEN=your-n8n-secret-token

# Application
NODE_ENV=development
NEXT_PUBLIC_BASE_URL=http://localhost:3000

# Database (set automatically by docker-compose.dev.minimal.yml)
# DATABASE_URL=postgresql://portfolio_user:portfolio_dev_pass@localhost:5432/portfolio_dev?schema=public

# Redis (set automatically by docker-compose.dev.minimal.yml)
# REDIS_URL=redis://localhost:6379

# Email configuration
MY_EMAIL=contact@dk0.dev
MY_INFO_EMAIL=info@dk0.dev
MY_PASSWORD=your-email-password
MY_INFO_PASSWORD=your-info-email-password

# Analytics
NEXT_PUBLIC_UMAMI_URL=https://analytics.dk0.dev
NEXT_PUBLIC_UMAMI_WEBSITE_ID=b3665829-927a-4ada-b9bb-fcf24171061e

# Security
ADMIN_BASIC_AUTH=admin:your-secure-password
LOG_LEVEL=debug
```

### 2. Test locally

```bash
# 1. Start the database and Redis
npm run dev:minimal

# 2. In a new terminal: start the Next.js app
npm run dev

# 3. Open http://localhost:3000
```

### 3. Test the n8n webhook

Test the connection to your n8n server:

```bash
# Test the status endpoint
curl https://n8n.dk0.dev/webhook/denshooter-71242/status

# Test the chat endpoint (if configured)
curl -X POST https://n8n.dk0.dev/webhook/chat \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer YOUR_N8N_SECRET_TOKEN" \
  -d '{"message": "Hello", "history": []}'
```

## Staging deployment on the server

### 1. Configure Gitea variables and secrets

Go to your Gitea repository → Settings → Secrets/Variables and add:

**Variables:**
- `NEXT_PUBLIC_BASE_URL` = `https://staging.dk0.dev` (or your staging URL)
- `MY_EMAIL` = `contact@dk0.dev`
- `MY_INFO_EMAIL` = `info@dk0.dev`
- `NEXT_PUBLIC_UMAMI_URL` = `https://analytics.dk0.dev`
- `NEXT_PUBLIC_UMAMI_WEBSITE_ID` = `b3665829-927a-4ada-b9bb-fcf24171061e`
- `N8N_WEBHOOK_URL` = `https://n8n.dk0.dev`
- `LOG_LEVEL` = `debug`

**Secrets:**
- `MY_PASSWORD` = your email password
- `MY_INFO_PASSWORD` = your info email password
- `ADMIN_BASIC_AUTH` = `admin:your-secure-password`
- `N8N_API_KEY` = your n8n API key (optional)
- `N8N_SECRET_TOKEN` = your n8n secret token (optional)

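Once these are set and a deploy has run, it is worth confirming that the values actually reached the staging container before debugging anything else. A minimal check, assuming the container name `portfolio-app-staging` used elsewhere in this guide:

```bash
# List the relevant environment variables inside the running staging container
docker exec portfolio-app-staging env | grep -E 'N8N|NEXT_PUBLIC|MY_EMAIL|LOG_LEVEL'

# Fail loudly if a required variable is empty (same idea as the workflow's verify step)
docker exec portfolio-app-staging sh -c '[ -n "$NEXT_PUBLIC_BASE_URL" ] || { echo "NEXT_PUBLIC_BASE_URL missing"; exit 1; }'
```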
### 2. Push to the dev branch

```bash
# Make sure you are on the dev branch
git checkout dev

# Commit your changes
git add .
git commit -m "Test: Dev deployment"

# Push to the dev branch (automatically triggers the staging deployment)
git push origin dev
```

### 3. Monitor the deployment

After the push:
1. Go to your Gitea repository → Actions
2. Watch the `CI/CD Pipeline (Dev/Staging)` workflow
3. The workflow will:
   - Run the tests
   - Build the Docker image
   - Deploy the staging container on port 3001

### 4. Test staging

```bash
# On your server: check the container status
docker ps | grep staging

# Check the health endpoint
curl http://localhost:3001/api/health

# Check the n8n status endpoint
curl http://localhost:3001/api/n8n/status

# View the logs
docker logs portfolio-app-staging -f
```

### 5. Configure the staging URL

If you have a subdomain for staging (e.g. `staging.dk0.dev`):
- Configure your reverse proxy (Nginx/Traefik) to point to port 3001
- Or use `http://your-server-ip:3001` directly

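Before wiring up DNS, the proxy mapping can be exercised directly from the server; a small sketch, assuming the proxy terminates TLS for `staging.dk0.dev` on this host (this setup is an assumption, not something the repository configures):

```bash
# Test the reverse proxy mapping without touching DNS
curl -k --resolve staging.dk0.dev:443:127.0.0.1 https://staging.dk0.dev/api/health

# Or bypass the proxy entirely and hit the container's host port
curl http://localhost:3001/api/health
```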
## Troubleshooting

### Dev container is not created

1. **Check the Gitea workflow:**
   - Go to Repository → Actions
   - Check that the workflow `ci-cd-dev-staging.yml` exists
   - Check that the workflow reacts to pushes to the `dev` branch

2. **Check the Gitea variables:**
   - Make sure all required variables and secrets are set
   - Check the workflow logs for missing variables

3. **Check Docker:**
   ```bash
   # On your server
   docker ps -a
   docker images | grep portfolio-app
   ```

### n8n connection errors

1. **Check the n8n URL:**
   ```bash
   # Check whether n8n is reachable
   curl https://n8n.dk0.dev/webhook/denshooter-71242/status
   ```

2. **Check the environment variables:**
   ```bash
   # Inside the container
   docker exec portfolio-app-staging env | grep N8N
   ```

3. **Check the n8n webhook configuration:**
   - Make sure the webhook is activated in n8n
   - Check that the webhook path is correct (`/webhook/denshooter-71242/status`)

### Database errors

```bash
# Check whether the database is running
docker ps | grep postgres-staging

# Check the database logs
docker logs portfolio-postgres-staging

# Check the connection
docker exec portfolio-postgres-staging pg_isready -U portfolio_user -d portfolio_staging_db
```

## Workflow overview

```
┌─────────────────┐
│   Push to dev   │
└────────┬────────┘
         │
         ▼
┌─────────────────┐
│    Run Tests    │
└────────┬────────┘
         │
         ▼
┌─────────────────┐
│  Build Docker   │
│ Image (staging) │
└────────┬────────┘
         │
         ▼
┌─────────────────┐
│ Deploy Staging  │
│   (Port 3001)   │
└────────┬────────┘
         │
         ▼
┌─────────────────┐
│  Health Check   │
└─────────────────┘
```

## Next steps

1. ✅ Test locally with `npm run dev`
2. ✅ Configure the Gitea variables and secrets
3. ✅ Push to the `dev` branch
4. ✅ Test staging on port 3001
5. ✅ When everything works: merge to the `production` branch

---

**Tip:** Use `LOG_LEVEL=debug` in staging to see more information!
DOCKER_BUILD_FIX.md (new file, 150 lines)
@@ -0,0 +1,150 @@
# Docker Build Fix - Standalone Output Issue

## Problem

The Docker build fails with:
```
ERROR: failed to calculate checksum of ref ... "/app/.next/standalone/app": not found
```

## Cause

Next.js only creates the `standalone` output when:
1. `output: "standalone"` is set in `next.config.ts` ✅ (already configured)
2. The build completes successfully
3. All dependencies resolve correctly

## Solution

### 1. n8n status route fix

The route was adjusted so that it no longer fails during the build when `N8N_WEBHOOK_URL` is not set:

```typescript
// Now checks whether N8N_WEBHOOK_URL is set
if (!n8nWebhookUrl) {
  return NextResponse.json({ /* fallback */ });
}
```
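The effect of this fallback can be verified against a running instance: without `N8N_WEBHOOK_URL`, the route returns an "offline" payload instead of erroring. A quick check, assuming the route is served at `/api/n8n/status` (the path used in DEV_TESTING.md) and the app listens locally on port 3000:

```bash
# With N8N_WEBHOOK_URL unset, the status route should answer with the fallback payload
# (status "offline", no music/gaming/coding data) rather than failing.
curl -s http://localhost:3000/api/n8n/status | head -c 400; echo
```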
### 2. Dockerfile improvements

- **Verification step**: Checks whether the standalone directory exists
- **Debug output**: Shows the directory structure if problems occur
- **More robust error handling**: Better error messages

### 3. Possible causes and solutions

#### Problem: Standalone output is not created

**Solution 1: Check next.config.ts**
```typescript
// Make sure this is set:
output: "standalone",
outputFileTracingRoot: path.join(process.cwd()),
```

**Solution 2: Check the build logs**
```bash
# Look at the build logs for errors
docker build . 2>&1 | grep -i "standalone\|error"
```

**Solution 3: Local test**
```bash
# Test locally whether standalone is created
npm run build
ls -la .next/standalone/
```

#### Problem: Wrong directory structure

**✅ RESOLVED**: The debug output shows that Next.js 15 uses the `.next/standalone/` structure directly:
- `.next/standalone/server.js` ✅
- `.next/standalone/.next/` ✅
- `.next/standalone/node_modules/` ✅
- `.next/standalone/package.json` ✅

**NOT**: `.next/standalone/app/server.js` ❌

The Dockerfile was corrected to copy `.next/standalone/` directly.

## Debugging

### 1. Local build test

```bash
# Build locally
npm run build

# Check whether standalone exists
test -d .next/standalone && echo "✅ Standalone exists" || echo "❌ Standalone missing"

# Show the structure
ls -la .next/standalone/
find .next/standalone -name "server.js"
```

### 2. Docker build with debug output

```bash
# Build with more output
docker build --progress=plain -t portfolio-app:test .

# Or build only up to the builder stage
docker build --target builder -t portfolio-builder:test .
docker run --rm portfolio-builder:test ls -la .next/standalone/
```

### 3. Check the build logs

The updated Dockerfile now prints debug information:
- Shows the `.next/` directory structure
- Looks for the `standalone` directory
- Shows the `server.js` location

## Alternative: Fallback without standalone

If the standalone output keeps causing problems, you can fall back to a full image:

```dockerfile
# Instead of copying standalone, copy everything
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=nextjs:nodejs /app/.next ./.next
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
COPY --from=builder --chown=nextjs:nodejs /app/package.json ./package.json
COPY --from=builder --chown=nextjs:nodejs /app/prisma ./prisma

CMD ["npm", "start"]
```

**Drawback**: Larger image, but it always works.

## Next steps

1. ✅ n8n status route fix (already done)
2. ✅ Dockerfile debug improvements (already done)
3. 🔄 Push to the dev branch and test the build
4. 📊 Analyze the build logs
5. 🔧 If necessary: adjust the Dockerfile further

## Workflow test

```bash
# 1. Commit the changes
git add .
git commit -m "Fix: Docker build standalone output issue"

# 2. Push to the dev branch
git push origin dev

# 3. Monitor Gitea Actions
# Go to Repository → Actions → CI/CD Pipeline (Dev/Staging)

# 4. Check the build logs
# Look for the debug output in the build step
```

---

**Note**: If the problem persists, check the build logs for the debug output the updated Dockerfile now prints. It shows exactly where the problem lies.
Dockerfile (19 lines changed)
@@ -35,6 +35,20 @@ ENV NEXT_TELEMETRY_DISABLED=1
ENV NODE_ENV=production
RUN npm run build

# Verify standalone output was created and show structure for debugging
RUN if [ ! -d .next/standalone ]; then \
      echo "ERROR: .next/standalone directory not found!"; \
      echo "Contents of .next directory:"; \
      ls -la .next/ || true; \
      echo "Checking if standalone exists in different location:"; \
      find .next -name "standalone" -type d || true; \
      exit 1; \
    fi && \
    echo "✅ Standalone output found" && \
    ls -la .next/standalone/ && \
    echo "Standalone structure:" && \
    find .next/standalone -type f -name "server.js" || echo "server.js not found in standalone"

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app
@@ -55,7 +69,10 @@ RUN chown nextjs:nodejs .next

# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone/app ./
# Copy standalone output (contains server.js and all dependencies)
# The standalone output structure is: .next/standalone/ (not .next/standalone/app/)
# Next.js creates: .next/standalone/server.js, .next/standalone/.next/, .next/standalone/node_modules/
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static

# Copy Prisma files
@@ -5,11 +5,11 @@
You now have **two separate Docker stacks**:

1. **Staging** - Deploys automatically on `dev` or `main` branch
   - Port: `3002`
   - Port: `3001`
   - Container: `portfolio-app-staging`
   - Database: `portfolio_staging_db` (port 5433)
   - Redis: `portfolio-redis-staging` (port 6380)
   - URL: `https://staging.dk0.dev` (or `http://localhost:3002`)
   - URL: `https://staging.dk0.dev` (or `http://localhost:3001`)

2. **Production** - Deploys automatically on `production` branch
   - Port: `3000`
@@ -25,7 +25,7 @@ When you push to `dev` or `main` branch:
1. ✅ Tests run
2. ✅ Docker image is built and tagged as `staging`
3. ✅ Staging stack deploys automatically
4. ✅ Available on port 3002
4. ✅ Available on port 3001

### Automatic Production Deployment
When you merge to `production` branch:
@@ -55,9 +55,9 @@ When you merge to `production` branch:

| Service | Staging | Production |
|---------|---------|------------|
| App | 3002 | 3000 |
| PostgreSQL | 5434 | 5432 |
| Redis | 6381 | 6379 |
| App | 3001 | 3000 |
| PostgreSQL | 5433 | 5432 |
| Redis | 6380 | 6379 |

## Workflow

@@ -69,10 +69,10 @@ git checkout dev

# 2. Push to dev (triggers staging deployment)
git push origin dev
# → Staging deploys automatically on port 3002
# → Staging deploys automatically on port 3001

# 3. Test staging
curl http://localhost:3002/api/health
curl http://localhost:3001/api/health

# 4. Merge to main (also triggers staging)
git checkout main
@@ -101,7 +101,7 @@ docker compose -f docker-compose.staging.yml down
docker compose -f docker-compose.staging.yml logs -f

# Check staging health
curl http://localhost:3002/api/health
curl http://localhost:3001/api/health
```

### Production
@@ -142,7 +142,7 @@ curl http://localhost:3000/api/health
### Check Both Environments
```bash
# Staging
curl http://localhost:3002/api/health
curl http://localhost:3001/api/health

# Production
curl http://localhost:3000/api/health
@@ -174,11 +174,11 @@ docker ps | grep -v staging
4. Manual rollback: Restart old container if needed

### Port Conflicts
- Staging uses 3002, 5434, 6381
- Staging uses 3001, 5433, 6380
- Production uses 3000, 5432, 6379
- If conflicts occur, check what's using the ports:
  ```bash
  lsof -i :3002
  lsof -i :3001
  lsof -i :3000
  ```

@@ -6,10 +6,23 @@ export const revalidate = 30;
export async function GET() {
  try {
    // Check if n8n webhook URL is configured
    const n8nWebhookUrl = process.env.N8N_WEBHOOK_URL;

    if (!n8nWebhookUrl) {
      // Return fallback if n8n is not configured
      return NextResponse.json({
        status: { text: "offline", color: "gray" },
        music: null,
        gaming: null,
        coding: null,
      });
    }

    // Call the n8n webhook
    // Add timestamp to query to bypass Cloudflare cache
    const res = await fetch(
      `${process.env.N8N_WEBHOOK_URL}/webhook/denshooter-71242/status?t=${Date.now()}`,
      `${n8nWebhookUrl}/webhook/denshooter-71242/status?t=${Date.now()}`,
      {
        method: "GET",
        headers: {
@@ -2,13 +2,15 @@
# Deploys automatically on dev/main branch
# Uses different ports and container names to avoid conflicts with production

version: '3.8'

services:
  portfolio-staging:
    image: portfolio-app:staging
    container_name: portfolio-app-staging
    restart: unless-stopped
    ports:
      - "3002:3000" # Different port from production (3000) - using 3002 to avoid conflicts
      - "3001:3000" # Different port from production (3000)
    environment:
      - NODE_ENV=staging
      - DATABASE_URL=postgresql://portfolio_user:portfolio_staging_pass@postgres-staging:5432/portfolio_staging_db?schema=public
@@ -20,8 +22,9 @@ services:
      - MY_INFO_PASSWORD=${MY_INFO_PASSWORD}
      - ADMIN_BASIC_AUTH=${ADMIN_BASIC_AUTH:-admin:staging_password}
      - LOG_LEVEL=debug
      - N8N_WEBHOOK_URL=${N8N_WEBHOOK_URL:-}
      - N8N_SECRET_TOKEN=${N8N_SECRET_TOKEN:-}
      - N8N_WEBHOOK_URL=${N8N_WEBHOOK_URL:-https://n8n.dk0.dev}
      - N8N_API_KEY=${N8N_API_KEY}
      - N8N_SECRET_TOKEN=${N8N_SECRET_TOKEN}
    volumes:
      - portfolio_staging_data:/app/.next/cache
    networks:
@@ -59,7 +62,7 @@ services:
    networks:
      - portfolio_staging_net
    ports:
      - "5434:5432" # Different port from production (5432) - using 5434 to avoid conflicts
      - "5433:5432" # Different port from production (5432)
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U portfolio_user -d portfolio_staging_db"]
      interval: 10s
@@ -85,7 +88,7 @@ services:
    networks:
      - portfolio_staging_net
    ports:
      - "6381:6379" # Different port from production (6379) - using 6381 to avoid conflicts
      - "6380:6379" # Different port from production (6379)
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
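After these port changes, the effective staging configuration can be sanity-checked before anything is started; a small sketch, assuming Docker Compose v2 and the file name used above:

```bash
# Validate the compose file (fails on syntax or interpolation errors)
docker compose -f docker-compose.staging.yml config > /dev/null && echo "compose file OK"

# Bring up the staging stack and check the app on its host port
docker compose -f docker-compose.staging.yml up -d
docker compose -f docker-compose.staging.yml ps
curl -f http://localhost:3001/api/health
```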