feat: db support vercel and docker compose

This commit is contained in:
Tiger Ren 2026-02-02 00:02:57 +08:00
parent 8f6cad6365
commit 4c1445a4d7
16 changed files with 1219 additions and 155 deletions

61
.dockerignore Normal file
View File

@ -0,0 +1,61 @@
# Dependencies
node_modules
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Next.js build output
.next
out
# Production build files
dist
build
# Environment files (except .env.production if needed)
.env
.env.local
.env.development
.env.test
# Git
.git
.gitignore
# IDE
.vscode
.idea
*.swp
*.swo
*~
# OS files
.DS_Store
Thumbs.db
# Testing
coverage
.nyc_output
# Logs
logs
*.log
# Database (we mount this as volume)
data/*.db
data/*.db-journal
data/*.db-wal
data/*.db-shm
# Documentation
docs
*.md
!README.md
# Screenshots
screenshots
# Misc
.cache
temp
tmp

64
Dockerfile Normal file
View File

@ -0,0 +1,64 @@
# Multi-stage build for Next.js application
FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app

# Install dependencies based on the preferred package manager
COPY package.json package-lock.json* ./
RUN npm ci

# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
# Install libc6-compat for compatibility
RUN apk add --no-cache libc6-compat
# Copy dependencies from deps stage
COPY --from=deps /app/node_modules ./node_modules
# Copy source code
COPY . .

# Set environment variables for build
ENV NEXT_TELEMETRY_DISABLED=1
ENV NODE_ENV=production
ENV DB_TYPE=sqlite
ENV DATABASE_URL=file:/app/data/app.db

# DATABASE_URL points into /app/data, but nothing created that directory in
# this stage; if the build touches the DB (schema auto-init), it would fail.
RUN mkdir -p /app/data

# Build the application
RUN npm run build

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app

ENV NODE_ENV=production
ENV NEXT_TELEMETRY_DISABLED=1
ENV DB_TYPE=sqlite
ENV DATABASE_URL=file:/app/data/app.db

# Create data directory for SQLite (as root)
RUN mkdir -p /app/data

# Copy necessary files from builder
COPY --from=builder /app/public ./public
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static

# Set the correct permissions (running as root)
# Note: Files will be owned by root:root

# Expose the port
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"

# Report container health by probing the HTTP port.
# busybox wget is part of the alpine base image, so no extra package is needed.
HEALTHCHECK --interval=30s --timeout=5s --start-period=15s --retries=3 \
  CMD wget -qO /dev/null http://127.0.0.1:3000/ || exit 1

# Start the application
CMD ["node", "server.js"]

View File

@ -16,8 +16,8 @@ A web application to manage, track, and execute multi-customer deployment workfl
- **Framework**: Next.js 14+ (App Router)
- **Language**: TypeScript
- **Database**: SQLite (via better-sqlite3)
- **ORM**: Drizzle ORM
- **Database**: SQLite (local) or Turso (cloud)
- **ORM**: Drizzle ORM (supports both SQLite and LibSQL)
- **Styling**: Tailwind CSS
- **UI Components**: shadcn/ui
@ -46,6 +46,32 @@ npm run dev
The application uses SQLite with a local database file stored in `data/app.db`. The database schema is automatically initialized on first run.
## Deployment
### Option 1: Docker Compose (Self-Hosted)
```bash
# Build and run
docker compose up -d
# View logs
docker compose logs -f
```
Data is persisted on the host in `./data/app.db` via a bind mount of the `./data` directory into the container.
### Option 2: Vercel + Turso (Cloud)
1. Sign up at [Vercel](https://vercel.com) and [Turso](https://turso.tech)
2. Connect your Git repository to Vercel
3. Set environment variables in Vercel dashboard:
- `DB_TYPE=turso`
- `TURSO_URL=libsql://your-db.turso.io`
- `TURSO_TOKEN=your-token`
4. Deploy automatically on git push
See [docs/DEPLOYMENT.md](docs/DEPLOYMENT.md) for detailed instructions.
## Usage
### 1. Setup Your Infrastructure

20
docker-compose.yml Normal file
View File

@ -0,0 +1,20 @@
# Single-service stack for self-hosted deployment: Next.js app with a
# SQLite database persisted on the host via a bind mount of ./data.
services:
  release-tracker:
    build:
      context: .
      dockerfile: Dockerfile
      # Always produce an x86-64 Linux image, even when building on
      # another architecture (e.g. Apple Silicon).
      platforms:
        - linux/amd64
    container_name: release-tracker
    # Restart automatically on failure/reboot unless stopped explicitly.
    restart: unless-stopped
    ports:
      - "3000:3000"
    # These values mirror the defaults baked into the Dockerfile; they are
    # hard-coded here (no env_file), so a host .env file is not consulted.
    environment:
      - NODE_ENV=production
      - DB_TYPE=sqlite
      - DATABASE_URL=file:/app/data/app.db
    volumes:
      # Mount the data directory for SQLite persistence
      - ./data:/app/data
    # Run as root (uid=0, gid=0) - default behavior
    # user: "0:0"

331
docs/DEPLOYMENT.md Normal file
View File

@ -0,0 +1,331 @@
# Deployment Guide
This document describes how to deploy the Release Tracker application using two different approaches:
1. **Docker Compose** (Self-Hosted) - Uses SQLite database
2. **Vercel** (Cloud) - Uses Turso (serverless SQLite)
## Quick Comparison
| Feature | Docker Compose | Vercel |
|---------|---------------|--------|
| **Database** | SQLite (local file) | Turso (serverless SQLite) |
| **Hosting** | Your own server | Vercel cloud |
| **Cost** | Free (server cost only) | Free tier available |
| **Setup complexity** | Medium | Low |
| **Data control** | Full | Database hosted by Turso |
| **Best for** | Single-user, internal tools | Multi-user, public access |
---
## Option 1: Docker Compose (Self-Hosted)
### Prerequisites
- Linux server (or macOS/Windows with Docker Desktop)
- Docker and Docker Compose installed
- (Optional) On macOS, Docker Desktop can build `linux/amd64` images (natively on Intel Macs, via emulation on Apple Silicon)
### Deployment Steps
#### 1. Build and Run on Linux Server (Recommended)
```bash
# Clone or copy the project to your Linux server
git clone <your-repo> release-tracker
cd release-tracker
# Build the Docker image
sudo docker compose build
# Run the container
sudo docker compose up -d
```
#### 2. Build on Mac, Deploy to Linux
If you're on an Intel Mac and want to build locally then deploy to Linux:
```bash
# Build for linux/amd64 platform
docker build --platform linux/amd64 -t release-tracker:latest .
# Save the image
docker save release-tracker:latest | gzip > release-tracker.tar.gz
# Copy to Linux server
scp release-tracker.tar.gz user@linux-server:/tmp/
# On Linux server, load and run
ssh user@linux-server "cd /tmp && docker load < release-tracker.tar.gz"
# Create docker-compose.yml on Linux server and run
ssh user@linux-server "cd /opt/release-tracker && docker compose up -d"
```
#### 3. Data Persistence
The SQLite database is stored in `./data/app.db` and mounted as a volume. To backup:
```bash
# Backup
cp data/app.db backups/app.db.$(date +%Y%m%d)
# Restore
cp backups/app.db.20240101 data/app.db
sudo docker compose restart
```
### Environment Variables
Note that `docker-compose.yml` already hard-codes these values in its `environment:` section, so for the Docker deployment a `.env` file is optional. Create one in the project root only if you run the app outside Compose, or add an `env_file:` entry to `docker-compose.yml` so the container picks it up:
```bash
# Database (SQLite is the only option for Docker)
DB_TYPE=sqlite
DATABASE_URL=file:./data/app.db
# Next.js
NODE_ENV=production
NEXT_TELEMETRY_DISABLED=1
```
### Updating the Application
```bash
# Pull latest code
git pull
# Rebuild and restart
sudo docker compose down
sudo docker compose build --no-cache
sudo docker compose up -d
```
---
## Option 2: Vercel (Cloud)
### Prerequisites
- Vercel account (sign up at [vercel.com](https://vercel.com))
- Git repository (GitHub, GitLab, or Bitbucket)
- Turso account (sign up at [turso.tech](https://turso.tech))
### Step 1: Set Up Turso Database
1. Install Turso CLI:
```bash
curl -sSfL https://get.tur.so/install.sh | bash
```
2. Login to Turso:
```bash
turso auth login
```
3. Create a database:
```bash
turso db create release-tracker
```
4. Get the database URL:
```bash
turso db show release-tracker
```
Copy the "LibSQL URL" (looks like: `libsql://release-tracker-username.turso.io`)
5. Create an authentication token:
```bash
turso db tokens create release-tracker
```
Save this token securely.
6. (Optional) Run migrations:
```bash
# Set environment variables temporarily
export DB_TYPE=turso
export TURSO_URL=libsql://your-database-url
export TURSO_TOKEN=your-token
# Push schema to Turso
npx drizzle-kit push
```
### Step 2: Deploy to Vercel
1. **Connect Repository**:
- Go to [vercel.com](https://vercel.com)
- Click "Add New Project"
- Import your Git repository
2. **Configure Build Settings**:
- Framework Preset: Next.js
- Build Command: `npm run build`
- Output Directory: `.next`
3. **Set Environment Variables**:
In the Vercel dashboard, go to Project Settings → Environment Variables, add:
| Variable | Value |
|----------|-------|
| `DB_TYPE` | `turso` |
| `TURSO_URL` | Your Turso database URL |
| `TURSO_TOKEN` | Your Turso auth token |
4. **Deploy**:
- Click "Deploy"
- Vercel will build and deploy automatically
### Updating on Vercel
Simply push to your Git repository:
```bash
git push origin main
```
Vercel will automatically rebuild and redeploy.
---
## Local Development
### Using SQLite (Default)
```bash
# Copy example env file
cp .env.local.example .env.local
# Install dependencies
npm install
# Run development server
npm run dev
```
### Using Turso (Optional, for testing)
```bash
# Copy example env file and edit
cp .env.local.example .env.local
# Edit .env.local:
DB_TYPE=turso
TURSO_URL=libsql://your-database-url
TURSO_TOKEN=your-token
# Run development server
npm run dev
```
---
## Troubleshooting
### Docker Issues
**Issue**: Container fails to start with permission errors
**Solution**: The container runs as root (uid=0, gid=0) by default. If you have permission issues:
```bash
# Fix data directory permissions
sudo chown -R 0:0 ./data
sudo chmod -R 755 ./data
```
**Issue**: Database file is locked
**Solution**: Stop the container, remove the lock files, and restart. Caution: the `-wal` file can contain committed writes that have not yet been checkpointed into the main database file — only delete it once the container is fully stopped, and accept that the most recent writes may be lost:
```bash
sudo docker compose down
rm -f data/*.db-wal data/*.db-shm
sudo docker compose up -d
```
### Vercel Issues
**Issue**: Build fails with database errors
**Solution**: Ensure environment variables are set in Vercel dashboard (not just in `.env.local`)
**Issue**: Data doesn't persist between deployments
**Solution**: This is expected with SQLite on Vercel. You must use Turso for persistence.
### Turso Issues
**Issue**: Connection refused or timeout
**Solution**: Check your `TURSO_URL` and `TURSO_TOKEN` are correct. Turso databases may sleep after inactivity on free tier.
---
## Architecture Notes
### Database Abstraction
The application uses a database factory pattern (`src/lib/db/index.ts`) that automatically selects the appropriate database driver based on the `DB_TYPE` environment variable:
- `DB_TYPE=sqlite`: Uses `better-sqlite3` driver with local file
- `DB_TYPE=turso`: Uses `@libsql/client` driver with Turso
Both use the same Drizzle ORM schema, so no code changes are needed when switching between them.
### Data Migration Between Environments
To migrate data from local SQLite to Turso:
1. Export from SQLite:
```bash
sqlite3 data/app.db .dump > backup.sql
```
2. Import to Turso:
```bash
turso db shell release-tracker < backup.sql
```
Note: Some SQLite-specific syntax may need adjustment for Turso compatibility.
---
## Security Considerations
### Docker Deployment
- Container runs as root (uid=0) by design for simplicity
- SQLite database file is owned by root
- Ensure proper firewall rules on your server
- Consider using HTTPS reverse proxy (nginx/traefik)
### Vercel Deployment
- Turso provides encryption at rest and in transit
- Vercel provides HTTPS by default
- Keep `TURSO_TOKEN` secure and rotate periodically
---
## Backup Strategy
### Docker/SQLite
```bash
# Automated daily backup via cron.
# Use sqlite3's online ".backup" command instead of cp: copying a live
# SQLite file (especially in WAL mode) can produce a torn/corrupt backup.
0 2 * * * sqlite3 /opt/release-tracker/data/app.db ".backup '/backups/app.db.$(date +\%Y\%m\%d)'"
```
### Vercel/Turso
Turso provides automatic backups. You can also export manually:
```bash
turso db shell release-tracker .dump > backup.sql
```
---
## Support
For issues specific to:
- **Docker**: Check Docker logs with `docker compose logs`
- **Vercel**: Check Vercel dashboard deployment logs
- **Turso**: Check Turso dashboard or run `turso db inspect release-tracker`

View File

@ -1,10 +1,25 @@
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
schema: './src/lib/db/schema.ts',
out: './src/lib/db/migrations',
dialect: 'sqlite',
dbCredentials: {
url: process.env.DATABASE_URL || 'file:./data/app.db',
},
});
// Select the drizzle-kit target from DB_TYPE: 'turso' for the hosted
// libSQL database, anything else (default) for the local SQLite file.
const dbType = process.env.DB_TYPE || 'sqlite';

// Settings shared by both dialects — kept in one place so the schema and
// migrations paths cannot drift between the two branches.
const shared = {
  schema: './src/lib/db/schema.ts',
  out: './src/lib/db/migrations',
};

// Only the dialect and its credentials differ per database type.
const config = dbType === 'turso'
  ? {
      ...shared,
      dialect: 'turso' as const,
      dbCredentials: {
        url: process.env.TURSO_URL || '',
        authToken: process.env.TURSO_TOKEN,
      },
    }
  : {
      ...shared,
      dialect: 'sqlite' as const,
      dbCredentials: {
        url: process.env.DATABASE_URL || 'file:./data/app.db',
      },
    };

export default defineConfig(config);

View File

@ -6,6 +6,8 @@ const nextConfig: NextConfig = {
bodySizeLimit: '2mb',
},
},
// Enable standalone output for Docker deployment
output: 'standalone',
// Disable turbopack for now since we need webpack config for better-sqlite3
turbopack: {},
};

396
package-lock.json generated
View File

@ -11,6 +11,7 @@
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@libsql/client": "^0.17.0",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
@ -2024,6 +2025,168 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@libsql/client": {
"version": "0.17.0",
"resolved": "https://registry.npmjs.org/@libsql/client/-/client-0.17.0.tgz",
"integrity": "sha512-TLjSU9Otdpq0SpKHl1tD1Nc9MKhrsZbCFGot3EbCxRa8m1E5R1mMwoOjKMMM31IyF7fr+hPNHLpYfwbMKNusmg==",
"license": "MIT",
"peer": true,
"dependencies": {
"@libsql/core": "^0.17.0",
"@libsql/hrana-client": "^0.9.0",
"js-base64": "^3.7.5",
"libsql": "^0.5.22",
"promise-limit": "^2.7.0"
}
},
"node_modules/@libsql/core": {
"version": "0.17.0",
"resolved": "https://registry.npmjs.org/@libsql/core/-/core-0.17.0.tgz",
"integrity": "sha512-hnZRnJHiS+nrhHKLGYPoJbc78FE903MSDrFJTbftxo+e52X+E0Y0fHOCVYsKWcg6XgB7BbJYUrz/xEkVTSaipw==",
"license": "MIT",
"dependencies": {
"js-base64": "^3.7.5"
}
},
"node_modules/@libsql/darwin-arm64": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/darwin-arm64/-/darwin-arm64-0.5.22.tgz",
"integrity": "sha512-4B8ZlX3nIDPndfct7GNe0nI3Yw6ibocEicWdC4fvQbSs/jdq/RC2oCsoJxJ4NzXkvktX70C1J4FcmmoBy069UA==",
"cpu": [
"arm64"
],
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@libsql/darwin-x64": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/darwin-x64/-/darwin-x64-0.5.22.tgz",
"integrity": "sha512-ny2HYWt6lFSIdNFzUFIJ04uiW6finXfMNJ7wypkAD8Pqdm6nAByO+Fdqu8t7sD0sqJGeUCiOg480icjyQ2/8VA==",
"cpu": [
"x64"
],
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@libsql/hrana-client": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/@libsql/hrana-client/-/hrana-client-0.9.0.tgz",
"integrity": "sha512-pxQ1986AuWfPX4oXzBvLwBnfgKDE5OMhAdR/5cZmRaB4Ygz5MecQybvwZupnRz341r2CtFmbk/BhSu7k2Lm+Jw==",
"license": "MIT",
"dependencies": {
"@libsql/isomorphic-ws": "^0.1.5",
"cross-fetch": "^4.0.0",
"js-base64": "^3.7.5",
"node-fetch": "^3.3.2"
}
},
"node_modules/@libsql/isomorphic-ws": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/@libsql/isomorphic-ws/-/isomorphic-ws-0.1.5.tgz",
"integrity": "sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==",
"license": "MIT",
"dependencies": {
"@types/ws": "^8.5.4",
"ws": "^8.13.0"
}
},
"node_modules/@libsql/linux-arm-gnueabihf": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-arm-gnueabihf/-/linux-arm-gnueabihf-0.5.22.tgz",
"integrity": "sha512-3Uo3SoDPJe/zBnyZKosziRGtszXaEtv57raWrZIahtQDsjxBVjuzYQinCm9LRCJCUT5t2r5Z5nLDPJi2CwZVoA==",
"cpu": [
"arm"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/linux-arm-musleabihf": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-arm-musleabihf/-/linux-arm-musleabihf-0.5.22.tgz",
"integrity": "sha512-LCsXh07jvSojTNJptT9CowOzwITznD+YFGGW+1XxUr7fS+7/ydUrpDfsMX7UqTqjm7xG17eq86VkWJgHJfvpNg==",
"cpu": [
"arm"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/linux-arm64-gnu": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-arm64-gnu/-/linux-arm64-gnu-0.5.22.tgz",
"integrity": "sha512-KSdnOMy88c9mpOFKUEzPskSaF3VLflfSUCBwas/pn1/sV3pEhtMF6H8VUCd2rsedwoukeeCSEONqX7LLnQwRMA==",
"cpu": [
"arm64"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/linux-arm64-musl": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-arm64-musl/-/linux-arm64-musl-0.5.22.tgz",
"integrity": "sha512-mCHSMAsDTLK5YH//lcV3eFEgiR23Ym0U9oEvgZA0667gqRZg/2px+7LshDvErEKv2XZ8ixzw3p1IrBzLQHGSsw==",
"cpu": [
"arm64"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/linux-x64-gnu": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-x64-gnu/-/linux-x64-gnu-0.5.22.tgz",
"integrity": "sha512-kNBHaIkSg78Y4BqAdgjcR2mBilZXs4HYkAmi58J+4GRwDQZh5fIUWbnQvB9f95DkWUIGVeenqLRFY2pcTmlsew==",
"cpu": [
"x64"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/linux-x64-musl": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/linux-x64-musl/-/linux-x64-musl-0.5.22.tgz",
"integrity": "sha512-UZ4Xdxm4pu3pQXjvfJiyCzZop/9j/eA2JjmhMaAhe3EVLH2g11Fy4fwyUp9sT1QJYR1kpc2JLuybPM0kuXv/Tg==",
"cpu": [
"x64"
],
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@libsql/win32-x64-msvc": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/@libsql/win32-x64-msvc/-/win32-x64-msvc-0.5.22.tgz",
"integrity": "sha512-Fj0j8RnBpo43tVZUVoNK6BV/9AtDUM5S7DF3LB4qTYg1LMSZqi3yeCneUTLJD6XomQJlZzbI4mst89yspVSAnA==",
"cpu": [
"x64"
],
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@napi-rs/wasm-runtime": {
"version": "0.2.12",
"resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz",
@ -2037,6 +2200,12 @@
"@tybys/wasm-util": "^0.10.0"
}
},
"node_modules/@neon-rs/load": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.4.tgz",
"integrity": "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==",
"license": "MIT"
},
"node_modules/@next/env": {
"version": "16.1.6",
"resolved": "https://registry.npmjs.org/@next/env/-/env-16.1.6.tgz",
@ -3568,7 +3737,6 @@
"version": "20.19.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.30.tgz",
"integrity": "sha512-WJtwWJu7UdlvzEAUm484QNg5eAoq5QR08KDNx7g45Usrs2NtOPiX8ugDqmKdXkyL03rBqU5dYNYVQetEpBHq2g==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
@ -3603,6 +3771,15 @@
"@types/react": "^19.2.0"
}
},
"node_modules/@types/ws": {
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.54.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz",
@ -4766,6 +4943,35 @@
"dev": true,
"license": "MIT"
},
"node_modules/cross-fetch": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.1.0.tgz",
"integrity": "sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw==",
"license": "MIT",
"dependencies": {
"node-fetch": "^2.7.0"
}
},
"node_modules/cross-fetch/node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -4795,6 +5001,15 @@
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/data-uri-to-buffer": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/data-view-buffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz",
@ -5907,6 +6122,29 @@
"reusify": "^1.0.4"
}
},
"node_modules/fetch-blob": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "paypal",
"url": "https://paypal.me/jimmywarting"
}
],
"license": "MIT",
"dependencies": {
"node-domexception": "^1.0.0",
"web-streams-polyfill": "^3.0.3"
},
"engines": {
"node": "^12.20 || >= 14.13"
}
},
"node_modules/file-entry-cache": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
@ -5993,6 +6231,18 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/formdata-polyfill": {
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
"license": "MIT",
"dependencies": {
"fetch-blob": "^3.1.2"
},
"engines": {
"node": ">=12.20.0"
}
},
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
@ -6860,6 +7110,12 @@
"jiti": "lib/jiti-cli.mjs"
}
},
"node_modules/js-base64": {
"version": "3.7.8",
"resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.8.tgz",
"integrity": "sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==",
"license": "BSD-3-Clause"
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
@ -6987,6 +7243,47 @@
"node": ">= 0.8.0"
}
},
"node_modules/libsql": {
"version": "0.5.22",
"resolved": "https://registry.npmjs.org/libsql/-/libsql-0.5.22.tgz",
"integrity": "sha512-NscWthMQt7fpU8lqd7LXMvT9pi+KhhmTHAJWUB/Lj6MWa0MKFv0F2V4C6WKKpjCVZl0VwcDz4nOI3CyaT1DDiA==",
"cpu": [
"x64",
"arm64",
"wasm32",
"arm"
],
"license": "MIT",
"os": [
"darwin",
"linux",
"win32"
],
"dependencies": {
"@neon-rs/load": "^0.0.4",
"detect-libc": "2.0.2"
},
"optionalDependencies": {
"@libsql/darwin-arm64": "0.5.22",
"@libsql/darwin-x64": "0.5.22",
"@libsql/linux-arm-gnueabihf": "0.5.22",
"@libsql/linux-arm-musleabihf": "0.5.22",
"@libsql/linux-arm64-gnu": "0.5.22",
"@libsql/linux-arm64-musl": "0.5.22",
"@libsql/linux-x64-gnu": "0.5.22",
"@libsql/linux-x64-musl": "0.5.22",
"@libsql/win32-x64-msvc": "0.5.22"
}
},
"node_modules/libsql/node_modules/detect-libc": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz",
"integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==",
"license": "Apache-2.0",
"engines": {
"node": ">=8"
}
},
"node_modules/lightningcss": {
"version": "1.30.2",
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz",
@ -7546,6 +7843,44 @@
"node": ">=10"
}
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
"deprecated": "Use your platform's native DOMException instead",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
],
"license": "MIT",
"engines": {
"node": ">=10.5.0"
}
},
"node_modules/node-fetch": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
"license": "MIT",
"dependencies": {
"data-uri-to-buffer": "^4.0.0",
"fetch-blob": "^3.1.4",
"formdata-polyfill": "^4.0.10"
},
"engines": {
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/node-fetch"
}
},
"node_modules/node-releases": {
"version": "2.0.27",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
@ -7896,6 +8231,12 @@
"node": ">=6"
}
},
"node_modules/promise-limit": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/promise-limit/-/promise-limit-2.7.0.tgz",
"integrity": "sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==",
"license": "ISC"
},
"node_modules/prop-types": {
"version": "15.8.1",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
@ -8914,6 +9255,12 @@
"node": ">=8.0"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/ts-api-utils": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
@ -9134,7 +9481,6 @@
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"devOptional": true,
"license": "MIT"
},
"node_modules/unrs-resolver": {
@ -9262,6 +9608,31 @@
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT"
},
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@ -9383,6 +9754,27 @@
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
},
"node_modules/ws": {
"version": "8.19.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
"integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",

View File

@ -12,6 +12,7 @@
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@libsql/client": "^0.17.0",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",

View File

@ -3,9 +3,6 @@ import { Plus } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { ClusterCard } from '@/components/clusters/cluster-card';
import { listClusters } from '@/lib/actions/clusters';
import { db } from '@/lib/db';
import { customers } from '@/lib/db/schema';
import { eq, count } from 'drizzle-orm';
export default async function ClustersPage() {
const clusters = await listClusters();
@ -13,13 +10,12 @@ export default async function ClustersPage() {
// Get customer counts for each cluster
const clustersWithCount = await Promise.all(
clusters.map(async (cluster) => {
const result = await db
.select({ value: count() })
.from(customers)
.where(eq(customers.clusterId, cluster.id));
// Use the server action to get customer count
const { listCustomersByCluster } = await import('@/lib/actions/customers');
const customers = await listCustomersByCluster(cluster.id);
return {
...cluster,
customerCount: result[0]?.value || 0,
customerCount: customers.length,
};
})
);

View File

@ -4,11 +4,8 @@ import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { listClusters } from '@/lib/actions/clusters';
import { listCustomers } from '@/lib/actions/customers';
import { listCustomers, listCustomersByCluster } from '@/lib/actions/customers';
import { getActiveReleases, getReleaseStats } from '@/lib/actions/releases';
import { db } from '@/lib/db';
import { customers } from '@/lib/db/schema';
import { eq, count } from 'drizzle-orm';
export default async function DashboardPage() {
const [clusters, customersList, activeReleases, stats] = await Promise.all([
@ -21,13 +18,10 @@ export default async function DashboardPage() {
// Get customer counts per cluster
const clustersWithCount = await Promise.all(
clusters.map(async (cluster) => {
const result = await db
.select({ value: count() })
.from(customers)
.where(eq(customers.clusterId, cluster.id));
const customers = await listCustomersByCluster(cluster.id);
return {
...cluster,
customerCount: result[0]?.value || 0,
customerCount: customers.length,
};
})
);

View File

@ -2,7 +2,7 @@
import { db } from '@/lib/db';
import { clusters, customers } from '@/lib/db/schema';
import { eq, and, count } from 'drizzle-orm';
import { eq, and } from 'drizzle-orm';
import { revalidatePath } from 'next/cache';
export type ClusterInput = {
@ -33,18 +33,15 @@ export async function updateCluster(id: number, data: Partial<ClusterInput>) {
export async function deleteCluster(id: number) {
// Check if cluster has active customers
const result = await db
.select({ count: count() })
.from(customers)
.where(and(
const activeCustomers = await db.query.customers.findMany({
where: and(
eq(customers.clusterId, id),
eq(customers.isActive, true)
));
),
});
const customerCount = result[0]?.count || 0;
if (customerCount > 0) {
throw new Error(`Cannot delete cluster: ${customerCount} active customer(s) exist. Please move or delete customers first.`);
if (activeCustomers.length > 0) {
throw new Error(`Cannot delete cluster: ${activeCustomers.length} active customer(s) exist. Please move or delete customers first.`);
}
await db.update(clusters)

View File

@ -160,27 +160,17 @@ export async function getReleaseStats() {
const allReleases = await db.query.releases.findMany();
const activeReleases = allReleases.filter(r => r.status === 'active');
// Count total customer steps that are pending
const pendingSteps = await db
.select({ count: { value: customerSteps.id } })
.from(customerSteps)
.where(eq(customerSteps.status, 'pending'));
const doneSteps = await db
.select({ count: { value: customerSteps.id } })
.from(customerSteps)
.where(eq(customerSteps.status, 'done'));
const skippedSteps = await db
.select({ count: { value: customerSteps.id } })
.from(customerSteps)
.where(eq(customerSteps.status, 'skipped'));
// Count total customer steps by status
const allSteps = await db.query.customerSteps.findMany();
const pendingSteps = allSteps.filter(s => s.status === 'pending').length;
const doneSteps = allSteps.filter(s => s.status === 'done').length;
const skippedSteps = allSteps.filter(s => s.status === 'skipped').length;
return {
totalReleases: allReleases.length,
activeReleases: activeReleases.length,
pendingSteps: pendingSteps[0]?.count?.value || 0,
doneSteps: doneSteps[0]?.count?.value || 0,
skippedSteps: skippedSteps[0]?.count?.value || 0,
pendingSteps,
doneSteps,
skippedSteps,
};
}

View File

@ -116,13 +116,16 @@ export async function getStepTemplatesByCategory(releaseId: number, category: St
}
export async function getNextOrderIndex(releaseId: number, category: StepCategory) {
const result = await db
.select({ maxOrder: { value: stepTemplates.orderIndex } })
.from(stepTemplates)
.where(and(
const templates = await db.query.stepTemplates.findMany({
where: and(
eq(stepTemplates.releaseId, releaseId),
eq(stepTemplates.category, category)
));
),
});
return (result[0]?.maxOrder?.value ?? -1) + 1;
const maxOrder = templates.length > 0
? Math.max(...templates.map(t => t.orderIndex))
: -1;
return maxOrder + 1;
}

70
src/lib/db/client.ts Normal file
View File

@ -0,0 +1,70 @@
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { drizzle as drizzleLibSQL } from 'drizzle-orm/libsql';
import Database from 'better-sqlite3';
import { createClient } from '@libsql/client';
import * as schema from './schema';
// Database configuration type: selects the backend and carries its connection details.
export type DatabaseConfig = {
  // Backend selector: embedded SQLite file or hosted Turso (libSQL).
  type: 'sqlite' | 'turso';
  // SQLite options
  // Filesystem path to the database file (used when type === 'sqlite').
  sqlitePath?: string;
  // Turso options
  // Connection URL and optional auth token (used when type === 'turso').
  tursoUrl?: string;
  tursoToken?: string;
};
/**
 * Resolve the database configuration from environment variables.
 *
 * DB_TYPE chooses the backend: 'turso' selects Turso/libSQL, anything else
 * (including unset) falls back to a local SQLite file. For Turso, TURSO_URL
 * is mandatory and TURSO_TOKEN is optional. For SQLite, DATABASE_URL (a
 * "file:" URL) overrides the default './data/app.db' path.
 *
 * @throws Error when DB_TYPE=turso but TURSO_URL is not set.
 */
export function getDatabaseConfig(): DatabaseConfig {
  const useTurso = (process.env.DB_TYPE || 'sqlite') === 'turso';

  if (useTurso) {
    const url = process.env.TURSO_URL;
    if (!url) {
      throw new Error('TURSO_URL environment variable is required when DB_TYPE=turso');
    }
    return {
      type: 'turso',
      tursoUrl: url,
      tursoToken: process.env.TURSO_TOKEN,
    };
  }

  // SQLite fallback: strip the "file:" prefix if DATABASE_URL is present.
  const rawUrl = process.env.DATABASE_URL;
  const sqlitePath = (rawUrl ? rawUrl.replace('file:', '') : '') || './data/app.db';
  return { type: 'sqlite', sqlitePath };
}
/**
 * Create a Drizzle ORM client for the configured backend.
 *
 * @param config - Resolved database configuration; defaults to the
 *                 environment-derived config from getDatabaseConfig().
 * @returns A Drizzle instance bound to the application schema — libSQL-backed
 *          for Turso, better-sqlite3-backed otherwise.
 */
export function createDatabaseClient(config: DatabaseConfig = getDatabaseConfig()) {
  if (config.type === 'turso') {
    const client = createClient({
      url: config.tursoUrl!, // presence validated by getDatabaseConfig()
      authToken: config.tursoToken,
    });
    return drizzleLibSQL(client, { schema });
  }

  // SQLite (default). Enable WAL journaling for better read concurrency,
  // matching the pragma applied by the src/lib/db/index.ts client.
  const sqlite = new Database(config.sqlitePath);
  sqlite.pragma('journal_mode = WAL');
  return drizzle(sqlite, { schema });
}
// Module-level cache so the underlying connection is opened only once per process.
let cachedClient: ReturnType<typeof createDatabaseClient> | null = null;

/**
 * Return the process-wide database client, creating it on first use.
 */
export function getDatabase() {
  if (cachedClient === null) {
    cachedClient = createDatabaseClient();
  }
  return cachedClient;
}

// For use in server actions and API routes
export const db = getDatabase();

View File

@ -1,106 +1,208 @@
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { drizzle as drizzleLibSQL } from 'drizzle-orm/libsql';
import Database from 'better-sqlite3';
import { createClient } from '@libsql/client';
import * as schema from './schema';
import { mkdirSync, existsSync } from 'fs';
import { join } from 'path';
// Ensure data directory exists
const dataDir = join(process.cwd(), 'data');
if (!existsSync(dataDir)) {
mkdirSync(dataDir, { recursive: true });
// Database configuration type
export type DatabaseType = 'sqlite' | 'turso';
export type DatabaseConfig = {
type: DatabaseType;
// SQLite options
sqlitePath?: string;
// Turso options
tursoUrl?: string;
tursoToken?: string;
};
// Get database configuration from environment
export function getDatabaseConfig(): DatabaseConfig {
const dbType = (process.env.DB_TYPE as DatabaseType) || 'sqlite';
if (dbType === 'turso') {
const tursoUrl = process.env.TURSO_URL;
const tursoToken = process.env.TURSO_TOKEN;
if (!tursoUrl) {
throw new Error('TURSO_URL environment variable is required when DB_TYPE=turso');
}
return {
type: 'turso',
tursoUrl,
tursoToken,
};
}
// Default to SQLite
const dataDir = join(process.cwd(), 'data');
if (!existsSync(dataDir)) {
mkdirSync(dataDir, { recursive: true });
}
return {
type: 'sqlite',
sqlitePath: process.env.DATABASE_URL?.replace('file:', '') || join(dataDir, 'app.db'),
};
}
const dbPath = process.env.DATABASE_URL?.replace('file:', '') || join(dataDir, 'app.db');
// Create database client based on configuration
function createDatabaseClient(config: DatabaseConfig = getDatabaseConfig()) {
if (config.type === 'turso') {
const client = createClient({
url: config.tursoUrl!,
authToken: config.tursoToken,
});
const sqlite = new Database(dbPath);
sqlite.pragma('journal_mode = WAL');
return {
type: 'turso' as const,
client,
db: drizzleLibSQL(client, { schema }),
};
}
export const db = drizzle(sqlite, { schema });
// SQLite (default)
const sqlite = new Database(config.sqlitePath!);
sqlite.pragma('journal_mode = WAL');
return {
type: 'sqlite' as const,
client: sqlite,
db: drizzle(sqlite, { schema }),
};
}
// Database instance
let dbInstance: ReturnType<typeof createDatabaseClient> | null = null;
export function getDatabase() {
if (!dbInstance) {
dbInstance = createDatabaseClient();
}
return dbInstance.db;
}
export function getDatabaseType(): DatabaseType {
if (!dbInstance) {
dbInstance = createDatabaseClient();
}
return dbInstance.type;
}
// Export the database instance for use in server actions
export const db = getDatabase();
// SQL for table creation (compatible with both SQLite and Turso)
const CREATE_TABLES_SQL = `
CREATE TABLE IF NOT EXISTS clusters (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
kubeconfig_path TEXT,
description TEXT,
is_active INTEGER DEFAULT 1,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000)
);
CREATE TABLE IF NOT EXISTS customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
cluster_id INTEGER NOT NULL,
namespace TEXT NOT NULL,
name TEXT NOT NULL,
description TEXT,
is_active INTEGER DEFAULT 1,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (cluster_id) REFERENCES clusters(id) ON DELETE RESTRICT,
UNIQUE(cluster_id, namespace)
);
CREATE TABLE IF NOT EXISTS releases (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('onboarding', 'release', 'hotfix')),
status TEXT DEFAULT 'draft' CHECK(status IN ('draft', 'active', 'archived')),
version_number TEXT,
release_date INTEGER,
description TEXT,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000)
);
CREATE TABLE IF NOT EXISTS step_templates (
id INTEGER PRIMARY KEY AUTOINCREMENT,
release_id INTEGER NOT NULL,
name TEXT NOT NULL,
category TEXT NOT NULL CHECK(category IN ('deploy', 'verify')),
type TEXT NOT NULL CHECK(type IN ('bash', 'sql', 'text')),
content TEXT NOT NULL,
order_index INTEGER NOT NULL,
description TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE,
UNIQUE(release_id, category, order_index)
);
CREATE TABLE IF NOT EXISTS customer_steps (
id INTEGER PRIMARY KEY AUTOINCREMENT,
release_id INTEGER NOT NULL,
customer_id INTEGER NOT NULL,
template_id INTEGER,
name TEXT NOT NULL,
category TEXT NOT NULL CHECK(category IN ('deploy', 'verify')),
type TEXT NOT NULL CHECK(type IN ('bash', 'sql', 'text')),
content TEXT NOT NULL,
order_index INTEGER NOT NULL,
status TEXT DEFAULT 'pending' CHECK(status IN ('pending', 'done', 'skipped', 'reverted')),
executed_at INTEGER,
executed_by TEXT,
skip_reason TEXT,
notes TEXT,
is_custom INTEGER DEFAULT 0,
is_overridden INTEGER DEFAULT 0,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE,
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE,
FOREIGN KEY (template_id) REFERENCES step_templates(id) ON DELETE SET NULL,
UNIQUE(release_id, customer_id, template_id)
);
CREATE INDEX IF NOT EXISTS idx_customers_cluster ON customers(cluster_id);
CREATE INDEX IF NOT EXISTS idx_customer_steps_release ON customer_steps(release_id);
CREATE INDEX IF NOT EXISTS idx_customer_steps_customer ON customer_steps(customer_id);
CREATE INDEX IF NOT EXISTS idx_step_templates_release ON step_templates(release_id);
`;
// Initialize database with migrations
export async function initDb() {
// Create tables if they don't exist
sqlite.exec(`
CREATE TABLE IF NOT EXISTS clusters (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
kubeconfig_path TEXT,
description TEXT,
is_active INTEGER DEFAULT 1,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000)
);
const instance = dbInstance || createDatabaseClient();
CREATE TABLE IF NOT EXISTS customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
cluster_id INTEGER NOT NULL,
namespace TEXT NOT NULL,
name TEXT NOT NULL,
description TEXT,
is_active INTEGER DEFAULT 1,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (cluster_id) REFERENCES clusters(id) ON DELETE RESTRICT,
UNIQUE(cluster_id, namespace)
);
if (instance.type === 'turso') {
// Execute SQL for Turso
const statements = CREATE_TABLES_SQL
.split(';')
.map(s => s.trim())
.filter(s => s.length > 0);
CREATE TABLE IF NOT EXISTS releases (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('onboarding', 'release', 'hotfix')),
status TEXT DEFAULT 'draft' CHECK(status IN ('draft', 'active', 'archived')),
version_number TEXT,
release_date INTEGER,
description TEXT,
metadata TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000)
);
CREATE TABLE IF NOT EXISTS step_templates (
id INTEGER PRIMARY KEY AUTOINCREMENT,
release_id INTEGER NOT NULL,
name TEXT NOT NULL,
category TEXT NOT NULL CHECK(category IN ('deploy', 'verify')),
type TEXT NOT NULL CHECK(type IN ('bash', 'sql', 'text')),
content TEXT NOT NULL,
order_index INTEGER NOT NULL,
description TEXT,
created_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE,
UNIQUE(release_id, category, order_index)
);
CREATE TABLE IF NOT EXISTS customer_steps (
id INTEGER PRIMARY KEY AUTOINCREMENT,
release_id INTEGER NOT NULL,
customer_id INTEGER NOT NULL,
template_id INTEGER,
name TEXT NOT NULL,
category TEXT NOT NULL CHECK(category IN ('deploy', 'verify')),
type TEXT NOT NULL CHECK(type IN ('bash', 'sql', 'text')),
content TEXT NOT NULL,
order_index INTEGER NOT NULL,
status TEXT DEFAULT 'pending' CHECK(status IN ('pending', 'done', 'skipped', 'reverted')),
executed_at INTEGER,
executed_by TEXT,
skip_reason TEXT,
notes TEXT,
is_custom INTEGER DEFAULT 0,
is_overridden INTEGER DEFAULT 0,
created_at INTEGER DEFAULT (unixepoch() * 1000),
updated_at INTEGER DEFAULT (unixepoch() * 1000),
FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE,
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE,
FOREIGN KEY (template_id) REFERENCES step_templates(id) ON DELETE SET NULL,
UNIQUE(release_id, customer_id, template_id)
);
CREATE INDEX IF NOT EXISTS idx_customers_cluster ON customers(cluster_id);
CREATE INDEX IF NOT EXISTS idx_customer_steps_release ON customer_steps(release_id);
CREATE INDEX IF NOT EXISTS idx_customer_steps_customer ON customer_steps(customer_id);
CREATE INDEX IF NOT EXISTS idx_step_templates_release ON step_templates(release_id);
`);
for (const sql of statements) {
try {
await instance.client.execute(sql + ';');
} catch (error) {
// Ignore "already exists" errors
if (!(error instanceof Error && error.message.includes('already exists'))) {
console.error('Migration error:', error);
}
}
}
} else {
// Execute SQL for SQLite
instance.client.exec(CREATE_TABLES_SQL);
}
}